| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
27722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678267926802681268226832684268526862687268826892690269126922693269426952696269726982699270027012702270327042705270627072708270927102711271227132714271527162717271827192720272127222723272427252726272727282729273027312732273327342735273627372738273927402741274227432744274527462747274827492750275127522753275427552756275727582759276027612762276327642765276627672768276927702771277227732774277527762
77727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175317631773178317931803181318231833184318531863187318831893190319131923193319431953196319731983199320032013202320332043205320632073208320932103211321232133214321532163217321832193220322132223223322432253226322732283229323032313232323332343235323632373238323932403241324232433244324532463247324832493250325132523253325432553256325732583259326032613262326332643265326632673268326932703271327232733274327532763
27732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627362836293630363136323633363436353636363736383639364036413642364336443645364636473648364936503651365236533654365536563657365836593660366136623663366436653666366736683669367036713672367336743675367636773678367936803681368236833684368536863687368836893690369136923693369436953696369736983699370037013702370337043705370637073708370937103711371237133714371537163717371837193720372137223723372437253726372737283729373037313732373337343735373637373738373937403741374237433744374537463747374837493750375137523753375437553756375737583759376037613762376337643765376637673768376937703771377237733774377537763
77737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881388238833884388538863887388838893890389138923893389438953896389738983899390039013902390339043905390639073908390939103911391239133914391539163917391839193920392139223923392439253926392739283929393039313932393339343935393639373938393939403941394239433944394539463947394839493950395139523953395439553956395739583959396039613962396339643965396639673968396939703971397239733974397539763977397839793980398139823983398439853986398739883989399039913992399339943995399639973998399940004001400240034004400540064007400840094010401140124013401440154016401740184019402040214022402340244025402640274028402940304031403240334034403540364037403840394040404140424043404440454046404740484049405040514052405340544055405640574058405940604061406240634064406540664067406840694070407140724073407440754076407740784079408040814082408340844085408640874088408940904091409240934094409540964097409840994100410141024103410441054106410741084109411041114112411341144115411641174118411941204121412241234124412541264127412841294130413141324133413441354136413741384139414041414142414341444145414641474148414941504151415241534154415541564157415841594160416141624163416441654166416741684169417041714172417341744175417641774178417941804181418241834184418541864187418841894190419141924193419441954196419741984199420042014202420342044205420642074208420942104211421242134214421542164217421842194220422142224223422442254226422742284229423042314232423342344235423642374238423942404241424242434244424542464247424842494250425142524253425442554256425742584259426042614262426342644265426642674268426942704271427242734274427542764
27742784279428042814282428342844285428642874288428942904291429242934294429542964297429842994300430143024303430443054306430743084309431043114312431343144315431643174318431943204321432243234324432543264327432843294330433143324333433443354336433743384339434043414342434343444345434643474348434943504351435243534354435543564357435843594360436143624363436443654366436743684369437043714372437343744375437643774378437943804381438243834384438543864387438843894390439143924393439443954396439743984399440044014402440344044405440644074408440944104411441244134414441544164417441844194420442144224423442444254426442744284429443044314432443344344435443644374438443944404441444244434444444544464447444844494450445144524453445444554456445744584459446044614462446344644465446644674468446944704471447244734474447544764477447844794480448144824483448444854486448744884489449044914492449344944495449644974498449945004501450245034504450545064507450845094510451145124513451445154516451745184519452045214522452345244525452645274528452945304531453245334534453545364537453845394540454145424543454445454546454745484549455045514552455345544555455645574558455945604561456245634564456545664567456845694570457145724573457445754576457745784579458045814582458345844585458645874588458945904591459245934594459545964597459845994600460146024603460446054606460746084609461046114612461346144615461646174618461946204621462246234624462546264627462846294630463146324633463446354636463746384639464046414642464346444645464646474648464946504651465246534654465546564657465846594660466146624663466446654666466746684669467046714672467346744675467646774678467946804681468246834684468546864687468846894690469146924693469446954696469746984699470047014702470347044705470647074708470947104711471247134714471547164717471847194720472147224723472447254726472747284729473047314732473347344735473647374738473947404741474247434744474547464747474847494750475147524753475447554756475747584759476047614762476347644765476647674768476947704771477247734774477547764
77747784779478047814782478347844785478647874788478947904791479247934794479547964797479847994800480148024803480448054806480748084809481048114812481348144815481648174818481948204821482248234824482548264827482848294830483148324833483448354836483748384839484048414842484348444845484648474848484948504851485248534854485548564857485848594860486148624863486448654866486748684869487048714872487348744875487648774878487948804881488248834884488548864887488848894890489148924893489448954896489748984899490049014902490349044905490649074908490949104911491249134914491549164917491849194920492149224923492449254926492749284929493049314932493349344935493649374938493949404941494249434944494549464947494849494950495149524953495449554956495749584959496049614962496349644965496649674968496949704971497249734974497549764977497849794980498149824983498449854986498749884989499049914992499349944995499649974998499950005001500250035004500550065007500850095010501150125013501450155016501750185019502050215022502350245025502650275028502950305031503250335034503550365037503850395040504150425043504450455046504750485049505050515052505350545055505650575058505950605061506250635064506550665067506850695070507150725073507450755076507750785079508050815082508350845085508650875088508950905091509250935094509550965097509850995100510151025103510451055106510751085109511051115112511351145115511651175118511951205121512251235124512551265127512851295130513151325133513451355136513751385139514051415142514351445145514651475148514951505151515251535154515551565157515851595160516151625163516451655166516751685169517051715172517351745175517651775178517951805181518251835184518551865187518851895190519151925193519451955196519751985199520052015202520352045205520652075208520952105211521252135214521552165217521852195220522152225223522452255226522752285229523052315232523352345235523652375238523952405241524252435244524552465247524852495250525152525253525452555256525752585259526052615262526352645265526652675268526952705271527252735274527552765
27752785279528052815282528352845285528652875288528952905291529252935294529552965297529852995300530153025303530453055306530753085309531053115312531353145315531653175318531953205321532253235324532553265327532853295330533153325333533453355336533753385339534053415342534353445345534653475348534953505351535253535354535553565357535853595360536153625363536453655366536753685369537053715372537353745375537653775378537953805381538253835384538553865387538853895390539153925393539453955396539753985399540054015402540354045405540654075408540954105411541254135414541554165417541854195420542154225423542454255426542754285429543054315432543354345435543654375438543954405441544254435444544554465447544854495450545154525453545454555456545754585459546054615462546354645465546654675468546954705471547254735474547554765477547854795480548154825483548454855486548754885489549054915492549354945495549654975498549955005501550255035504550555065507550855095510551155125513551455155516551755185519552055215522552355245525552655275528552955305531553255335534553555365537553855395540554155425543554455455546554755485549555055515552555355545555555655575558555955605561556255635564556555665567556855695570557155725573557455755576557755785579558055815582558355845585558655875588558955905591559255935594559555965597559855995600560156025603560456055606560756085609561056115612561356145615561656175618561956205621562256235624562556265627562856295630563156325633563456355636563756385639564056415642564356445645564656475648564956505651565256535654565556565657565856595660566156625663566456655666566756685669567056715672567356745675567656775678567956805681568256835684568556865687568856895690569156925693569456955696569756985699570057015702570357045705570657075708570957105711571257135714571557165717571857195720572157225723572457255726572757285729573057315732573357345735573657375738573957405741574257435744574557465747574857495750575157525753575457555756575757585759576057615762576357645765576657675768576957705771577257735774577557765
77757785779578057815782578357845785578657875788578957905791579257935794579557965797579857995800580158025803580458055806580758085809581058115812581358145815581658175818581958205821582258235824582558265827582858295830583158325833583458355836583758385839584058415842584358445845584658475848584958505851585258535854585558565857585858595860586158625863586458655866586758685869587058715872587358745875587658775878587958805881588258835884588558865887588858895890589158925893589458955896589758985899590059015902590359045905590659075908590959105911591259135914591559165917591859195920592159225923592459255926592759285929593059315932593359345935593659375938593959405941594259435944594559465947594859495950595159525953595459555956595759585959596059615962596359645965596659675968596959705971597259735974597559765977597859795980598159825983598459855986598759885989599059915992599359945995599659975998599960006001600260036004600560066007600860096010601160126013601460156016601760186019602060216022602360246025602660276028602960306031603260336034603560366037603860396040604160426043604460456046604760486049605060516052605360546055605660576058605960606061606260636064606560666067606860696070607160726073607460756076607760786079608060816082608360846085608660876088608960906091609260936094609560966097609860996100610161026103610461056106610761086109611061116112611361146115611661176118611961206121612261236124612561266127612861296130613161326133613461356136613761386139614061416142614361446145614661476148614961506151615261536154615561566157615861596160616161626163616461656166616761686169617061716172617361746175617661776178617961806181618261836184618561866187618861896190619161926193619461956196619761986199620062016202620362046205620662076208620962106211621262136214621562166217621862196220622162226223622462256226622762286229623062316232623362346235623662376238623962406241624262436244624562466247624862496250625162526253625462556256625762586259626062616262626362646265626662676268626962706271627262736274627562766
27762786279628062816282628362846285628662876288628962906291629262936294629562966297629862996300630163026303630463056306630763086309631063116312631363146315631663176318631963206321632263236324632563266327632863296330633163326333633463356336633763386339634063416342634363446345634663476348634963506351635263536354635563566357635863596360636163626363636463656366636763686369637063716372637363746375637663776378637963806381638263836384638563866387638863896390639163926393639463956396639763986399640064016402640364046405640664076408640964106411641264136414641564166417641864196420642164226423642464256426642764286429643064316432643364346435643664376438643964406441644264436444644564466447644864496450645164526453645464556456645764586459646064616462646364646465646664676468646964706471647264736474647564766477647864796480648164826483648464856486648764886489649064916492649364946495649664976498649965006501650265036504650565066507650865096510651165126513651465156516651765186519652065216522652365246525652665276528652965306531653265336534653565366537653865396540654165426543654465456546654765486549655065516552655365546555655665576558655965606561656265636564656565666567656865696570657165726573657465756576657765786579658065816582658365846585658665876588658965906591659265936594659565966597659865996600660166026603660466056606660766086609661066116612661366146615661666176618661966206621662266236624662566266627662866296630663166326633663466356636663766386639664066416642664366446645664666476648664966506651665266536654665566566657665866596660666166626663666466656666666766686669667066716672667366746675667666776678667966806681668266836684668566866687668866896690669166926693669466956696669766986699670067016702670367046705670667076708670967106711671267136714671567166717671867196720672167226723672467256726672767286729673067316732673367346735673667376738673967406741674267436744674567466747674867496750675167526753675467556756675767586759676067616762676367646765676667676768676967706771677267736774677567766
77767786779678067816782678367846785678667876788678967906791679267936794679567966797679867996800680168026803680468056806680768086809681068116812681368146815681668176818681968206821682268236824682568266827682868296830683168326833683468356836683768386839684068416842684368446845684668476848684968506851685268536854685568566857685868596860686168626863686468656866686768686869687068716872687368746875687668776878687968806881688268836884688568866887688868896890689168926893689468956896689768986899690069016902690369046905690669076908690969106911691269136914691569166917691869196920692169226923692469256926692769286929693069316932693369346935693669376938693969406941694269436944694569466947694869496950695169526953695469556956695769586959696069616962696369646965696669676968696969706971697269736974697569766977697869796980698169826983698469856986698769886989699069916992699369946995699669976998699970007001700270037004700570067007700870097010701170127013701470157016701770187019702070217022702370247025702670277028702970307031703270337034703570367037703870397040704170427043704470457046704770487049705070517052705370547055705670577058705970607061706270637064706570667067706870697070707170727073707470757076707770787079708070817082708370847085708670877088708970907091709270937094709570967097709870997100710171027103710471057106710771087109711071117112711371147115711671177118711971207121712271237124712571267127712871297130713171327133713471357136713771387139714071417142714371447145714671477148714971507151715271537154715571567157715871597160716171627163716471657166716771687169717071717172717371747175717671777178717971807181718271837184718571867187718871897190719171927193719471957196719771987199720072017202720372047205720672077208720972107211721272137214721572167217721872197220722172227223722472257226722772287229723072317232723372347235723672377238723972407241724272437244724572467247724872497250725172527253725472557256725772587259726072617262726372647265726672677268726972707271727272737274727572767
27772787279728072817282728372847285728672877288728972907291729272937294729572967297729872997300730173027303730473057306730773087309731073117312731373147315731673177318731973207321732273237324732573267327732873297330733173327333733473357336733773387339734073417342734373447345734673477348734973507351735273537354735573567357735873597360736173627363736473657366736773687369737073717372737373747375737673777378737973807381738273837384738573867387738873897390739173927393739473957396739773987399740074017402740374047405740674077408740974107411741274137414741574167417741874197420742174227423742474257426742774287429743074317432743374347435743674377438743974407441744274437444744574467447744874497450745174527453745474557456745774587459746074617462746374647465746674677468746974707471747274737474747574767477747874797480748174827483748474857486748774887489749074917492749374947495749674977498749975007501750275037504750575067507750875097510751175127513751475157516751775187519752075217522752375247525752675277528752975307531753275337534753575367537753875397540754175427543754475457546754775487549755075517552755375547555755675577558755975607561756275637564756575667567756875697570757175727573757475757576757775787579758075817582758375847585758675877588758975907591759275937594759575967597759875997600760176027603760476057606760776087609761076117612761376147615761676177618761976207621762276237624762576267627762876297630763176327633763476357636763776387639764076417642764376447645764676477648764976507651765276537654765576567657765876597660766176627663766476657666766776687669767076717672767376747675767676777678767976807681768276837684768576867687768876897690769176927693769476957696769776987699770077017702770377047705770677077708770977107711771277137714771577167717771877197720772177227723772477257726772777287729773077317732773377347735773677377738773977407741774277437744774577467747774877497750775177527753775477557756775777587759776077617762776377647765776677677768776977707771777277737774777577767
77777787779778077817782778377847785778677877788778977907791779277937794779577967797779877997800780178027803780478057806780778087809781078117812781378147815781678177818781978207821782278237824782578267827782878297830783178327833783478357836783778387839784078417842784378447845784678477848784978507851785278537854785578567857785878597860786178627863786478657866786778687869787078717872787378747875787678777878787978807881788278837884788578867887788878897890789178927893789478957896789778987899790079017902790379047905790679077908790979107911791279137914791579167917791879197920792179227923792479257926792779287929793079317932793379347935793679377938793979407941794279437944794579467947794879497950795179527953795479557956795779587959796079617962796379647965796679677968796979707971797279737974797579767977797879797980798179827983798479857986798779887989799079917992799379947995799679977998799980008001800280038004800580068007800880098010801180128013801480158016801780188019802080218022802380248025802680278028802980308031803280338034803580368037803880398040804180428043804480458046804780488049805080518052805380548055805680578058805980608061806280638064806580668067806880698070807180728073807480758076807780788079808080818082808380848085808680878088808980908091809280938094809580968097809880998100810181028103810481058106810781088109811081118112811381148115811681178118811981208121812281238124812581268127812881298130813181328133813481358136813781388139814081418142814381448145814681478148814981508151815281538154815581568157815881598160816181628163816481658166816781688169817081718172817381748175817681778178817981808181818281838184818581868187818881898190819181928193819481958196819781988199820082018202820382048205820682078208820982108211821282138214821582168217821882198220822182228223822482258226822782288229823082318232823382348235823682378238823982408241824282438244824582468247824882498250825182528253825482558256825782588259826082618262826382648265826682678268826982708271827282738274827582768
27782788279828082818282828382848285828682878288828982908291829282938294829582968297829882998300830183028303830483058306830783088309831083118312831383148315831683178318831983208321832283238324832583268327832883298330833183328333833483358336833783388339834083418342834383448345834683478348834983508351835283538354835583568357835883598360836183628363836483658366836783688369837083718372837383748375837683778378837983808381838283838384838583868387838883898390839183928393839483958396839783988399840084018402840384048405840684078408840984108411841284138414841584168417841884198420842184228423842484258426842784288429843084318432843384348435843684378438843984408441844284438444844584468447844884498450845184528453845484558456845784588459846084618462846384648465846684678468846984708471847284738474847584768477847884798480848184828483848484858486848784888489849084918492849384948495849684978498849985008501850285038504850585068507850885098510851185128513851485158516851785188519852085218522852385248525852685278528852985308531853285338534853585368537853885398540854185428543854485458546854785488549855085518552855385548555855685578558855985608561856285638564856585668567856885698570857185728573857485758576857785788579858085818582858385848585858685878588858985908591859285938594859585968597859885998600860186028603860486058606860786088609861086118612861386148615861686178618861986208621862286238624862586268627862886298630863186328633863486358636863786388639864086418642864386448645864686478648864986508651865286538654865586568657865886598660866186628663866486658666866786688669867086718672867386748675867686778678867986808681868286838684868586868687868886898690869186928693869486958696869786988699870087018702870387048705870687078708870987108711871287138714871587168717871887198720872187228723872487258726872787288729873087318732873387348735873687378738873987408741874287438744874587468747874887498750875187528753875487558756875787588759876087618762876387648765876687678768876987708771877287738774877587768
77787788779878087818782878387848785878687878788878987908791879287938794879587968797879887998800880188028803880488058806880788088809881088118812881388148815881688178818881988208821882288238824882588268827882888298830883188328833883488358836883788388839884088418842884388448845884688478848884988508851885288538854885588568857885888598860886188628863886488658866886788688869887088718872887388748875887688778878887988808881888288838884888588868887888888898890889188928893889488958896889788988899890089018902890389048905890689078908890989108911891289138914891589168917891889198920892189228923892489258926892789288929893089318932893389348935893689378938893989408941894289438944894589468947894889498950895189528953895489558956895789588959896089618962896389648965896689678968896989708971897289738974897589768977897889798980898189828983898489858986898789888989899089918992899389948995899689978998899990009001900290039004900590069007900890099010901190129013901490159016901790189019902090219022902390249025902690279028902990309031903290339034903590369037903890399040904190429043904490459046904790489049905090519052905390549055905690579058905990609061906290639064906590669067906890699070907190729073907490759076907790789079908090819082908390849085908690879088908990909091909290939094909590969097909890999100910191029103910491059106910791089109911091119112911391149115911691179118911991209121912291239124912591269127912891299130913191329133913491359136913791389139914091419142914391449145914691479148914991509151915291539154915591569157915891599160916191629163916491659166916791689169917091719172917391749175917691779178917991809181918291839184918591869187918891899190919191929193919491959196919791989199920092019202920392049205920692079208920992109211921292139214921592169217921892199220922192229223922492259226922792289229923092319232923392349235923692379238923992409241924292439244924592469247924892499250925192529253925492559256925792589259926092619262926392649265926692679268926992709271927292739274927592769
27792789279928092819282928392849285928692879288928992909291929292939294929592969297929892999300930193029303930493059306930793089309931093119312931393149315931693179318931993209321932293239324932593269327932893299330933193329333933493359336933793389339934093419342934393449345934693479348934993509351935293539354935593569357935893599360936193629363936493659366936793689369937093719372937393749375937693779378937993809381938293839384938593869387938893899390939193929393939493959396939793989399940094019402940394049405940694079408940994109411941294139414941594169417941894199420942194229423942494259426942794289429943094319432943394349435943694379438943994409441944294439444944594469447944894499450945194529453945494559456945794589459946094619462946394649465946694679468946994709471947294739474947594769477947894799480948194829483948494859486948794889489949094919492949394949495949694979498949995009501950295039504950595069507950895099510951195129513951495159516951795189519952095219522952395249525952695279528952995309531953295339534953595369537953895399540954195429543954495459546954795489549955095519552955395549555955695579558955995609561956295639564956595669567956895699570957195729573957495759576957795789579958095819582958395849585958695879588958995909591959295939594959595969597959895999600960196029603960496059606960796089609961096119612961396149615961696179618961996209621962296239624962596269627962896299630963196329633963496359636963796389639964096419642964396449645964696479648964996509651965296539654965596569657965896599660966196629663966496659666966796689669967096719672967396749675967696779678967996809681968296839684968596869687968896899690969196929693969496959696969796989699970097019702970397049705970697079708970997109711971297139714971597169717971897199720972197229723972497259726972797289729973097319732973397349735973697379738973997409741974297439744974597469747974897499750975197529753975497559756975797589759976097619762976397649765976697679768976997709771977297739774977597769
77797789779978097819782978397849785978697879788978997909791979297939794979597969797979897999800980198029803980498059806980798089809981098119812981398149815981698179818981998209821982298239824982598269827982898299830983198329833983498359836983798389839984098419842984398449845984698479848984998509851985298539854985598569857985898599860986198629863986498659866986798689869987098719872987398749875987698779878987998809881988298839884988598869887988898899890989198929893989498959896989798989899990099019902990399049905990699079908990999109911991299139914991599169917991899199920992199229923992499259926992799289929993099319932993399349935993699379938993999409941994299439944994599469947994899499950995199529953995499559956995799589959996099619962996399649965996699679968996999709971997299739974997599769977997899799980998199829983998499859986998799889989999099919992999399949995999699979998999910000100011000210003100041000510006100071000810009100101001110012100131001410015100161001710018100191002010021100221002310024100251002610027100281002910030100311003210033100341003510036100371003810039100401004110042100431004410045100461004710048100491005010051100521005310054100551005610057100581005910060100611006210063100641006510066100671006810069100701007110072100731007410075100761007710078100791008010081100821008310084100851008610087100881008910090100911009210093100941009510096100971009810099101001010110102101031010410105101061010710108101091011010111101121011310114101151011610117101181011910120101211012210123101241012510126101271012810129101301013110132101331013410135101361013710138101391014010141101421014310144101451014610147101481014910150101511015210153101541015510156101571015810159101601016110162101631016410165101661016710168101691017010171101721017310174101751017610177101781017910180101811018210183101841018510186101871018810189101901019110192101931019410195101961019710198101991020010201102021020310204102051020610207102081020910210102111021210213102141021510216102171021810219102201022
11022210223102241022510226102271022810229102301023110232102331023410235102361023710238102391024010241102421024310244102451024610247102481024910250102511025210253102541025510256102571025810259102601026110262102631026410265102661026710268102691027010271102721027310274102751027610277102781027910280102811028210283102841028510286102871028810289102901029110292102931029410295102961029710298102991030010301103021030310304103051030610307103081030910310103111031210313103141031510316103171031810319103201032110322103231032410325103261032710328103291033010331103321033310334103351033610337103381033910340103411034210343103441034510346103471034810349103501035110352103531035410355103561035710358103591036010361103621036310364103651036610367103681036910370103711037210373103741037510376103771037810379103801038110382103831038410385103861038710388103891039010391103921039310394103951039610397103981039910400104011040210403104041040510406104071040810409104101041110412104131041410415104161041710418104191042010421104221042310424104251042610427104281042910430104311043210433104341043510436104371043810439104401044110442104431044410445104461044710448104491045010451104521045310454104551045610457104581045910460104611046210463104641046510466104671046810469104701047110472104731047410475104761047710478104791048010481104821048310484104851048610487104881048910490104911049210493104941049510496104971049810499105001050110502105031050410505105061050710508105091051010511105121051310514105151051610517105181051910520105211052210523105241052510526105271052810529105301053110532105331053410535105361053710538105391054010541105421054310544105451054610547105481054910550105511055210553105541055510556105571055810559105601056110562105631056410565105661056710568105691057010571105721057310574105751057610577105781057910580105811058210583105841058510586105871058810589105901059110592105931059410595105961059710598105991060010601106021060310604106051060610607106081060910610106111061210613106141061510616106171061810619106201062
11062210623106241062510626106271062810629106301063110632106331063410635106361063710638106391064010641106421064310644106451064610647106481064910650106511065210653106541065510656106571065810659106601066110662106631066410665106661066710668106691067010671106721067310674106751067610677106781067910680106811068210683106841068510686106871068810689106901069110692106931069410695106961069710698106991070010701107021070310704107051070610707107081070910710107111071210713107141071510716107171071810719107201072110722107231072410725107261072710728107291073010731107321073310734107351073610737107381073910740107411074210743107441074510746107471074810749107501075110752107531075410755107561075710758107591076010761107621076310764107651076610767107681076910770107711077210773107741077510776107771077810779107801078110782107831078410785107861078710788107891079010791107921079310794107951079610797107981079910800108011080210803108041080510806108071080810809108101081110812108131081410815108161081710818108191082010821108221082310824108251082610827108281082910830108311083210833108341083510836108371083810839108401084110842108431084410845108461084710848108491085010851108521085310854108551085610857108581085910860108611086210863108641086510866108671086810869108701087110872108731087410875108761087710878108791088010881108821088310884108851088610887108881088910890108911089210893108941089510896108971089810899109001090110902109031090410905109061090710908109091091010911109121091310914109151091610917109181091910920109211092210923109241092510926109271092810929109301093110932109331093410935109361093710938109391094010941109421094310944109451094610947109481094910950109511095210953109541095510956109571095810959109601096110962109631096410965109661096710968109691097010971109721097310974109751097610977109781097910980109811098210983109841098510986109871098810989109901099110992109931099410995109961099710998109991100011001110021100311004110051100611007110081100911010110111101211013110141101511016110171101811019110201102
11102211023110241102511026110271102811029110301103111032110331103411035110361103711038110391104011041110421104311044110451104611047110481104911050110511105211053110541105511056110571105811059110601106111062110631106411065110661106711068110691107011071110721107311074110751107611077110781107911080110811108211083110841108511086110871108811089110901109111092110931109411095110961109711098110991110011101111021110311104111051110611107111081110911110111111111211113111141111511116111171111811119111201112111122111231112411125111261112711128111291113011131111321113311134111351113611137111381113911140111411114211143111441114511146111471114811149111501115111152111531115411155111561115711158111591116011161111621116311164111651116611167111681116911170111711117211173111741117511176111771117811179111801118111182111831118411185111861118711188111891119011191111921119311194111951119611197111981119911200112011120211203112041120511206112071120811209112101121111212112131121411215112161121711218112191122011221112221122311224112251122611227112281122911230112311123211233112341123511236112371123811239112401124111242112431124411245112461124711248112491125011251112521125311254112551125611257112581125911260112611126211263112641126511266112671126811269112701127111272112731127411275112761127711278112791128011281112821128311284112851128611287112881128911290112911129211293112941129511296112971129811299113001130111302113031130411305113061130711308113091131011311113121131311314113151131611317113181131911320113211132211323113241132511326113271132811329113301133111332113331133411335113361133711338113391134011341113421134311344113451134611347113481134911350113511135211353113541135511356113571135811359113601136111362113631136411365113661136711368113691137011371113721137311374113751137611377113781137911380113811138211383113841138511386113871138811389113901139111392113931139411395113961139711398113991140011401114021140311404114051140611407114081140911410114111141211413114141141511416114171141811419114201142
11142211423114241142511426114271142811429114301143111432114331143411435114361143711438114391144011441114421144311444114451144611447114481144911450114511145211453114541145511456114571145811459114601146111462114631146411465114661146711468114691147011471114721147311474114751147611477114781147911480114811148211483114841148511486114871148811489114901149111492114931149411495114961149711498114991150011501115021150311504115051150611507115081150911510115111151211513115141151511516115171151811519115201152111522115231152411525115261152711528115291153011531115321153311534115351153611537115381153911540115411154211543115441154511546115471154811549115501155111552115531155411555115561155711558115591156011561115621156311564115651156611567115681156911570115711157211573115741157511576115771157811579115801158111582115831158411585115861158711588115891159011591115921159311594115951159611597115981159911600116011160211603116041160511606116071160811609116101161111612116131161411615116161161711618116191162011621116221162311624116251162611627116281162911630116311163211633116341163511636116371163811639116401164111642116431164411645116461164711648116491165011651116521165311654116551165611657116581165911660116611166211663116641166511666116671166811669116701167111672116731167411675116761167711678116791168011681116821168311684116851168611687116881168911690116911169211693116941169511696116971169811699117001170111702117031170411705117061170711708117091171011711117121171311714117151171611717117181171911720117211172211723117241172511726117271172811729117301173111732117331173411735117361173711738117391174011741117421174311744117451174611747117481174911750117511175211753117541175511756117571175811759117601176111762117631176411765117661176711768117691177011771117721177311774117751177611777117781177911780117811178211783117841178511786117871178811789117901179111792117931179411795117961179711798117991180011801118021180311804118051180611807118081180911810118111181211813118141181511816118171181811819118201182
11182211823118241182511826118271182811829118301183111832118331183411835118361183711838118391184011841118421184311844118451184611847118481184911850118511185211853118541185511856118571185811859118601186111862118631186411865118661186711868118691187011871118721187311874118751187611877118781187911880118811188211883118841188511886118871188811889118901189111892118931189411895118961189711898118991190011901119021190311904119051190611907119081190911910119111191211913119141191511916119171191811919119201192111922119231192411925119261192711928119291193011931119321193311934119351193611937119381193911940119411194211943119441194511946119471194811949119501195111952119531195411955119561195711958119591196011961119621196311964119651196611967119681196911970119711197211973119741197511976119771197811979119801198111982119831198411985119861198711988119891199011991119921199311994119951199611997119981199912000120011200212003120041200512006120071200812009120101201112012120131201412015120161201712018120191202012021120221202312024120251202612027120281202912030120311203212033120341203512036120371203812039120401204112042120431204412045120461204712048120491205012051120521205312054120551205612057120581205912060120611206212063120641206512066120671206812069120701207112072120731207412075120761207712078120791208012081120821208312084120851208612087120881208912090120911209212093120941209512096120971209812099121001210112102121031210412105121061210712108121091211012111121121211312114121151211612117121181211912120121211212212123121241212512126121271212812129121301213112132121331213412135121361213712138121391214012141121421214312144121451214612147121481214912150121511215212153121541215512156121571215812159121601216112162121631216412165121661216712168121691217012171121721217312174121751217612177121781217912180121811218212183121841218512186121871218812189121901219112192121931219412195121961219712198121991220012201122021220312204122051220612207122081220912210122111221212213122141221512216122171221812219122201222
11222212223122241222512226122271222812229122301223112232122331223412235122361223712238122391224012241122421224312244122451224612247122481224912250122511225212253122541225512256122571225812259122601226112262122631226412265122661226712268122691227012271122721227312274122751227612277122781227912280122811228212283122841228512286122871228812289122901229112292122931229412295122961229712298122991230012301123021230312304123051230612307123081230912310123111231212313123141231512316123171231812319123201232112322123231232412325123261232712328123291233012331123321233312334123351233612337123381233912340123411234212343123441234512346123471234812349123501235112352123531235412355123561235712358123591236012361123621236312364123651236612367123681236912370123711237212373123741237512376123771237812379123801238112382123831238412385123861238712388123891239012391123921239312394123951239612397123981239912400124011240212403124041240512406124071240812409124101241112412124131241412415124161241712418124191242012421124221242312424124251242612427124281242912430124311243212433124341243512436124371243812439124401244112442124431244412445124461244712448124491245012451124521245312454124551245612457124581245912460124611246212463124641246512466124671246812469124701247112472124731247412475124761247712478124791248012481124821248312484124851248612487124881248912490124911249212493124941249512496124971249812499125001250112502125031250412505125061250712508125091251012511125121251312514125151251612517125181251912520125211252212523125241252512526125271252812529125301253112532125331253412535125361253712538125391254012541125421254312544125451254612547125481254912550125511255212553125541255512556125571255812559125601256112562125631256412565125661256712568125691257012571125721257312574125751257612577125781257912580125811258212583125841258512586125871258812589125901259112592125931259412595125961259712598125991260012601126021260312604126051260612607126081260912610126111261212613126141261512616126171261812619126201262
11262212623126241262512626126271262812629126301263112632126331263412635126361263712638126391264012641126421264312644126451264612647126481264912650126511265212653126541265512656126571265812659126601266112662126631266412665126661266712668126691267012671126721267312674126751267612677126781267912680126811268212683126841268512686126871268812689126901269112692126931269412695126961269712698126991270012701127021270312704127051270612707127081270912710127111271212713127141271512716127171271812719127201272112722127231272412725127261272712728127291273012731127321273312734127351273612737127381273912740127411274212743127441274512746127471274812749127501275112752127531275412755127561275712758127591276012761127621276312764127651276612767127681276912770127711277212773127741277512776127771277812779127801278112782127831278412785127861278712788127891279012791127921279312794127951279612797127981279912800128011280212803128041280512806128071280812809128101281112812128131281412815128161281712818128191282012821128221282312824128251282612827128281282912830128311283212833128341283512836128371283812839128401284112842128431284412845128461284712848128491285012851128521285312854128551285612857128581285912860128611286212863128641286512866128671286812869128701287112872128731287412875128761287712878128791288012881128821288312884128851288612887128881288912890128911289212893128941289512896128971289812899129001290112902129031290412905129061290712908129091291012911129121291312914129151291612917129181291912920129211292212923129241292512926129271292812929129301293112932129331293412935129361293712938129391294012941129421294312944129451294612947129481294912950129511295212953129541295512956129571295812959129601296112962129631296412965129661296712968129691297012971129721297312974129751297612977129781297912980129811298212983129841298512986129871298812989129901299112992129931299412995129961299712998129991300013001130021300313004130051300613007130081300913010130111301213013130141301513016130171301813019130201302
11302213023130241302513026130271302813029130301303113032130331303413035130361303713038130391304013041130421304313044130451304613047130481304913050130511305213053130541305513056130571305813059130601306113062130631306413065130661306713068130691307013071130721307313074130751307613077130781307913080130811308213083130841308513086130871308813089130901309113092130931309413095130961309713098130991310013101131021310313104131051310613107131081310913110131111311213113131141311513116131171311813119131201312113122131231312413125131261312713128131291313013131131321313313134131351313613137131381313913140131411314213143131441314513146131471314813149131501315113152131531315413155131561315713158131591316013161131621316313164131651316613167131681316913170131711317213173131741317513176131771317813179131801318113182131831318413185131861318713188131891319013191131921319313194131951319613197131981319913200132011320213203132041320513206132071320813209132101321113212132131321413215132161321713218132191322013221132221322313224132251322613227132281322913230132311323213233132341323513236132371323813239132401324113242132431324413245132461324713248132491325013251132521325313254132551325613257132581325913260132611326213263132641326513266132671326813269132701327113272132731327413275132761327713278132791328013281132821328313284132851328613287132881328913290132911329213293132941329513296132971329813299133001330113302133031330413305133061330713308133091331013311133121331313314133151331613317133181331913320133211332213323133241332513326133271332813329133301333113332133331333413335133361333713338133391334013341133421334313344133451334613347133481334913350133511335213353133541335513356133571335813359133601336113362133631336413365133661336713368133691337013371133721337313374133751337613377133781337913380133811338213383133841338513386133871338813389133901339113392133931339413395133961339713398133991340013401134021340313404134051340613407134081340913410134111341213413134141341513416134171341813419134201342
11342213423134241342513426134271342813429134301343113432134331343413435134361343713438134391344013441134421344313444134451344613447134481344913450134511345213453134541345513456134571345813459134601346113462134631346413465134661346713468134691347013471134721347313474134751347613477134781347913480134811348213483134841348513486134871348813489134901349113492134931349413495134961349713498134991350013501135021350313504135051350613507135081350913510135111351213513135141351513516135171351813519135201352113522135231352413525135261352713528135291353013531135321353313534135351353613537135381353913540135411354213543135441354513546135471354813549135501355113552135531355413555135561355713558135591356013561135621356313564135651356613567135681356913570135711357213573135741357513576135771357813579135801358113582135831358413585135861358713588135891359013591135921359313594135951359613597135981359913600136011360213603136041360513606136071360813609136101361113612136131361413615136161361713618136191362013621136221362313624136251362613627136281362913630136311363213633136341363513636136371363813639136401364113642136431364413645136461364713648136491365013651136521365313654136551365613657136581365913660136611366213663136641366513666136671366813669136701367113672136731367413675136761367713678136791368013681136821368313684136851368613687136881368913690136911369213693136941369513696136971369813699137001370113702137031370413705137061370713708137091371013711137121371313714137151371613717137181371913720137211372213723137241372513726137271372813729137301373113732137331373413735137361373713738137391374013741137421374313744137451374613747137481374913750137511375213753137541375513756137571375813759137601376113762137631376413765137661376713768137691377013771137721377313774137751377613777137781377913780137811378213783137841378513786137871378813789137901379113792137931379413795137961379713798137991380013801138021380313804138051380613807138081380913810138111381213813138141381513816138171381813819138201382
11382213823138241382513826138271382813829138301383113832138331383413835138361383713838138391384013841138421384313844138451384613847138481384913850138511385213853138541385513856138571385813859138601386113862138631386413865138661386713868138691387013871138721387313874138751387613877138781387913880138811388213883138841388513886138871388813889138901389113892138931389413895138961389713898138991390013901139021390313904139051390613907139081390913910139111391213913139141391513916139171391813919139201392113922139231392413925139261392713928139291393013931139321393313934139351393613937139381393913940139411394213943139441394513946139471394813949139501395113952139531395413955139561395713958139591396013961139621396313964139651396613967139681396913970139711397213973139741397513976139771397813979139801398113982139831398413985139861398713988139891399013991139921399313994139951399613997139981399914000140011400214003140041400514006140071400814009140101401114012140131401414015140161401714018140191402014021140221402314024140251402614027140281402914030140311403214033140341403514036140371403814039140401404114042140431404414045140461404714048140491405014051140521405314054140551405614057140581405914060140611406214063140641406514066140671406814069140701407114072140731407414075140761407714078140791408014081140821408314084140851408614087140881408914090140911409214093140941409514096140971409814099141001410114102141031410414105141061410714108141091411014111141121411314114141151411614117141181411914120141211412214123141241412514126141271412814129141301413114132141331413414135141361413714138141391414014141141421414314144141451414614147141481414914150141511415214153141541415514156141571415814159141601416114162141631416414165141661416714168141691417014171141721417314174141751417614177141781417914180141811418214183141841418514186141871418814189141901419114192141931419414195141961419714198141991420014201142021420314204142051420614207142081420914210142111421214213142141421514216142171421814219142201422
11422214223142241422514226142271422814229142301423114232142331423414235142361423714238142391424014241142421424314244142451424614247142481424914250142511425214253142541425514256142571425814259142601426114262142631426414265142661426714268142691427014271142721427314274142751427614277142781427914280142811428214283142841428514286142871428814289142901429114292142931429414295142961429714298142991430014301143021430314304143051430614307143081430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462
11462214623146241462514626146271462814629146301463114632146331463414635146361463714638146391464014641146421464314644146451464614647146481464914650146511465214653146541465514656146571465814659146601466114662146631466414665146661466714668146691467014671146721467314674146751467614677146781467914680146811468214683146841468514686146871468814689146901469114692146931469414695146961469714698146991470014701147021470314704147051470614707147081470914710147111471214713147141471514716147171471814719147201472114722147231472414725147261472714728147291473014731147321473314734147351473614737147381473914740147411474214743147441474514746147471474814749147501475114752147531475414755147561475714758147591476014761147621476314764147651476614767147681476914770147711477214773147741477514776147771477814779147801478114782147831478414785147861478714788147891479014791147921479314794147951479614797147981479914800148011480214803148041480514806148071480814809148101481114812148131481414815148161481714818148191482014821148221482314824148251482614827148281482914830148311483214833148341483514836148371483814839148401484114842148431484414845148461484714848148491485014851148521485314854148551485614857148581485914860148611486214863148641486514866148671486814869148701487114872148731487414875148761487714878148791488014881148821488314884148851488614887148881488914890148911489214893148941489514896148971489814899149001490114902149031490414905149061490714908149091491014911149121491314914149151491614917149181491914920149211492214923149241492514926149271492814929149301493114932149331493414935149361493714938149391494014941149421494314944149451494614947149481494914950149511495214953149541495514956149571495814959149601496114962149631496414965149661496714968149691497014971149721497314974149751497614977149781497914980149811498214983149841498514986149871498814989149901499114992149931499414995149961499714998149991500015001150021500315004150051500615007150081500915010150111501215013150141501515016150171501815019150201502
11502215023150241502515026150271502815029150301503115032150331503415035150361503715038150391504015041150421504315044150451504615047150481504915050150511505215053150541505515056150571505815059150601506115062150631506415065150661506715068150691507015071150721507315074150751507615077150781507915080150811508215083150841508515086150871508815089150901509115092150931509415095150961509715098150991510015101151021510315104151051510615107151081510915110151111511215113151141511515116151171511815119151201512115122151231512415125151261512715128151291513015131151321513315134151351513615137151381513915140151411514215143151441514515146151471514815149151501515115152151531515415155151561515715158151591516015161151621516315164151651516615167151681516915170151711517215173151741517515176151771517815179151801518115182151831518415185151861518715188151891519015191151921519315194151951519615197151981519915200152011520215203152041520515206152071520815209152101521115212152131521415215152161521715218152191522015221152221522315224152251522615227152281522915230152311523215233152341523515236152371523815239152401524115242152431524415245152461524715248152491525015251152521525315254152551525615257152581525915260152611526215263152641526515266152671526815269152701527115272152731527415275152761527715278152791528015281152821528315284152851528615287152881528915290152911529215293152941529515296152971529815299153001530115302153031530415305153061530715308153091531015311153121531315314153151531615317153181531915320153211532215323153241532515326153271532815329153301533115332153331533415335153361533715338153391534015341153421534315344153451534615347153481534915350153511535215353153541535515356153571535815359153601536115362153631536415365153661536715368153691537015371153721537315374153751537615377153781537915380153811538215383153841538515386153871538815389153901539115392153931539415395153961539715398153991540015401154021540315404154051540615407154081540915410154111541215413154141541515416154171541815419154201542
11542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582
11582215823158241582515826158271582815829158301583115832158331583415835158361583715838158391584015841158421584315844158451584615847158481584915850158511585215853158541585515856158571585815859158601586115862158631586415865158661586715868158691587015871158721587315874158751587615877158781587915880158811588215883158841588515886158871588815889158901589115892158931589415895158961589715898158991590015901159021590315904159051590615907159081590915910159111591215913159141591515916159171591815919159201592115922159231592415925159261592715928159291593015931159321593315934159351593615937159381593915940159411594215943159441594515946159471594815949159501595115952159531595415955159561595715958159591596015961159621596315964159651596615967159681596915970159711597215973159741597515976159771597815979159801598115982159831598415985159861598715988159891599015991159921599315994159951599615997159981599916000160011600216003160041600516006160071600816009160101601116012160131601416015160161601716018160191602016021160221602316024160251602616027160281602916030160311603216033160341603516036160371603816039160401604116042160431604416045160461604716048160491605016051160521605316054160551605616057160581605916060160611606216063160641606516066160671606816069160701607116072160731607416075160761607716078160791608016081160821608316084160851608616087160881608916090160911609216093160941609516096160971609816099161001610116102161031610416105161061610716108161091611016111161121611316114161151611616117161181611916120161211612216123161241612516126161271612816129161301613116132161331613416135161361613716138161391614016141161421614316144161451614616147161481614916150161511615216153161541615516156161571615816159161601616116162161631616416165161661616716168161691617016171161721617316174161751617616177161781617916180161811618216183161841618516186161871618816189161901619116192161931619416195161961619716198161991620016201162021620316204162051620616207162081620916210162111621216213162141621516216162171621816219162201622
11622216223162241622516226162271622816229162301623116232162331623416235162361623716238162391624016241162421624316244162451624616247162481624916250162511625216253162541625516256162571625816259162601626116262162631626416265162661626716268162691627016271162721627316274162751627616277162781627916280162811628216283162841628516286162871628816289162901629116292162931629416295162961629716298162991630016301163021630316304163051630616307163081630916310163111631216313163141631516316163171631816319163201632116322163231632416325163261632716328163291633016331163321633316334163351633616337163381633916340163411634216343163441634516346163471634816349163501635116352163531635416355163561635716358163591636016361163621636316364163651636616367163681636916370163711637216373163741637516376163771637816379163801638116382163831638416385163861638716388163891639016391163921639316394163951639616397163981639916400164011640216403164041640516406164071640816409164101641116412164131641416415164161641716418164191642016421164221642316424164251642616427164281642916430164311643216433164341643516436164371643816439164401644116442164431644416445164461644716448164491645016451164521645316454164551645616457164581645916460164611646216463164641646516466164671646816469164701647116472164731647416475164761647716478164791648016481164821648316484164851648616487164881648916490164911649216493164941649516496164971649816499165001650116502165031650416505165061650716508165091651016511165121651316514165151651616517165181651916520165211652216523165241652516526165271652816529165301653116532165331653416535165361653716538165391654016541165421654316544165451654616547165481654916550165511655216553165541655516556165571655816559165601656116562165631656416565165661656716568165691657016571165721657316574165751657616577165781657916580165811658216583165841658516586165871658816589165901659116592165931659416595165961659716598165991660016601166021660316604166051660616607166081660916610166111661216613166141661516616166171661816619166201662
11662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702
11702217023170241702517026170271702817029170301703117032170331703417035170361703717038170391704017041170421704317044170451704617047170481704917050170511705217053170541705517056170571705817059170601706117062170631706417065170661706717068170691707017071170721707317074170751707617077170781707917080170811708217083170841708517086170871708817089170901709117092170931709417095170961709717098170991710017101171021710317104171051710617107171081710917110171111711217113171141711517116171171711817119171201712117122171231712417125171261712717128171291713017131171321713317134171351713617137171381713917140171411714217143171441714517146171471714817149171501715117152171531715417155171561715717158171591716017161171621716317164171651716617167171681716917170171711717217173171741717517176171771717817179171801718117182171831718417185171861718717188171891719017191171921719317194171951719617197171981719917200172011720217203172041720517206172071720817209172101721117212172131721417215172161721717218172191722017221172221722317224172251722617227172281722917230172311723217233172341723517236172371723817239172401724117242172431724417245172461724717248172491725017251172521725317254172551725617257172581725917260172611726217263172641726517266172671726817269172701727117272172731727417275172761727717278172791728017281172821728317284172851728617287172881728917290172911729217293172941729517296172971729817299173001730117302173031730417305173061730717308173091731017311173121731317314173151731617317173181731917320173211732217323173241732517326173271732817329173301733117332173331733417335173361733717338173391734017341173421734317344173451734617347173481734917350173511735217353173541735517356173571735817359173601736117362173631736417365173661736717368173691737017371173721737317374173751737617377173781737917380173811738217383173841738517386173871738817389173901739117392173931739417395173961739717398173991740017401174021740317404174051740617407174081740917410174111741217413174141741517416174171741817419174201742
11742217423174241742517426174271742817429174301743117432174331743417435174361743717438174391744017441174421744317444174451744617447174481744917450174511745217453174541745517456174571745817459174601746117462174631746417465174661746717468174691747017471174721747317474174751747617477174781747917480174811748217483174841748517486174871748817489174901749117492174931749417495174961749717498174991750017501175021750317504175051750617507175081750917510175111751217513175141751517516175171751817519175201752117522175231752417525175261752717528175291753017531175321753317534175351753617537175381753917540175411754217543175441754517546175471754817549175501755117552175531755417555175561755717558175591756017561175621756317564175651756617567175681756917570175711757217573175741757517576175771757817579175801758117582175831758417585175861758717588175891759017591175921759317594175951759617597175981759917600176011760217603176041760517606176071760817609176101761117612176131761417615176161761717618176191762017621176221762317624176251762617627176281762917630176311763217633176341763517636176371763817639176401764117642176431764417645176461764717648176491765017651176521765317654176551765617657176581765917660176611766217663176641766517666176671766817669176701767117672176731767417675176761767717678176791768017681176821768317684176851768617687176881768917690176911769217693176941769517696176971769817699177001770117702177031770417705177061770717708177091771017711177121771317714177151771617717177181771917720177211772217723177241772517726177271772817729177301773117732177331773417735177361773717738177391774017741177421774317744177451774617747177481774917750177511775217753177541775517756177571775817759177601776117762177631776417765177661776717768177691777017771177721777317774177751777617777177781777917780177811778217783177841778517786177871778817789177901779117792177931779417795177961779717798177991780017801178021780317804178051780617807178081780917810178111781217813178141781517816178171781817819178201782
11782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021 |
- //-------------------------------------------------------------------------------------------------------
- // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
- // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
- //-------------------------------------------------------------------------------------------------------
- #include "Backend.h"
#if DBG_DUMP
// True when any of the MemOp/MemSet/MemCopy trace phases is enabled for this function.
#define DO_MEMOP_TRACE() (PHASE_TRACE(Js::MemOpPhase, this->func) ||\
    PHASE_TRACE(Js::MemSetPhase, this->func) ||\
    PHASE_TRACE(Js::MemCopyPhase, this->func))
// True when the generic MemOp phase or the specific `phase` (token-pasted with "Phase") is being traced.
#define DO_MEMOP_TRACE_PHASE(phase) (PHASE_TRACE(Js::MemOpPhase, this->func) || PHASE_TRACE(Js::phase ## Phase, this->func))
// Prints "Function: <name>, Loop: <n>: " followed by the caller-supplied format,
// then (when `instr` is non-null) the instr's byte-code offset and opcode name, and flushes.
// NOTE: evaluates `instr` once into __instr__ so a side-effecting argument is safe.
#define OUTPUT_MEMOP_TRACE(loop, instr, ...) {\
    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];\
    Output::Print(15, _u("Function: %s%s, Loop: %u: "), this->func->GetJITFunctionBody()->GetDisplayName(), this->func->GetDebugNumberSet(debugStringBuffer), loop->GetLoopNumber());\
    Output::Print(__VA_ARGS__);\
    IR::Instr* __instr__ = instr;\
    if(__instr__) __instr__->DumpByteCodeOffset();\
    if(__instr__) Output::Print(_u(" (%s)"), Js::OpCodeUtil::GetOpCodeName(__instr__->m_opcode));\
    Output::Print(_u("\n"));\
    Output::Flush(); \
}
// Emits a "TRACE MemOp:" line when MemOp tracing is on.
#define TRACE_MEMOP(loop, instr, ...) \
    if (DO_MEMOP_TRACE()) {\
        Output::Print(_u("TRACE MemOp:"));\
        OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
    }
// Same as TRACE_MEMOP but gated behind the Verbose config flag.
#define TRACE_MEMOP_VERBOSE(loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP(loop, instr, __VA_ARGS__)}
// Emits a "TRACE <phase>:" line when that specific phase (or MemOp) is traced.
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...) \
    if (DO_MEMOP_TRACE_PHASE(phase))\
    {\
        Output::Print(_u("TRACE ") _u(#phase) _u(":"));\
        OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
    }
// Verbose-gated variant of TRACE_MEMOP_PHASE.
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP_PHASE(phase, loop, instr, __VA_ARGS__)}
#else
// Tracing compiled out in non-DBG_DUMP builds: all macros expand to nothing.
#define DO_MEMOP_TRACE()
#define DO_MEMOP_TRACE_PHASE(phase)
#define OUTPUT_MEMOP_TRACE(loop, instr, ...)
#define TRACE_MEMOP(loop, instr, ...)
#define TRACE_MEMOP_VERBOSE(loop, instr, ...)
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...)
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...)
#endif
- class AutoRestoreVal
- {
- private:
- Value *const originalValue;
- Value *const tempValue;
- Value * *const valueRef;
- public:
- AutoRestoreVal(Value *const originalValue, Value * *const tempValueRef)
- : originalValue(originalValue), tempValue(*tempValueRef), valueRef(tempValueRef)
- {
- }
- ~AutoRestoreVal()
- {
- if(*valueRef == tempValue)
- {
- *valueRef = originalValue;
- }
- }
- PREVENT_COPY(AutoRestoreVal);
- };
// Constructs the global optimizer for `func`. Beyond nulling out state that is
// materialized later (in ForwardPass), the bulk of this initializer list decides
// once, up front, which type-specialization and hoisting optimizations are
// enabled for this function. Note the deliberate dependency chain: most do*
// flags are gated on an earlier flag in the list (e.g. everything on doTypeSpec,
// the array hoists on doArrayCheckHoist), so initialization order matters.
GlobOpt::GlobOpt(Func * func)
    : func(func),
    intConstantToStackSymMap(nullptr),
    intConstantToValueMap(nullptr),
    currentValue(FirstNewValueNumber),
    prePassLoop(nullptr),
    alloc(nullptr),
    isCallHelper(false),
    inInlinedBuiltIn(false),
    rootLoopPrePass(nullptr),
    noImplicitCallUsesToInsert(nullptr),
    valuesCreatedForClone(nullptr),
    valuesCreatedForMerge(nullptr),
    instrCountSinceLastCleanUp(0),
    isRecursiveCallOnLandingPad(false),
    updateInductionVariableValueNumber(false),
    isPerformingLoopBackEdgeCompensation(false),
    currentRegion(nullptr),
    auxSlotPtrSyms(nullptr),
    changedSymsAfterIncBailoutCandidate(nullptr),
    // Master switch: all type specialization below requires this.
    doTypeSpec(
        !IsTypeSpecPhaseOff(func)),
    doAggressiveIntTypeSpec(
        doTypeSpec &&
        DoAggressiveIntTypeSpec(func)),
    // Profile info can disable these per-function (separately for loop bodies).
    doAggressiveMulIntTypeSpec(
        doTypeSpec &&
        !PHASE_OFF(Js::AggressiveMulIntTypeSpecPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsAggressiveMulIntTypeSpecDisabled(func->IsLoopBody()))),
    doDivIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsDivIntTypeSpecDisabled(func->IsLoopBody()))),
    doLossyIntTypeSpec(
        doTypeSpec &&
        DoLossyIntTypeSpec(func)),
    doFloatTypeSpec(
        doTypeSpec &&
        DoFloatTypeSpec(func)),
    // Array optimizations: everything below is gated on doArrayCheckHoist.
    doArrayCheckHoist(
        DoArrayCheckHoist(func)),
    doArrayMissingValueCheckHoist(
        doArrayCheckHoist &&
        DoArrayMissingValueCheckHoist(func)),
    doArraySegmentHoist(
        doArrayCheckHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array), func)),
    doJsArraySegmentHoist(
        doArraySegmentHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array), func)),
    doArrayLengthHoist(
        doArrayCheckHoist &&
        DoArrayLengthHoist(func)),
    doEliminateArrayAccessHelperCall(
        doArrayCheckHoist &&
        !PHASE_OFF(Js::EliminateArrayAccessHelperCallPhase, func)),
    // Bound-check elimination/hoisting builds on relative int-bound tracking.
    doTrackRelativeIntBounds(
        doAggressiveIntTypeSpec &&
        DoPathDependentValues() &&
        !PHASE_OFF(Js::Phase::TrackRelativeIntBoundsPhase, func)),
    doBoundCheckElimination(
        doTrackRelativeIntBounds &&
        !PHASE_OFF(Js::Phase::BoundCheckEliminationPhase, func)),
    doBoundCheckHoist(
        doEliminateArrayAccessHelperCall &&
        doBoundCheckElimination &&
        DoConstFold() &&
        !PHASE_OFF(Js::Phase::BoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsBoundCheckHoistDisabled(func->IsLoopBody()))),
    doLoopCountBasedBoundCheckHoist(
        doBoundCheckHoist &&
        !PHASE_OFF(Js::Phase::LoopCountBasedBoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLoopCountBasedBoundCheckHoistDisabled(func->IsLoopBody()))),
    doPowIntIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsPowIntIntTypeSpecDisabled())),
    doTagChecks(
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTagCheckDisabled())),
    isAsmJSFunc(func->GetJITFunctionBody()->IsAsmJsMode())
{
}
// Runs one backward pass (Backward, DeadStore, or CaptureByteCodeRegUse,
// selected by `tag`) over this function, bracketed by the code-gen phase
// begin/end bookkeeping macros.
void
GlobOpt::BackwardPass(Js::Phase tag)
{
    BEGIN_CODEGEN_PHASE(this->func, tag);

    // `::BackwardPass` is the global-namespace pass class, distinct from this
    // member function of the same name.
    ::BackwardPass backwardPass(this->func, this, tag);
    backwardPass.Optimize();

    END_CODEGEN_PHASE(this->func, tag);
}
// Top-level driver for global optimization: backward pass, forward pass,
// dead-store pass, then tail duplication. If glob-opt is disabled for this
// function, only the dead-store pass runs (it is still needed to compute
// live registers on back edges).
void
GlobOpt::Optimize()
{
    this->objectTypeSyms = nullptr;
    this->func->argInsCount = this->func->GetInParamsCount();
    if (!func->GetJITFunctionBody()->IsAsmJsMode())
    {
        // Don't include "this" pointer in the count when not in AsmJs mode (AsmJS does not have "this").
        this->func->argInsCount--;
    }

    if (!func->DoGlobOpt())
    {
        this->lengthEquivBv = nullptr;
        this->argumentsEquivBv = nullptr;
        this->callerEquivBv = nullptr;

        // Still need to run the dead store phase to calculate the live reg on back edge
        this->BackwardPass(Js::DeadStorePhase);
        CannotAllocateArgumentsObjectOnStack(nullptr);
        return;
    }

    // Inner scope: the two arena allocators below live on the stack and are
    // destroyed (releasing all pass-local memory) before TailDupPass runs.
    {
        this->lengthEquivBv = this->func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::length, nullptr); // Used to kill live "length" properties
        this->argumentsEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::arguments, nullptr); // Used to kill live "arguments" properties
        this->callerEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::caller, nullptr); // Used to kill live "caller" properties

        // The backward phase needs the glob opt's allocator to allocate the propertyTypeValueMap
        // in GlobOpt::EnsurePropertyTypeValue and ranges of instructions where int overflow may be ignored.
        // (see BackwardPass::TrackIntUsage)
        PageAllocator * pageAllocator = this->func->m_alloc->GetPageAllocator();
        NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-GlobOpt"), pageAllocator, Js::Throw::OutOfMemory);
        this->alloc = &localAlloc;
        NoRecoverMemoryJitArenaAllocator localTempAlloc(_u("BE-GlobOpt temp"), pageAllocator, Js::Throw::OutOfMemory);
        this->tempAlloc = &localTempAlloc;

        // The forward passes use info (upwardExposedUses) from the backward pass. This info
        // isn't available for some of the symbols created during the backward pass, or the forward pass.
        // Keep track of the last symbol for which we're guaranteed to have data.
        this->maxInitialSymID = this->func->m_symTable->GetMaxSymID();
#if DBG
        this->BackwardPass(Js::CaptureByteCodeRegUsePhase);
#endif
        this->BackwardPass(Js::BackwardPhase);
        this->ForwardPass();
        this->BackwardPass(Js::DeadStorePhase);
    }
    this->TailDupPass();
}
- bool GlobOpt::ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd)
- {
- Assert(indirOpnd);
- if(!indirOpnd->GetIndexOpnd())
- {
- return indirOpnd->GetOffset() >= 0;
- }
- IR::RegOpnd *const indexOpnd = indirOpnd->GetIndexOpnd();
- if(indexOpnd->m_sym->m_isNotNumber)
- {
- // Typically, single-def or any sym-specific information for type-specialized syms should not be used because all of
- // their defs will not have been accounted for until after the forward pass. But m_isNotNumber is only ever changed from
- // false to true, so it's okay in this case.
- return false;
- }
- StackSym *indexVarSym = indexOpnd->m_sym;
- if(indexVarSym->IsTypeSpec())
- {
- indexVarSym = indexVarSym->GetVarEquivSym(nullptr);
- Assert(indexVarSym);
- }
- else if(!IsLoopPrePass())
- {
- // Don't use single-def info or const flags for type-specialized syms, as all of their defs will not have been accounted
- // for until after the forward pass. Also, don't use the const flags in a loop prepass because the const flags may not
- // be up-to-date.
- if (indexOpnd->IsNotInt())
- {
- return false;
- }
- StackSym *const indexSym = indexOpnd->m_sym;
- if(indexSym->IsIntConst())
- {
- return indexSym->GetIntConstValue() >= 0;
- }
- }
- Value *const indexValue = CurrentBlockData()->FindValue(indexVarSym);
- if(!indexValue)
- {
- // Treat it as Uninitialized, assume it's going to be valid
- return true;
- }
- ValueInfo *const indexValueInfo = indexValue->GetValueInfo();
- int32 indexConstantValue;
- if(indexValueInfo->TryGetIntConstantValue(&indexConstantValue))
- {
- return indexConstantValue >= 0;
- }
- if(indexValueInfo->IsUninitialized())
- {
- // Assume it's going to be valid
- return true;
- }
- return indexValueInfo->HasBeenNumber() && !indexValueInfo->HasBeenFloat();
- }
- //
- // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
- //
- ValueType GlobOpt::GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize)
- {
- ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
- ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
- if (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo())
- {
- ValueType resultType = instr->m_func->GetReadOnlyProfileInfo()->GetDivProfileInfo(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));
- if (resultType.IsLikelyInt())
- {
- if (specialize && src1ValueInfo && src2ValueInfo
- && ((src1ValueInfo->IsInt() && src2ValueInfo->IsInt()) ||
- (this->DoDivIntTypeSpec() && src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())))
- {
- return ValueType::GetInt(true);
- }
- return resultType;
- }
- // Consider: Checking that the sources are numbers.
- if (resultType.IsLikelyFloat())
- {
- return ValueType::Float;
- }
- return resultType;
- }
- int32 src1IntConstantValue;
- if(!src1ValueInfo || !src1ValueInfo->TryGetIntConstantValue(&src1IntConstantValue))
- {
- return ValueType::Number;
- }
- if (src1IntConstantValue == 1)
- {
- return ValueType::Float;
- }
- int32 src2IntConstantValue;
- if(!src2Val || !src2ValueInfo->TryGetIntConstantValue(&src2IntConstantValue))
- {
- return ValueType::Number;
- }
- if (src2IntConstantValue // Avoid divide by zero
- && !(src1IntConstantValue == 0x80000000 && src2IntConstantValue == -1) // Avoid integer overflow
- && (src1IntConstantValue % src2IntConstantValue) != 0)
- {
- return ValueType::Float;
- }
- return ValueType::Number;
- }
// The main forward data-flow pass: sets up pass-local state (value tables,
// constant maps, bit vectors), optimizes every block in forward order, runs
// the MemOp transformation, then tears the pass-local state down again.
// Note that most of the maps below are stack locals pointed to by members;
// the member pointers are nulled before this function returns.
void
GlobOpt::ForwardPass()
{
    BEGIN_CODEGEN_PHASE(this->func, Js::ForwardPhase);
#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
    {
        this->func->DumpHeader();
    }

    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::GlobOptPhase))
    {
        this->TraceSettings();
    }
#endif

    // GetConstantCount() gives us the right size to pick for the SparseArray, but we may need more if we've inlined
    // functions with constants. There will be a gap in the symbol numbering between the main constants and
    // the inlined ones, so we'll most likely need a new array chunk. Make the min size of the array chunks be 64
    // in case we have a main function with very few constants and a bunch of constants from inlined functions.
    this->byteCodeConstantValueArray = SparseArray<Value>::New(this->alloc, max(this->func->GetJITFunctionBody()->GetConstCount(), 64U));
    this->byteCodeConstantValueNumbersBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->tempBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->prePassCopyPropSym = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->slotSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->byteCodeUses = nullptr;
    this->propertySymUse = nullptr;

    // changedSymsAfterIncBailoutCandidate helps track building incremental bailout in ForwardPass
    this->changedSymsAfterIncBailoutCandidate = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    this->auxSlotPtrSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);

#if DBG
    this->byteCodeUsesBeforeOpt = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) && this->DoFunctionFieldCopyProp())
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Func: "));
        this->func->DumpFullFunctionName();
        Output::Print(_u("\n"));
    }
#endif

    // Pass-scoped lookup tables: each lives on this stack frame and is exposed
    // through a member pointer for the duration of the pass.
    OpndList localNoImplicitCallUsesToInsert(alloc);
    this->noImplicitCallUsesToInsert = &localNoImplicitCallUsesToInsert;
    IntConstantToStackSymMap localIntConstantToStackSymMap(alloc);
    this->intConstantToStackSymMap = &localIntConstantToStackSymMap;
    IntConstantToValueMap localIntConstantToValueMap(alloc);
    this->intConstantToValueMap = &localIntConstantToValueMap;
    Int64ConstantToValueMap localInt64ConstantToValueMap(alloc);
    this->int64ConstantToValueMap = &localInt64ConstantToValueMap;
    AddrConstantToValueMap localAddrConstantToValueMap(alloc);
    this->addrConstantToValueMap = &localAddrConstantToValueMap;
    StringConstantToValueMap localStringConstantToValueMap(alloc);
    this->stringConstantToValueMap = &localStringConstantToValueMap;
    SymIdToInstrMap localPrePassInstrMap(alloc);
    this->prePassInstrMap = &localPrePassInstrMap;
    ValueSetByValueNumber localValuesCreatedForClone(alloc, 64);
    this->valuesCreatedForClone = &localValuesCreatedForClone;
    ValueNumberPairToValueMap localValuesCreatedForMerge(alloc, 64);
    this->valuesCreatedForMerge = &localValuesCreatedForMerge;

#if DBG
    BVSparse<JitArenaAllocator> localFinishedStackLiteralInitFld(alloc);
    this->finishedStackLiteralInitFld = &localFinishedStackLiteralInitFld;
#endif

    // Optimize every block in forward order (the editing variant tolerates
    // blocks being removed/added while iterating).
    FOREACH_BLOCK_IN_FUNC_EDITING(block, this->func)
    {
        this->OptBlock(block);
    } NEXT_BLOCK_IN_FUNC_EDITING;

    if (!PHASE_OFF(Js::MemOpPhase, this->func))
    {
        ProcessMemOp();
    }

    // Null out the member pointers to the pass-local tables before they go out
    // of scope.
    this->noImplicitCallUsesToInsert = nullptr;
    this->intConstantToStackSymMap = nullptr;
    this->intConstantToValueMap = nullptr;
    this->int64ConstantToValueMap = nullptr;
    this->addrConstantToValueMap = nullptr;
    this->stringConstantToValueMap = nullptr;
#if DBG
    this->finishedStackLiteralInitFld = nullptr;
    uint freedCount = 0;
    uint spilledCount = 0;
#endif

    // Release per-block data and string-clone candidate sets; in DBG builds,
    // also count how many blocks had already released their data.
    FOREACH_BLOCK_IN_FUNC(block, this->func)
    {
#if DBG
        if (block->GetDataUseCount() == 0)
        {
            freedCount++;
        }
        else
        {
            spilledCount++;
        }
#endif
        block->SetDataUseCount(0);
        if (block->cloneStrCandidates)
        {
            JitAdelete(this->alloc, block->cloneStrCandidates);
            block->cloneStrCandidates = nullptr;
        }
    } NEXT_BLOCK_IN_FUNC;

    // Make sure we free most of them.
    Assert(freedCount >= spilledCount);

    // this->alloc will be freed right after return, no need to free it here
    this->changedSymsAfterIncBailoutCandidate = nullptr;
    this->auxSlotPtrSyms = nullptr;

    END_CODEGEN_PHASE(this->func, Js::ForwardPhase);
}
// Optimize a single basic block: merge predecessor value maps, run the forward
// pass over each instruction, and — when this block is a loop tail on the real
// (non-pre-pass) pass — insert the compensation code needed so the state on
// the back-edge matches what the loop header assumed.
void
GlobOpt::OptBlock(BasicBlock *block)
{
    // Unreachable blocks are deleted rather than optimized.
    if (this->func->m_fg->RemoveUnreachableBlock(block, this))
    {
        GOPT_TRACE(_u("Removing unreachable block #%d\n"), block->GetBlockNum());
        return;
    }
    Loop * loop = block->loop;
    if (loop && block->isLoopHeader)
    {
        // First time reaching this loop header (we're not already inside its
        // own pre-pass): run the loop pre-pass before optimizing for real.
        if (loop != this->prePassLoop)
        {
            OptLoops(loop);
            if (!IsLoopPrePass() && loop->parent)
            {
                // Propagate PRE sym-stores from the enclosing loop so this
                // loop sees fields pre-loaded by its parents.
                loop->fieldPRESymStores->Or(loop->parent->fieldPRESymStores);
            }

            if (!this->IsLoopPrePass() && DoFieldPRE(loop))
            {
                // Note: !IsLoopPrePass means this was a root loop pre-pass. FieldPre() is called once per loop.
                this->FieldPRE(loop);
                // Re-optimize the landing pad
                BasicBlock *landingPad = loop->landingPad;
                this->isRecursiveCallOnLandingPad = true;
                this->OptBlock(landingPad);
                this->isRecursiveCallOnLandingPad = false;
            }
        }
    }
    this->currentBlock = block;
    PrepareLoopArrayCheckHoist();
    // Combine the GlobOptBlockData flowing in from all predecessors.
    block->MergePredBlocksValueMaps(this);
    this->intOverflowCurrentlyMattersInRange = true;
    this->intOverflowDoesNotMatterRange = this->currentBlock->intOverflowDoesNotMatterRange;
    if (!DoFieldCopyProp() && !DoFieldRefOpts())
    {
        // Field optimizations are off: no field value can be trusted.
        this->KillAllFields(CurrentBlockData()->liveFields);
    }

    this->tempAlloc->Reset();
    if(loop && block->isLoopHeader)
    {
        // Remember the first value number of the loop so values produced
        // inside it can be distinguished later.
        loop->firstValueNumberInLoop = this->currentValue;
    }
    GOPT_TRACE_BLOCK(block, true);
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        GOPT_TRACE_INSTRTRACE(instr);
        BailOutInfo* oldBailOutInfo = nullptr;
        bool isCheckAuxBailoutNeeded = this->func->IsJitInDebugMode() && !this->IsLoopPrePass();
        if (isCheckAuxBailoutNeeded && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            oldBailOutInfo = instr->GetBailOutInfo();
            Assert(oldBailOutInfo);
        }
        bool isInstrRemoved = false;
        instrNext = this->OptInstr(instr, &isInstrRemoved);
        // If we still have instrs with only aux bail out, convert aux bail out back to regular bail out and fill it.
        // During OptInstr some instr can be moved out to a different block, in this case bailout info is going to be replaced
        // with e.g. loop bailout info which is filled as part of processing that block, thus we don't need to fill it here.
        if (isCheckAuxBailoutNeeded && !isInstrRemoved && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            if (instr->GetBailOutInfo() == oldBailOutInfo)
            {
                instr->PromoteAuxBailOut();
                FillBailOutInfo(block, instr);
            }
            else
            {
                AssertMsg(instr->GetBailOutInfo(), "With aux bailout, the bailout info should not be removed by OptInstr.");
            }
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;
    GOPT_TRACE_BLOCK(block, false);
    if (block->loop)
    {
        if (IsLoopPrePass())
        {
            if (DoBoundCheckHoist())
            {
                DetectUnknownChangesToInductionVariables(&block->globOptData);
            }
        }
        else
        {
            isPerformingLoopBackEdgeCompensation = true;
            Assert(this->tempBv->IsEmpty());
            BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
            // On loop back-edges, we need to restore the state of the type specialized
            // symbols to that of the loop header.
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                // A successor that is a loop header of this loop (or an
                // ancestor) means 'block' is a back-edge source for it.
                if (succ->isLoopHeader && succ->loop->IsDescendentOrSelf(block->loop))
                {
                    BVSparse<JitArenaAllocator> *liveOnBackEdge = block->loop->regAlloc.liveOnBackEdgeSyms;
                    liveOnBackEdge->Or(block->loop->fieldPRESymStores);
                    // Var live on entry but not here - reload as var on the back-edge
                    this->tempBv->Minus(block->loop->varSymsOnEntry, block->globOptData.liveVarSyms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToVar(this->tempBv, block);
                    // Lossy int in the loop header, and no int on the back-edge - need a lossy conversion to int
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, true /* lossy */);
                    // Lossless int in the loop header, and no lossless int on the back-edge - need a lossless conversion to int
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, false /* lossy */);
                    // Float64 in the loop header and not here - convert on the back-edge
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block);
                    // For ints and floats, go aggressive and type specialize in the landing pad any symbol which was specialized on
                    // entry to the loop body (in the loop header), and is still specialized on this tail, but wasn't specialized in
                    // the landing pad.
                    // Lossy int in the loop header and no int in the landing pad - need a lossy conversion to int
                    // (entry.lossyInt32 - landingPad.int32)
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->loop->landingPad->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, true /* lossy */);
                    // Lossless int in the loop header, and no lossless int in the landing pad - need a lossless conversion to int
                    // ((entry.int32 - entry.lossyInt32) - (landingPad.int32 - landingPad.lossyInt32))
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(
                        block->loop->landingPad->globOptData.liveInt32Syms,
                        block->loop->landingPad->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, false /* lossy */);
                    // ((entry.float64 - landingPad.float64) & block.float64)
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->loop->landingPad->globOptData.liveFloat64Syms);
                    this->tempBv->And(block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block->loop->landingPad);
                    if (block->loop->symsRequiringCompensationToMergedValueInfoMap)
                    {
                        InsertValueCompensation(block, succ, block->loop->symsRequiringCompensationToMergedValueInfoMap);
                    }
                    // Now that we're done with the liveFields within this loop, trim the set to those syms
                    // that the backward pass told us were live out of the loop.
                    // This assumes we have no further need of the liveFields within the loop.
                    if (block->loop->liveOutFields)
                    {
                        block->globOptData.liveFields->And(block->loop->liveOutFields);
                    }
                }
            } NEXT_SUCCESSOR_BLOCK;
            this->tempBv->ClearAll();
            isPerformingLoopBackEdgeCompensation = false;
        }
    }
    block->PathDepBranchFolding(this);
#if DBG
    // The set of live lossy int32 syms should be a subset of all live int32 syms
    this->tempBv->And(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());
    // The set of live lossy int32 syms should be a subset of live var or float syms (var or float sym containing the lossless
    // value of the sym should be live)
    this->tempBv->Or(block->globOptData.liveVarSyms, block->globOptData.liveFloat64Syms);
    this->tempBv->And(block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());
    this->tempBv->ClearAll();
    Assert(this->currentBlock == block);
#endif
}
// Run the pre-pass over 'loop' (and, recursively via OptBlock, any nested
// loops). The pre-pass walks the loop body once to collect which syms are
// used before being defined, which are likely int/number, which fields are
// killed, etc., so the real pass can type-specialize and hoist with that
// knowledge. prePassLoop / rootLoopPrePass mark the loop nest currently
// being pre-passed and are restored on exit.
void
GlobOpt::OptLoops(Loop *loop)
{
    Assert(loop != nullptr);
#if DBG
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) &&
        !DoFunctionFieldCopyProp() && DoFieldCopyProp(loop))
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Loop: "));
        this->func->DumpFullFunctionName();
        uint loopNumber = loop->GetLoopNumber();
        Assert(loopNumber != Js::LoopHeader::NoLoop);
        Output::Print(_u(" Loop: %d\n"), loopNumber);
    }
#endif
    Loop *previousLoop = this->prePassLoop;
    this->prePassLoop = loop;
    if (previousLoop == nullptr)
    {
        // Entering a new root pre-pass: reset the per-root state.
        Assert(this->rootLoopPrePass == nullptr);
        this->rootLoopPrePass = loop;
        this->prePassInstrMap->Clear();
        if (loop->parent == nullptr)
        {
            // Outer most loop...
            this->prePassCopyPropSym->ClearAll();
        }
    }
    Assert(loop->symsAssignedToInLoop != nullptr);
    // Allocate the per-loop analysis bit-vectors on first visit; on later
    // visits just reset them.
    if (loop->symsUsedBeforeDefined == nullptr)
    {
        loop->symsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyIntSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyNumberSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceFloat64SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->symsDefInLoop = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldKilled = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldPRESymStores = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->allFieldsKilled = false;
    }
    else
    {
        // NOTE(review): fieldPRESymStores is deliberately not cleared here —
        // it appears to accumulate across passes. Confirm before changing.
        loop->symsUsedBeforeDefined->ClearAll();
        loop->likelyIntSymsUsedBeforeDefined->ClearAll();
        loop->likelyNumberSymsUsedBeforeDefined->ClearAll();
        loop->forceFloat64SymsOnEntry->ClearAll();
        loop->symsDefInLoop->ClearAll();
        loop->fieldKilled->ClearAll();
        loop->allFieldsKilled = false;
        loop->initialValueFieldMap.Reset();
    }
    // Pre-pass every block in the loop. The successor count seeds the
    // reference counting used to release block data when no longer needed.
    FOREACH_BLOCK_IN_LOOP(block, loop)
    {
        block->SetDataUseCount(block->GetSuccList()->Count());
        OptBlock(block);
    } NEXT_BLOCK_IN_LOOP;
    if (previousLoop == nullptr)
    {
        Assert(this->rootLoopPrePass == loop);
        this->rootLoopPrePass = nullptr;
    }
    this->prePassLoop = previousLoop;
}
- void
- GlobOpt::TailDupPass()
- {
- FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
- {
- BasicBlock* header = loop->GetHeadBlock();
- BasicBlock* loopTail = nullptr;
- FOREACH_PREDECESSOR_BLOCK(pred, header)
- {
- if (loop->IsDescendentOrSelf(pred->loop))
- {
- loopTail = pred;
- break;
- }
- } NEXT_PREDECESSOR_BLOCK;
- if (loopTail)
- {
- AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
- if (!loopTail->GetPredList()->HasOne())
- {
- TryTailDup(loopTail->GetLastInstr()->AsBranchInstr());
- }
- }
- } NEXT_LOOP_IN_FUNC_EDITING;
- }
// Attempt to duplicate the (small) tail block ending in 'tailBranch' into
// each unconditional branch that targets it, retargeting those branches to
// the tail's own destination. Returns true if duplication took place.
bool
GlobOpt::TryTailDup(IR::BranchInstr *tailBranch)
{
    if (PHASE_OFF(Js::TailDupPhase, tailBranch->m_func->GetTopFunc()))
    {
        return false;
    }
    // Only an unconditional tail branch can be duplicated wholesale.
    if (tailBranch->IsConditional())
    {
        return false;
    }
    IR::Instr *instr;
    uint instrCount = 0;
    // Walk backwards from the branch to the block's label, checking that
    // every instruction in the tail is safe to duplicate (CSE-able, no
    // bailout) and that the tail is small (at most one real instruction).
    for (instr = tailBranch->GetPrevRealInstrOrLabel(); !instr->IsLabelInstr(); instr = instr->GetPrevRealInstrOrLabel())
    {
        if (instr->HasBailOutInfo())
        {
            break;
        }
        if (!OpCodeAttr::CanCSE(instr->m_opcode))
        {
            // Consider: We could be more aggressive here
            break;
        }
        instrCount++;
        if (instrCount > 1)
        {
            // Consider: If copy handled single-def tmps renaming, we could do more instrs
            break;
        }
    }
    // If the scan bailed before reaching the label, the tail is not duplicable.
    if (!instr->IsLabelInstr())
    {
        return false;
    }
    IR::LabelInstr *mergeLabel = instr->AsLabelInstr();
    IR::Instr *mergeLabelPrev = mergeLabel->m_prev;
    // Skip unreferenced labels
    while (mergeLabelPrev->IsLabelInstr() && mergeLabelPrev->AsLabelInstr()->labelRefs.Empty())
    {
        mergeLabelPrev = mergeLabelPrev->m_prev;
    }
    BasicBlock* labelBlock = mergeLabel->GetBasicBlock();
    uint origPredCount = labelBlock->GetPredList()->Count();
    uint dupCount = 0;
    // We are good to go. Let's do the tail duplication.
    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr*, branchEntry, &mergeLabel->labelRefs, iter)
    {
        // Only duplicate into unconditional, single-target branches that are
        // neither the fall-through predecessor nor the tail branch itself.
        if (branchEntry->IsUnconditional() && !branchEntry->IsMultiBranch() && branchEntry != mergeLabelPrev && branchEntry != tailBranch)
        {
            // Copy the tail's instructions in front of the incoming branch...
            for (instr = mergeLabel->m_next; instr != tailBranch; instr = instr->m_next)
            {
                branchEntry->InsertBefore(instr->Copy());
            }
            instr = branchEntry;
            // ...and retarget that branch to the tail's destination.
            branchEntry->ReplaceTarget(mergeLabel, tailBranch->GetTarget());
            // Find the label of the block containing the rewritten branch so
            // the flow-graph edges can be updated to match.
            while(!instr->IsLabelInstr())
            {
                instr = instr->m_prev;
            }
            BasicBlock* branchBlock = instr->AsLabelInstr()->GetBasicBlock();
            labelBlock->RemovePred(branchBlock, func->m_fg);
            func->m_fg->AddEdge(branchBlock, tailBranch->GetTarget()->GetBasicBlock());
            dupCount++;
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;
    // If we've duplicated everywhere, tail block is dead and should be removed.
    if (dupCount == origPredCount)
    {
        AssertMsg(mergeLabel->labelRefs.Empty(), "Should not remove block with referenced label.");
        func->m_fg->RemoveBlock(labelBlock, nullptr, true);
    }
    return true;
}
- void
- GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IR::Instr* insertBeforeInstr /* = nullptr */)
- {
- FOREACH_BITSET_IN_SPARSEBV(id, bv)
- {
- StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
- IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, TyVar, this->func);
- IR::Instr* lastInstr = block->GetLastInstr();
- if (insertBeforeInstr != nullptr)
- {
- this->ToVar(insertBeforeInstr, newOpnd, block, nullptr, false);
- }
- else if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
- {
- // If branch is using this symbol, hoist the operand as the ToVar load will get
- // inserted right before the branch.
- IR::Opnd *src1 = lastInstr->GetSrc1();
- if (src1)
- {
- if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
- {
- lastInstr->HoistSrc1(Js::OpCode::Ld_A);
- }
- IR::Opnd *src2 = lastInstr->GetSrc2();
- if (src2)
- {
- if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
- {
- lastInstr->HoistSrc2(Js::OpCode::Ld_A);
- }
- }
- }
- this->ToVar(lastInstr, newOpnd, block, nullptr, false);
- }
- else
- {
- IR::Instr *lastNextInstr = lastInstr->m_next;
- this->ToVar(lastNextInstr, newOpnd, block, nullptr, false);
- }
- } NEXT_BITSET_IN_SPARSEBV;
- }
- void
- GlobOpt::ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr)
- {
- return this->ToTypeSpec(bv, block, TyInt32, IR::BailOutIntOnly, lossy, insertBeforeInstr);
- }
- void
- GlobOpt::ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
- {
- return this->ToTypeSpec(bv, block, TyFloat64, IR::BailOutNumberOnly);
- }
- void
- GlobOpt::ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
- {
- FOREACH_BITSET_IN_SPARSEBV(id, bv)
- {
- StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
- IRType fromType = TyIllegal;
- // Win8 bug: 757126. If we are trying to type specialize the arguments object,
- // let's make sure stack args optimization is not enabled. This is a problem, particularly,
- // if the instruction comes from an unreachable block. In other cases, the pass on the
- // instruction itself should disable arguments object optimization.
- if(block->globOptData.argObjSyms && block->globOptData.IsArgumentsSymID(id))
- {
- CannotAllocateArgumentsObjectOnStack(nullptr);
- }
- if (block->globOptData.liveVarSyms->Test(id))
- {
- fromType = TyVar;
- }
- else if (block->globOptData.liveInt32Syms->Test(id) && !block->globOptData.liveLossyInt32Syms->Test(id))
- {
- fromType = TyInt32;
- stackSym = stackSym->GetInt32EquivSym(this->func);
- }
- else if (block->globOptData.liveFloat64Syms->Test(id))
- {
- fromType = TyFloat64;
- stackSym = stackSym->GetFloat64EquivSym(this->func);
- }
- else
- {
- Assert(UNREACHED);
- }
- IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, fromType, this->func);
- this->ToTypeSpecUse(nullptr, newOpnd, block, nullptr, nullptr, toType, bailOutKind, lossy, insertBeforeInstr);
- } NEXT_BITSET_IN_SPARSEBV;
- }
- void GlobOpt::PRE::FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc)
- {
- // Find the set of PRE candidates
- BasicBlock *loopHeader = loop->GetHeadBlock();
- PRECandidates *candidates = nullptr;
- bool firstBackEdge = true;
- FOREACH_PREDECESSOR_BLOCK(blockPred, loopHeader)
- {
- if (!loop->IsDescendentOrSelf(blockPred->loop))
- {
- // Not a loop back-edge
- continue;
- }
- if (firstBackEdge)
- {
- candidates = this->globOpt->FindBackEdgePRECandidates(blockPred, alloc);
- }
- else
- {
- blockPred->globOptData.RemoveUnavailableCandidates(candidates);
- }
- } NEXT_PREDECESSOR_BLOCK;
- this->candidates = candidates;
- }
// Try to pre-load one field-PRE candidate into the loop's landing pad.
// Returns true when a load was inserted (the caller then removes the
// candidate from its worklist), false when the candidate is skipped.
BOOL GlobOpt::PRE::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
{
    // Insert a load for each field PRE candidate.
    PropertySym *propertySym = candidate->value->AsPropertySym();
    // Each candidate is processed at most once.
    if (!candidates->candidatesToProcess->TestAndClear(propertySym->m_id))
    {
        return false;
    }
    Value * propSymValueOnBackEdge = candidate->element;
    StackSym *objPtrSym = propertySym->m_stackSym;
    Sym * objPtrCopyPropSym = nullptr;
    // The object pointer must be live in the landing pad to hoist a load of
    // its field there; try to make it live if it isn't.
    if (!loop->landingPad->globOptData.IsLive(objPtrSym))
    {
        if (PHASE_OFF(Js::MakeObjSymLiveInLandingPadPhase, this->globOpt->func))
        {
            return false;
        }
        if (objPtrSym->IsSingleDef())
        {
            // We can still try to do PRE if the object sym is single def, even if its not live in the landing pad.
            // We'll have to add a def instruction for the object sym in the landing pad, and then we can continue
            // pre-loading the current PRE candidate.
            // Case in point:
            // $L1
            //                 value|symStore
            //      t1 = o.x   (v1|t3)
            //      t2 = t1.y  (v2|t4)  <-- t1 is not live in the loop landing pad
            //      jmp $L1
            if (!InsertSymDefinitionInLandingPad(objPtrSym, loop, &objPtrCopyPropSym))
            {
#if DBG_DUMP
                TraceFailedPreloadInLandingPad(loop, propertySym, _u("Failed to insert load of object sym in landing pad"));
#endif
                return false;
            }
        }
        else
        {
#if DBG_DUMP
            TraceFailedPreloadInLandingPad(loop, propertySym, _u("Object sym not live in landing pad and not single-def"));
#endif
            return false;
        }
    }
    Assert(loop->landingPad->globOptData.IsLive(objPtrSym));
    BasicBlock *landingPad = loop->landingPad;
    Sym *symStore = propSymValueOnBackEdge->GetValueInfo()->GetSymStore();
    // The symStore can't be live into the loop
    // The symStore needs to still have the same value
    Assert(symStore && symStore->IsStackSym());
    if (loop->landingPad->globOptData.IsLive(symStore))
    {
        // May have already been hoisted:
        //  o.x = t1;
        //  o.y = t1;
        return false;
    }
    Value *landingPadValue = landingPad->globOptData.FindValue(propertySym);
    // Value should be added as initial value or already be there.
    Assert(landingPadValue);
    // The representative load instruction was recorded during the loop pre-pass.
    IR::Instr * ldInstrInLoop = this->globOpt->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
    Assert(ldInstrInLoop);
    Assert(ldInstrInLoop->GetDst() == nullptr);
    // Create instr to put in landing pad for compensation
    Assert(IsPREInstrCandidateLoad(ldInstrInLoop->m_opcode));
    IR::Instr * ldInstr = InsertPropertySymPreloadInLandingPad(ldInstrInLoop, loop, propertySym);
    if (!ldInstr)
    {
        return false;
    }
    // Give the pre-loaded value the symStore destination and make it live
    // (as var) in the landing pad.
    Assert(ldInstr->GetDst() == nullptr);
    ldInstr->SetDst(IR::RegOpnd::New(symStore->AsStackSym(), TyVar, this->globOpt->func));
    loop->fieldPRESymStores->Set(symStore->m_id);
    landingPad->globOptData.liveVarSyms->Set(symStore->m_id);
    Value * objPtrValue = landingPad->globOptData.FindValue(objPtrSym);
    // Prefer the copy-prop sym found earlier (if a def was inserted); else
    // derive it from the landing-pad value of the object pointer.
    objPtrCopyPropSym = objPtrCopyPropSym ? objPtrCopyPropSym : objPtrValue ? landingPad->globOptData.GetCopyPropSym(objPtrSym, objPtrValue) : nullptr;
    if (objPtrCopyPropSym)
    {
        // If we inserted T4 = T1.y, and T3 is the copy prop sym for T1 in the landing pad, we need T3.y
        // to be live on back edges to have the merge produce a value for T3.y. Having a value for T1.y
        // produced from the merge is not enough as the T1.y in the loop will get obj-ptr-copy-propped to
        // T3.y
        PropertySym *newPropSym = PropertySym::FindOrCreate(
            objPtrCopyPropSym->m_id, propertySym->m_propertyId, propertySym->GetPropertyIdIndex(), propertySym->GetInlineCacheIndex(), propertySym->m_fieldKind, this->globOpt->func);
        if (!landingPad->globOptData.FindValue(newPropSym))
        {
            landingPad->globOptData.SetValue(landingPadValue, newPropSym);
            landingPad->globOptData.liveFields->Set(newPropSym->m_id);
            MakePropertySymLiveOnBackEdges(newPropSym, loop, propSymValueOnBackEdge);
        }
    }
    // Work out the value type to advertise on the pre-loaded instruction.
    ValueType valueType(ValueType::Uninitialized);
    Value *initialValue = nullptr;
    if (loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue))
    {
        if (ldInstr->IsProfiledInstr())
        {
            // Only trust the profile/back-edge type when the value was not
            // changed inside the loop (same value number on the back-edge).
            if (initialValue->GetValueNumber() == propSymValueOnBackEdge->GetValueNumber())
            {
                if (propSymValueOnBackEdge->GetValueInfo()->IsUninitialized())
                {
                    valueType = ldInstr->AsProfiledInstr()->u.FldInfo().valueType;
                }
                else
                {
                    valueType = propSymValueOnBackEdge->GetValueInfo()->Type();
                }
            }
            else
            {
                valueType = ValueType::Uninitialized;
            }
            ldInstr->AsProfiledInstr()->u.FldInfo().valueType = valueType;
        }
    }
    else
    {
        valueType = landingPadValue->GetValueInfo()->Type();
    }
    // The symStore now carries a value defined before the loop body runs.
    loop->symsUsedBeforeDefined->Set(symStore->m_id);
    if (valueType.IsLikelyNumber())
    {
        loop->likelyNumberSymsUsedBeforeDefined->Set(symStore->m_id);
        if (globOpt->DoAggressiveIntTypeSpec() ? valueType.IsLikelyInt() : valueType.IsInt())
        {
            // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
            // specialization is enabled
            loop->likelyIntSymsUsedBeforeDefined->Set(symStore->m_id);
        }
    }
#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->globOpt->func->GetSourceContextId(), this->globOpt->func->GetLocalFunctionId()))
    {
        Output::Print(_u("** TRACE: Field PRE: field pre-loaded in landing pad of loop head #%-3d: "), loop->GetHeadBlock()->GetBlockNum());
        ldInstr->Dump();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return true;
}
- void GlobOpt::PRE::PreloadPRECandidates(Loop *loop)
- {
- // Insert loads in landing pad for field PRE candidates. Iterate while(changed)
- // for the o.x.y cases.
- BOOL changed = true;
- if (!candidates || !candidates->candidatesList)
- {
- return;
- }
- Assert(loop->landingPad->GetFirstInstr() == loop->landingPad->GetLastInstr());
- while (changed)
- {
- changed = false;
- FOREACH_SLIST_ENTRY_EDITING(GlobHashBucket*, candidate, (SList<GlobHashBucket*>*)candidates->candidatesList, iter)
- {
- if (this->PreloadPRECandidate(loop, candidate))
- {
- changed = true;
- iter.RemoveCurrent();
- }
- if (PHASE_TRACE(Js::FieldPREPhase, this->globOpt->func))
- {
- Output::Print(_u("============================\n"));
- Output::Flush();
- }
- } NEXT_SLIST_ENTRY_EDITING;
- }
- }
- void GlobOpt::FieldPRE(Loop *loop)
- {
- if (!DoFieldPRE(loop))
- {
- return;
- }
- GlobOpt::PRE pre(this);
- pre.FieldPRE(loop);
- }
// Insert compensation code on the predecessor -> successor edge for array
// value infos whose tracking syms (head segment, head segment length,
// length) differ between the predecessor and the merged successor state.
// For each differing sym a copy is emitted at the end of the predecessor,
// liveness/value maps are updated, and the predecessor's ValueInfo is
// swapped (after the loop, to avoid cross-contamination between entries).
void GlobOpt::InsertValueCompensation(
    BasicBlock *const predecessor,
    BasicBlock *const successor,
    const SymToValueInfoMap *symsRequiringCompensationToMergedValueInfoMap)
{
    Assert(predecessor);
    Assert(successor);
    AssertOrFailFast(predecessor != successor);
    Assert(symsRequiringCompensationToMergedValueInfoMap->Count() != 0);
    IR::Instr *insertBeforeInstr = predecessor->GetLastInstr();
    Func *const func = insertBeforeInstr->m_func;
    bool setLastInstrInPredecessor;
    // If this is a loop back edge, and the successor has been completed, don't attempt to update its block data.
    // The update is unnecessary, and the data has likely been freed.
    bool updateSuccessorBlockData = !this->isPerformingLoopBackEdgeCompensation || successor->GetDataUseCount() > 0;
    if(insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
    {
        // Don't insert code between the branch and the corresponding ByteCodeUses instructions
        while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
        {
            insertBeforeInstr = insertBeforeInstr->m_prev;
        }
        setLastInstrInPredecessor = false;
    }
    else
    {
        // Insert at the end of the block and set the last instruction
        Assert(insertBeforeInstr->m_next);
        insertBeforeInstr = insertBeforeInstr->m_next; // Instruction after the last instruction in the predecessor
        setLastInstrInPredecessor = true;
    }
    GlobOptBlockData &predecessorBlockData = predecessor->globOptData;
    GlobOptBlockData &successorBlockData = successor->globOptData;
    // Deferred ChangeValueInfo calls; applied after all entries are processed
    // so one entry's new ValueInfo is not observed while handling another.
    struct DelayChangeValueInfo
    {
        Value* predecessorValue;
        ArrayValueInfo* valueInfo;
        void ChangeValueInfo(BasicBlock* predecessor, GlobOpt* g)
        {
            g->ChangeValueInfo(
                predecessor,
                predecessorValue,
                valueInfo,
                false /*allowIncompatibleType*/,
                true /*compensated*/);
        }
    };
    JsUtil::List<DelayChangeValueInfo, ArenaAllocator> delayChangeValueInfo(alloc);
    for(auto it = symsRequiringCompensationToMergedValueInfoMap->GetIterator(); it.IsValid(); it.MoveNext())
    {
        const auto &entry = it.Current();
        Sym *const sym = entry.Key();
        Value *const predecessorValue = predecessorBlockData.FindValue(sym);
        Assert(predecessorValue);
        ValueInfo *const predecessorValueInfo = predecessorValue->GetValueInfo();
        // Currently, array value infos are the only ones that require compensation based on values
        Assert(predecessorValueInfo->IsAnyOptimizedArray());
        const ArrayValueInfo *const predecessorArrayValueInfo = predecessorValueInfo->AsArrayValueInfo();
        StackSym *const predecessorHeadSegmentSym = predecessorArrayValueInfo->HeadSegmentSym();
        StackSym *const predecessorHeadSegmentLengthSym = predecessorArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const predecessorLengthSym = predecessorArrayValueInfo->LengthSym();
        ValueInfo *const mergedValueInfo = entry.Value();
        const ArrayValueInfo *const mergedArrayValueInfo = mergedValueInfo->AsArrayValueInfo();
        StackSym *const mergedHeadSegmentSym = mergedArrayValueInfo->HeadSegmentSym();
        StackSym *const mergedHeadSegmentLengthSym = mergedArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const mergedLengthSym = mergedArrayValueInfo->LengthSym();
        // The merge can only drop tracking syms, never introduce ones the
        // predecessor doesn't have.
        Assert(!mergedHeadSegmentSym || predecessorHeadSegmentSym);
        Assert(!mergedHeadSegmentLengthSym || predecessorHeadSegmentLengthSym);
        Assert(!mergedLengthSym || predecessorLengthSym);
        bool compensated = false;
        // Head segment sym differs: copy predecessor's sym into the merged sym.
        if(mergedHeadSegmentSym && predecessorHeadSegmentSym != mergedHeadSegmentSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_A,
                    IR::RegOpnd::New(mergedHeadSegmentSym, mergedHeadSegmentSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentSym, predecessorHeadSegmentSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;
        }
        // Head segment length sym differs: copy it and merge its value into
        // the predecessor's (and, if still valid, the successor's) block data.
        if(mergedHeadSegmentLengthSym && predecessorHeadSegmentLengthSym != mergedHeadSegmentLengthSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_A,
                    IR::RegOpnd::New(mergedHeadSegmentLengthSym, mergedHeadSegmentLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentLengthSym, predecessorHeadSegmentLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;
            // Merge the head segment length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorHeadSegmentLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            Value *const predecessorHeadSegmentLengthValue =
                predecessorBlockData.FindValue(predecessorHeadSegmentLengthSym);
            Assert(predecessorHeadSegmentLengthValue);
            predecessorBlockData.SetValue(predecessorHeadSegmentLengthValue, mergedHeadSegmentLengthSym);
            if (updateSuccessorBlockData)
            {
                successorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
                Value *const mergedHeadSegmentLengthValue = successorBlockData.FindValue(mergedHeadSegmentLengthSym);
                if(mergedHeadSegmentLengthValue)
                {
                    Assert(mergedHeadSegmentLengthValue->GetValueNumber() != predecessorHeadSegmentLengthValue->GetValueNumber());
                    if(predecessorHeadSegmentLengthValue->GetValueInfo() != mergedHeadSegmentLengthValue->GetValueInfo())
                    {
                        mergedHeadSegmentLengthValue->SetValueInfo(
                            ValueInfo::MergeLikelyIntValueInfo(
                                this->alloc,
                                mergedHeadSegmentLengthValue,
                                predecessorHeadSegmentLengthValue,
                                mergedHeadSegmentLengthValue->GetValueInfo()->Type()
                                    .Merge(predecessorHeadSegmentLengthValue->GetValueInfo()->Type())));
                    }
                }
                else
                {
                    successorBlockData.SetValue(CopyValue(predecessorHeadSegmentLengthValue), mergedHeadSegmentLengthSym);
                }
            }
        }
        // Length sym differs: same treatment as head segment length.
        if(mergedLengthSym && predecessorLengthSym != mergedLengthSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedLengthSym, mergedLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorLengthSym, predecessorLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;
            // Merge the length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            Value *const predecessorLengthValue = predecessorBlockData.FindValue(predecessorLengthSym);
            Assert(predecessorLengthValue);
            predecessorBlockData.SetValue(predecessorLengthValue, mergedLengthSym);
            if (updateSuccessorBlockData)
            {
                successorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
                Value *const mergedLengthValue = successorBlockData.FindValue(mergedLengthSym);
                if(mergedLengthValue)
                {
                    Assert(mergedLengthValue->GetValueNumber() != predecessorLengthValue->GetValueNumber());
                    if(predecessorLengthValue->GetValueInfo() != mergedLengthValue->GetValueInfo())
                    {
                        mergedLengthValue->SetValueInfo(
                            ValueInfo::MergeLikelyIntValueInfo(
                                this->alloc,
                                mergedLengthValue,
                                predecessorLengthValue,
                                mergedLengthValue->GetValueInfo()->Type().Merge(predecessorLengthValue->GetValueInfo()->Type())));
                    }
                }
                else
                {
                    successorBlockData.SetValue(CopyValue(predecessorLengthValue), mergedLengthSym);
                }
            }
        }
        if(compensated)
        {
            // Save the new ValueInfo for later.
            // We don't want other symbols needing compensation to see this new one
            delayChangeValueInfo.Add({
                predecessorValue,
                ArrayValueInfo::New(
                    alloc,
                    predecessorValueInfo->Type(),
                    mergedHeadSegmentSym ? mergedHeadSegmentSym : predecessorHeadSegmentSym,
                    mergedHeadSegmentLengthSym ? mergedHeadSegmentLengthSym : predecessorHeadSegmentLengthSym,
                    mergedLengthSym ? mergedLengthSym : predecessorLengthSym,
                    predecessorValueInfo->GetSymStore())
                });
        }
    }
    // Once we've compensated all the symbols, update the new ValueInfo.
    delayChangeValueInfo.Map([predecessor, this](int, DelayChangeValueInfo d) { d.ChangeValueInfo(predecessor, this); });
    if(setLastInstrInPredecessor)
    {
        predecessor->SetLastInstr(insertBeforeInstr->m_prev);
    }
}
- bool
- GlobOpt::AreFromSameBytecodeFunc(IR::RegOpnd const* src1, IR::RegOpnd const* dst) const
- {
- Assert(this->func->m_symTable->FindStackSym(src1->m_sym->m_id) == src1->m_sym);
- Assert(this->func->m_symTable->FindStackSym(dst->m_sym->m_id) == dst->m_sym);
- if (dst->m_sym->HasByteCodeRegSlot() && src1->m_sym->HasByteCodeRegSlot())
- {
- return src1->m_sym->GetByteCodeFunc() == dst->m_sym->GetByteCodeFunc();
- }
- return false;
- }
- /*
- * This is for scope object removal along with Heap Arguments optimization.
- * We track several instructions to facilitate the removal of scope object.
* - LdSlotArr - tracked so we know the destination sym that holds the formals array
- * - InlineeStart - To keep track of the stack syms for the formals of the inlinee.
- */
- void
- GlobOpt::TrackInstrsForScopeObjectRemoval(IR::Instr * instr)
- {
- IR::Opnd* dst = instr->GetDst();
- IR::Opnd* src1 = instr->GetSrc1();
- if (instr->m_opcode == Js::OpCode::Ld_A && src1->IsRegOpnd())
- {
- AssertMsg(!instr->m_func->IsStackArgsEnabled() || !src1->IsScopeObjOpnd(instr->m_func), "There can be no aliasing for scope object.");
- }
- // The following is to track formals array for Stack Arguments optimization with Formals
- if (instr->m_func->IsStackArgsEnabled() && !this->IsLoopPrePass())
- {
- if (instr->m_opcode == Js::OpCode::LdSlotArr)
- {
- if (instr->GetSrc1()->IsScopeObjOpnd(instr->m_func))
- {
- AssertMsg(!instr->m_func->GetJITFunctionBody()->HasImplicitArgIns(), "No mapping is required in this case. So it should already be generating ArgIns.");
- instr->m_func->TrackFormalsArraySym(dst->GetStackSym()->m_id);
- }
- }
- else if (instr->m_opcode == Js::OpCode::InlineeStart)
- {
- Assert(instr->m_func->IsInlined());
- Js::ArgSlot actualsCount = instr->m_func->actualCount - 1;
- Js::ArgSlot formalsCount = instr->m_func->GetJITFunctionBody()->GetInParamsCount() - 1;
- Func * func = instr->m_func;
- Func * inlinerFunc = func->GetParentFunc(); //Inliner's func
- IR::Instr * argOutInstr = instr->GetSrc2()->GetStackSym()->GetInstrDef();
- //The argout immediately before the InlineeStart will be the ArgOut for NewScObject
- //So we don't want to track the stack sym for this argout.- Skipping it here.
- if (instr->m_func->IsInlinedConstructor())
- {
- //PRE might introduce a second defintion for the Src1. So assert for the opcode only when it has single definition.
- Assert(argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef() == nullptr ||
- argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::NewScObjectNoCtor);
- argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
- }
- if (formalsCount < actualsCount)
- {
- Js::ArgSlot extraActuals = actualsCount - formalsCount;
- //Skipping extra actuals passed
- for (Js::ArgSlot i = 0; i < extraActuals; i++)
- {
- argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
- }
- }
- StackSym * undefinedSym = nullptr;
- for (Js::ArgSlot param = formalsCount; param > 0; param--)
- {
- StackSym * argOutSym = nullptr;
- if (argOutInstr->GetSrc1())
- {
- if (argOutInstr->GetSrc1()->IsRegOpnd())
- {
- argOutSym = argOutInstr->GetSrc1()->GetStackSym();
- }
- else
- {
- // We will always have ArgOut instr - so the source operand will not be removed.
- argOutSym = StackSym::New(inlinerFunc);
- IR::Opnd * srcOpnd = argOutInstr->GetSrc1();
- IR::Opnd * dstOpnd = IR::RegOpnd::New(argOutSym, TyVar, inlinerFunc);
- IR::Instr * assignInstr = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
- instr->InsertBefore(assignInstr);
- }
- }
- Assert(!func->HasStackSymForFormal(param - 1));
- if (param <= actualsCount)
- {
- Assert(argOutSym);
- func->TrackStackSymForFormalIndex(param - 1, argOutSym);
- argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
- }
- else
- {
- /*When param is out of range of actuals count, load undefined*/
- // TODO: saravind: This will insert undefined for each of the param not having an actual. - Clean up this by having a sym for undefined on func ?
- Assert(formalsCount > actualsCount);
- if (undefinedSym == nullptr)
- {
- undefinedSym = StackSym::New(inlinerFunc);
- IR::Opnd * srcOpnd = IR::AddrOpnd::New(inlinerFunc->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicMisc, inlinerFunc);
- IR::Opnd * dstOpnd = IR::RegOpnd::New(undefinedSym, TyVar, inlinerFunc);
- IR::Instr * assignUndefined = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
- instr->InsertBefore(assignUndefined);
- }
- func->TrackStackSymForFormalIndex(param - 1, undefinedSym);
- }
- }
- }
- }
- }
// Forward-pass escape analysis for the 'arguments' object.
// Tracks the arguments object and its aliases per basic block; any use that the
// stack-allocation optimization cannot handle disqualifies the function via
// CannotAllocateArgumentsObjectOnStack.
void
GlobOpt::OptArguments(IR::Instr *instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();

    TrackInstrsForScopeObjectRemoval(instr);

    if (!TrackArgumentsObject())
    {
        return;
    }

    if (instr->HasAnyLoadHeapArgsOpCode())
    {
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
        // Debug-only sanity checks: when stack args are enabled and there are
        // formals, src1 must be the scope object created earlier in this func.
        if (instr->m_func->IsStackArgsEnabled())
        {
            if (instr->GetSrc1()->IsRegOpnd() && instr->m_func->GetJITFunctionBody()->GetInParamsCount() > 1)
            {
                StackSym * scopeObjSym = instr->GetSrc1()->GetStackSym();
                Assert(scopeObjSym);
                Assert(scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::InitCachedScope || scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::NewScopeObject);
                Assert(instr->m_func->GetScopeObjSym() == scopeObjSym);
                if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase))
                {
                    Output::Print(_u("StackArgFormals : %s (%d) :Setting scopeObjSym in forward pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetJITFunctionBody()->GetFunctionNumber());
                    Output::Flush();
                }
            }
        }
#endif
        // A function with formals (InParamsCount != 1) can only stack-allocate
        // its arguments object when the stack-args-with-formals opt is enabled.
        if (instr->m_func->GetJITFunctionBody()->GetInParamsCount() != 1 && !instr->m_func->IsStackArgsEnabled())
        {
            CannotAllocateArgumentsObjectOnStack(instr->m_func);
        }
        else
        {
            CurrentBlockData()->TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }
    // Keep track of arguments objects and its aliases
    // LdHeapArguments loads the arguments object and Ld_A tracks the aliases.
    if ((instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::BytecodeArgOutCapture) && (src1->IsRegOpnd() && CurrentBlockData()->IsArgumentsOpnd(src1)))
    {
        // In the debug mode, we don't want to optimize away the aliases. Since we may have to show them on the inspection.
        if (((!AreFromSameBytecodeFunc(src1->AsRegOpnd(), dst->AsRegOpnd()) || this->currentBlock->loop) && instr->m_opcode != Js::OpCode::BytecodeArgOutCapture) || this->func->IsJitInDebugMode())
        {
            CannotAllocateArgumentsObjectOnStack(instr->m_func);
            return;
        }

        // Disable stack args if we are aliasing arguments inside try block to a writethrough symbol.
        // We don't have precise tracking of these symbols, so bailout couldn't know if it needs to restore arguments object or not after exception
        Region* tryRegion = this->currentRegion ? this->currentRegion->GetSelfOrFirstTryAncestor() : nullptr;
        if (tryRegion && tryRegion->GetType() == RegionTypeTry &&
            tryRegion->writeThroughSymbolsSet &&
            tryRegion->writeThroughSymbolsSet->Test(dst->AsRegOpnd()->m_sym->m_id))
        {
            CannotAllocateArgumentsObjectOnStack(instr->m_func);
            return;
        }

        if(!dst->AsRegOpnd()->GetStackSym()->m_nonEscapingArgObjAlias)
        {
            CurrentBlockData()->TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    if (!CurrentBlockData()->TestAnyArgumentsSym())
    {
        // There are no syms to track yet, don't start tracking arguments sym.
        return;
    }

    // Avoid loop prepass
    if (this->currentBlock->loop && this->IsLoopPrePass())
    {
        return;
    }

    SymID id = 0;

    // Classify every remaining use of a tracked arguments sym by opcode.
    switch(instr->m_opcode)
    {
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::TypeofElem:
    {
        Assert(src1->IsIndirOpnd());
        IR::RegOpnd *indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();

        if (indexOpnd && CurrentBlockData()->IsArgumentsSymID(indexOpnd->m_sym->m_id))
        {
            // Pathological test cases such as a[arguments]
            CannotAllocateArgumentsObjectOnStack(instr->m_func);
            return;
        }

        IR::RegOpnd *baseOpnd = src1->AsIndirOpnd()->GetBaseOpnd();
        id = baseOpnd->m_sym->m_id;
        if (CurrentBlockData()->IsArgumentsSymID(id))
        {
            // arguments[i] — supported pattern; mark the use.
            instr->usesStackArgumentsObject = true;
        }

        break;
    }
    case Js::OpCode::LdLen_A:
    {
        Assert(src1->IsRegOpnd());
        if(CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            // arguments.length — supported pattern.
            instr->usesStackArgumentsObject = true;
        }
        break;
    }
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    {
        if (CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
            instr->m_func->unoptimizableArgumentsObjReference++;
        }

        if (CurrentBlockData()->IsArgumentsOpnd(src1) &&
            src1->AsRegOpnd()->m_sym->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture)
        {
            // Apply inlining results in such usage - this is to ignore this sym that is def'd by ByteCodeArgOutCapture
            // It's needed because we do not have block level merging of arguments object and this def due to inlining can turn off stack args opt.
            IR::Instr* builtinStart = instr->GetNextRealInstr();
            if (builtinStart->m_opcode == Js::OpCode::InlineBuiltInStart)
            {
                IR::Opnd* builtinOpnd = builtinStart->GetSrc1();
                if (builtinStart->GetSrc1()->IsAddrOpnd())
                {
                    Assert(builtinOpnd->AsAddrOpnd()->m_isFunction);

                    Js::BuiltinFunction builtinFunction = Js::JavascriptLibrary::GetBuiltInForFuncInfo(((FixedFieldInfo*)builtinOpnd->AsAddrOpnd()->m_metadata)->GetLocalFuncId());
                    if (builtinFunction == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        // f.apply(..., arguments) is handled specially; undo the
                        // tracking and the unoptimizable-reference count above.
                        CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
                        instr->m_func->unoptimizableArgumentsObjReference--;
                    }
                }
                else if (builtinOpnd->IsRegOpnd())
                {
                    if (builtinOpnd->AsRegOpnd()->m_sym->m_builtInIndex == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
                        instr->m_func->unoptimizableArgumentsObjReference--;
                    }
                }
            }
        }
        break;
    }
    case Js::OpCode::BailOnNotStackArgs:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::BytecodeArgOutUse:
    {
        // Opcodes emitted specifically to support the stack-args opt.
        if (src1 && CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }

        break;
    }

    default:
        {
            // Super conservative here, if we see the arguments or any of its alias being used in any
            // other opcode just don't do this optimization. Revisit this to optimize further if we see any common
            // case is missed.

            if (src1)
            {
                if (src1->IsRegOpnd() || src1->IsSymOpnd() || src1->IsIndirOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(src1))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                }
            }

            if (src2)
            {
                if (src2->IsRegOpnd() || src2->IsSymOpnd() || src2->IsIndirOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(src2))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                }
            }

            // We should look at dst last to correctly handle cases where it's the same as one of the src operands.
            if (dst)
            {
                if (dst->IsIndirOpnd() || dst->IsSymOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                }
                else if (dst->IsRegOpnd())
                {
                    // Overwriting an arguments alias inside a loop is unsafe (the
                    // alias may still be live on the back edge); outside a loop the
                    // old alias simply dies, so just stop tracking it.
                    if (this->currentBlock->loop && CurrentBlockData()->IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                    CurrentBlockData()->ClearArgumentsSym(dst->AsRegOpnd());
                }
            }
        }
        break;
    }
    return;
}
- void
- GlobOpt::MarkArgumentsUsedForBranch(IR::Instr * instr)
- {
- // If it's a conditional branch instruction and the operand used for branching is one of the arguments
- // to the function, tag the m_argUsedForBranch of the functionBody so that it can be used later for inlining decisions.
- if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsUnconditional())
- {
- IR::BranchInstr * bInstr = instr->AsBranchInstr();
- IR::Opnd *src1 = bInstr->GetSrc1();
- IR::Opnd *src2 = bInstr->GetSrc2();
- // These are used because we don't want to rely on src1 or src2 to always be the register/constant
- IR::RegOpnd *regOpnd = nullptr;
- if (!src2 && (instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A) && src1->IsRegOpnd())
- {
- regOpnd = src1->AsRegOpnd();
- }
- // We need to check for (0===arg) and (arg===0); this is especially important since some minifiers
- // change all instances of one to the other.
- else if (src2 && src2->IsConstOpnd() && src1->IsRegOpnd())
- {
- regOpnd = src1->AsRegOpnd();
- }
- else if (src2 && src2->IsRegOpnd() && src1->IsConstOpnd())
- {
- regOpnd = src2->AsRegOpnd();
- }
- if (regOpnd != nullptr)
- {
- if (regOpnd->m_sym->IsSingleDef())
- {
- IR::Instr * defInst = regOpnd->m_sym->GetInstrDef();
- IR::Opnd *defSym = defInst->GetSrc1();
- if (defSym && defSym->IsSymOpnd() && defSym->AsSymOpnd()->m_sym->IsStackSym()
- && defSym->AsSymOpnd()->m_sym->AsStackSym()->IsParamSlotSym())
- {
- uint16 param = defSym->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
- // We only support functions with 13 arguments to ensure optimal size of callSiteInfo
- if (param < Js::Constants::MaximumArgumentCountForConstantArgumentInlining)
- {
- this->func->GetJITOutput()->SetArgUsedForBranch((uint8)param);
- }
- }
- }
- }
- }
- }
- const InductionVariable*
- GlobOpt::GetInductionVariable(SymID sym, Loop *loop)
- {
- if (loop->inductionVariables)
- {
- for (auto it = loop->inductionVariables->GetIterator(); it.IsValid(); it.MoveNext())
- {
- InductionVariable* iv = &it.CurrentValueReference();
- if (!iv->IsChangeDeterminate() || !iv->IsChangeUnidirectional())
- {
- continue;
- }
- if (iv->Sym()->m_id == sym)
- {
- return iv;
- }
- }
- }
- return nullptr;
- }
- bool
- GlobOpt::IsSymIDInductionVariable(SymID sym, Loop *loop)
- {
- return GetInductionVariable(sym, loop) != nullptr;
- }
- SymID
- GlobOpt::GetVarSymID(StackSym *sym)
- {
- if (sym && sym->m_type != TyVar)
- {
- sym = sym->GetVarEquivSym(nullptr);
- }
- if (!sym)
- {
- return Js::Constants::InvalidSymID;
- }
- return sym->m_id;
- }
// Validates that an element access (base[index]) inside the current loop is a
// legal target for the memset/memcopy transformation: array-typed base with
// bound checks eliminated, loop-invariant base, and an index that is an
// induction variable stepping by exactly 1 consistently with any prior
// candidates in this loop.
// NOTE(review): the `isMemset` parameter is not used in this body as written.
bool
GlobOpt::IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd)
{
    Assert(instr);
    if (!baseOpnd || !indexOpnd)
    {
        return false;
    }
    Loop* loop = this->currentBlock->loop;

    const ValueType baseValueType(baseOpnd->GetValueType());
    const ValueType indexValueType(indexOpnd->GetValueType());

    // Validate the array and index types
    if (
        !indexValueType.IsInt() ||
            !(
                baseValueType.IsTypedIntOrFloatArray() ||
                baseValueType.IsArray()
            )
        )
    {
#if DBG_DUMP
        wchar indexValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        indexValueType.ToString(indexValueTypeStr);
        wchar baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index[%s] or Array[%s] value type is invalid"), indexValueTypeStr, baseValueTypeStr);
#endif
        return false;
    }

    // The following is conservative and works around a bug in induction variable analysis.
    if (baseOpnd->IsArrayRegOpnd())
    {
        IR::ArrayRegOpnd *baseArrayOp = baseOpnd->AsArrayRegOpnd();
        // All bound checks must already be eliminated and no residual head-segment
        // loads introduced, otherwise the memop could read/write out of bounds.
        bool hasBoundChecksRemoved = (
            baseArrayOp->EliminatedLowerBoundCheck() &&
            baseArrayOp->EliminatedUpperBoundCheck() &&
            !instr->extractedUpperBoundCheckWithoutHoisting &&
            !instr->loadedArrayHeadSegment &&
            !instr->loadedArrayHeadSegmentLength
            );
        if (!hasBoundChecksRemoved)
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("Missing bounds check optimization"));
            return false;
        }
    }
    else
    {
        // Not an ArrayRegOpnd: no bound-check information available, so refuse.
        return false;
    }

    if (!baseValueType.IsTypedArray())
    {
        // Check if the instr can kill the value type of the array
        JsArrayKills arrayKills = CheckJsArrayKills(instr);
        if (arrayKills.KillsValueType(baseValueType))
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The array (s%d) can lose its value type"), GetVarSymID(baseOpnd->GetStackSym()));
            return false;
        }
    }

    // Process the Index Operand
    if (!this->OptIsInvariant(baseOpnd, this->currentBlock, loop, CurrentBlockData()->FindValue(baseOpnd->m_sym), false, true))
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Base (s%d) is not invariant"), GetVarSymID(baseOpnd->GetStackSym()));
        return false;
    }

    // Validate the index
    Assert(indexOpnd->GetStackSym());
    SymID indexSymID = GetVarSymID(indexOpnd->GetStackSym());
    const InductionVariable* iv = GetInductionVariable(indexSymID, loop);
    if (!iv)
    {
        // If the index is not an induction variable return
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index (s%d) is not an induction variable"), indexSymID);
        return false;
    }

    Assert(iv->IsChangeDeterminate() && iv->IsChangeUnidirectional());
    const IntConstantBounds & bounds = iv->ChangeBounds();

    if (loop->memOpInfo)
    {
        // Only accept induction variables that increments by 1
        Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
        inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(indexSymID, inductionVariableChangeInfo);

        // Lower/upper change bounds must both be +1 or both be -1, and the index
        // must not have been updated more than once already in this iteration.
        if (
            (bounds.LowerBound() != 1 && bounds.LowerBound() != -1) ||
            (bounds.UpperBound() != bounds.LowerBound()) ||
            inductionVariableChangeInfo.unroll > 1 // Must be 0 (not seen yet) or 1 (already seen)
            )
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The index does not change by 1: %d><%d, unroll=%d"), bounds.LowerBound(), bounds.UpperBound(), inductionVariableChangeInfo.unroll);
            return false;
        }

        // Check if the index is the same in all MemOp optimization in this loop
        if (!loop->memOpInfo->candidates->Empty())
        {
            Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();

            // All MemOp operations within the same loop must use the same index
            if (previousCandidate->index != indexSymID)
            {
                TRACE_MEMOP_VERBOSE(loop, instr, _u("The index is not the same as other MemOp in the loop"));
                return false;
            }
        }
    }

    return true;
}
- bool
- GlobOpt::CollectMemcopyLdElementI(IR::Instr *instr, Loop *loop)
- {
- Assert(instr->GetSrc1()->IsIndirOpnd());
- IR::IndirOpnd *src1 = instr->GetSrc1()->AsIndirOpnd();
- IR::Opnd *indexOpnd = src1->GetIndexOpnd();
- IR::RegOpnd *baseOpnd = src1->GetBaseOpnd()->AsRegOpnd();
- SymID baseSymID = GetVarSymID(baseOpnd->GetStackSym());
- if (!IsAllowedForMemOpt(instr, false, baseOpnd, indexOpnd))
- {
- return false;
- }
- SymID inductionSymID = GetVarSymID(indexOpnd->GetStackSym());
- Assert(IsSymIDInductionVariable(inductionSymID, loop));
- loop->EnsureMemOpVariablesInitialized();
- bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
- IR::Opnd * dst = instr->GetDst();
- if (!dst->IsRegOpnd() || !dst->AsRegOpnd()->GetStackSym()->IsSingleDef())
- {
- return false;
- }
- Loop::MemCopyCandidate* memcopyInfo = memcopyInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemCopyCandidate);
- memcopyInfo->ldBase = baseSymID;
- memcopyInfo->ldCount = 1;
- memcopyInfo->count = 0;
- memcopyInfo->bIndexAlreadyChanged = isIndexPreIncr;
- memcopyInfo->base = Js::Constants::InvalidSymID; //need to find the stElem first
- memcopyInfo->index = inductionSymID;
- memcopyInfo->transferSym = dst->AsRegOpnd()->GetStackSym();
- loop->memOpInfo->candidates->Prepend(memcopyInfo);
- return true;
- }
// Records this StElemI as a memset candidate for `loop`: a store of a single
// loop-invariant value (constant or invariant sym) into base[index] where
// index is the loop's induction variable. Returns true when a candidate was
// queued on loop->memOpInfo->candidates.
bool
GlobOpt::CollectMemsetStElementI(IR::Instr *instr, Loop *loop)
{
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();

    if (!IsAllowedForMemOpt(instr, true, baseOp, indexOp))
    {
        return false;
    }

    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    IR::Opnd *srcDef = instr->GetSrc1();
    StackSym *srcSym = nullptr;
    // A register source is acceptable only when it is invariant in the loop.
    if (srcDef->IsRegOpnd())
    {
        IR::RegOpnd* opnd = srcDef->AsRegOpnd();
        if (this->OptIsInvariant(opnd, this->currentBlock, loop, CurrentBlockData()->FindValue(opnd->m_sym), true, true))
        {
            srcSym = opnd->GetStackSym();
        }
    }

    // Otherwise the source must be a literal constant; capture its bits/type.
    BailoutConstantValue constant = {TyIllegal, 0};
    if (srcDef->IsFloatConstOpnd())
    {
        constant.InitFloatConstValue(srcDef->AsFloatConstOpnd()->m_value);
    }
    else if (srcDef->IsIntConstOpnd())
    {
        constant.InitIntConstValue(srcDef->AsIntConstOpnd()->GetValue(), srcDef->AsIntConstOpnd()->GetType());
    }
    else if (srcDef->IsAddrOpnd())
    {
        constant.InitVarConstValue(srcDef->AsAddrOpnd()->m_address);
    }
    else if(!srcSym)
    {
        // Neither an invariant sym nor a constant: cannot memset.
        TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Source is not an invariant"));
        return false;
    }

    // Process the Index Operand
    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));

    loop->EnsureMemOpVariablesInitialized();
    // Presence in the change-info map means the index was already incremented
    // before this store (pre-increment pattern).
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);

    Loop::MemSetCandidate* memsetInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemSetCandidate);
    memsetInfo->base = baseSymID;
    memsetInfo->index = inductionSymID;
    memsetInfo->constant = constant;
    memsetInfo->srcSym = srcSym;
    memsetInfo->count = 1;
    memsetInfo->bIndexAlreadyChanged = isIndexPreIncr;
    loop->memOpInfo->candidates->Prepend(memsetInfo);
    return true;
}
// Tries to complete a memcopy candidate: this StElemI must store the transfer
// sym produced by the most recent (still-open) CollectMemcopyLdElementI
// candidate, using the same induction-variable index and the same
// pre/post-increment phase. Returns true when the candidate was completed.
bool GlobOpt::CollectMemcopyStElementI(IR::Instr *instr, Loop *loop)
{
    if (!loop->memOpInfo || loop->memOpInfo->candidates->Empty())
    {
        // There is no ldElem matching this stElem
        return false;
    }

    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();
    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    if (!instr->GetSrc1()->IsRegOpnd())
    {
        return false;
    }
    IR::RegOpnd* src1 = instr->GetSrc1()->AsRegOpnd();

    if (!src1->GetIsDead())
    {
        // This must be the last use of the register.
        // It will invalidate `var m = a[i]; b[i] = m;` but this is not a very interesting case.
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Source (s%d) is still alive after StElemI"), baseSymID);
        return false;
    }

    if (!IsAllowedForMemOpt(instr, false, baseOp, indexOp))
    {
        return false;
    }

    SymID srcSymID = GetVarSymID(src1->GetStackSym());

    // Prepare the memcopyCandidate entry
    Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();
    if (!previousCandidate->IsMemCopy())
    {
        return false;
    }
    Loop::MemCopyCandidate* memcopyInfo = previousCandidate->AsMemCopy();

    // The previous candidate has to have been created by the matching ldElem
    // (its store base is still unset and its transfer sym is what we store).
    if (
        memcopyInfo->base != Js::Constants::InvalidSymID ||
        GetVarSymID(memcopyInfo->transferSym) != srcSymID
        )
    {
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("No matching LdElem found (s%d)"), baseSymID);
        return false;
    }

    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
    if (isIndexPreIncr != memcopyInfo->bIndexAlreadyChanged)
    {
        // The index changed between the load and the store
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Index value changed between ldElem and stElem"));
        return false;
    }

    // Consider: Can we remove the count field?
    memcopyInfo->count++;
    AssertOrFailFast(memcopyInfo->count <= 1);
    memcopyInfo->base = baseSymID;

    return true;
}
- bool
- GlobOpt::CollectMemOpLdElementI(IR::Instr *instr, Loop *loop)
- {
- Assert(instr->m_opcode == Js::OpCode::LdElemI_A);
- return (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyLdElementI(instr, loop));
- }
- bool
- GlobOpt::CollectMemOpStElementI(IR::Instr *instr, Loop *loop)
- {
- Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict);
- Assert(instr->GetSrc1());
- return (!PHASE_OFF(Js::MemSetPhase, this->func) && CollectMemsetStElementI(instr, loop)) ||
- (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyStElementI(instr, loop));
- }
// Per-instruction driver of memop candidate collection for the current loop.
// Element loads/stores feed the memset/memcopy candidate lists; Add_I4/Sub_I4
// update the induction-variable change bookkeeping; anything else is vetted by
// IsInstrInvalidForMemOp. Returns false (and clears loop->doMemOp) as soon as
// the loop is disqualified.
bool
GlobOpt::CollectMemOpInfo(IR::Instr *instrBegin, IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    Assert(this->currentBlock->loop);

    Loop *loop = this->currentBlock->loop;

    if (!loop->blockList.HasTwo())
    {
        // We support memcopy and memset for loops which have only two blocks.
        return false;
    }

    if (loop->GetLoopFlags().isInterpreted && !loop->GetLoopFlags().memopMinCountReached)
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("minimum loop count not reached"))
        loop->doMemOp = false;
        return false;
    }
    Assert(loop->doMemOp);

    bool isIncr = true, isChangedByOne = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
        if (!CollectMemOpStElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::LdElemI_A:
        if (!CollectMemOpLdElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::Sub_I4:
        isIncr = false;
        // Fall through: Sub_I4 shares the Add_I4 induction-variable bookkeeping,
        // differing only in the direction flag set above.
    case Js::OpCode::Add_I4:
    {
        // The only case in which these OpCodes can contribute to an inductionVariableChangeInfo
        // is when the induction variable is being modified and overwritten aswell (ex: j = j + 1)
        // and not when the induction variable is modified but not overwritten (ex: k = j + 1).
        // This can either be detected in IR as
        // s1 = Add_I4 s1 1  // Case #1, can be seen with "j++".
        // or as
        // s4(s2) = Add_I4 s3(s1) 1  // Case #2, can be see with "j = j + 1".
        // s1 = Ld_A s2
        bool isInductionVar = false;
        IR::Instr* nextInstr = instr->m_next;
        if (
            // Checks for Case #1 and Case #2
            instr->GetDst()->GetStackSym() != nullptr &&
            instr->GetDst()->IsRegOpnd() &&
            (
                // Checks for Case #1
                (instr->GetDst()->GetStackSym() == instr->GetSrc1()->GetStackSym()) ||

                // Checks for Case #2
                (nextInstr&& nextInstr->m_opcode == Js::OpCode::Ld_A &&
                nextInstr->GetSrc1()->IsRegOpnd() &&
                nextInstr->GetDst()->IsRegOpnd() &&
                GetVarSymID(instr->GetDst()->GetStackSym()) == nextInstr->GetSrc1()->GetStackSym()->m_id &&
                GetVarSymID(instr->GetSrc1()->GetStackSym()) == nextInstr->GetDst()->GetStackSym()->m_id)
            )
        )
        {
            isInductionVar = true;
        }

        // Even if dstIsInductionVar then dst == src1 so it's safe to use src1 as the induction sym always.
        StackSym* sym = instr->GetSrc1()->GetStackSym();

        SymID inductionSymID = GetVarSymID(sym);

        if (isInductionVar && IsSymIDInductionVariable(inductionSymID, this->currentBlock->loop))
        {
            // Determine whether the step is exactly 1 (either a literal 1 or a
            // register whose value is the int constant 1).
            if (!isChangedByOne)
            {
                IR::Opnd *src1, *src2;
                src1 = instr->GetSrc1();
                src2 = instr->GetSrc2();

                if (src2->IsRegOpnd())
                {
                    Value *val = CurrentBlockData()->FindValue(src2->AsRegOpnd()->m_sym);
                    if (val)
                    {
                        ValueInfo *vi = val->GetValueInfo();
                        int constValue;
                        if (vi && vi->TryGetIntConstantValue(&constValue))
                        {
                            if (constValue == 1)
                            {
                                isChangedByOne = true;
                            }
                        }
                    }
                }
                else if (src2->IsIntConstOpnd())
                {
                    if (src2->AsIntConstOpnd()->GetValue() == 1)
                    {
                        isChangedByOne = true;
                    }
                }
            }

            loop->EnsureMemOpVariablesInitialized();
            if (!isChangedByOne)
            {
                // Step != 1: poison this induction variable's unroll count so
                // IsAllowedForMemOpt will reject it later.
                Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { Js::Constants::InvalidLoopUnrollFactor, 0 };

                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                    if (sym->m_id != inductionSymID)
                    {
                        // Backwards pass uses this bit-vector to lookup upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
                        loop->memOpInfo->inductionVariableChangeInfoMap->Add(sym->m_id, inductionVariableChangeInfo);
                    }
                }
                else
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                    if (sym->m_id != inductionSymID)
                    {
                        // Backwards pass uses this bit-vector to lookup upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
                        loop->memOpInfo->inductionVariableChangeInfoMap->Item(sym->m_id, inductionVariableChangeInfo);
                    }
                }
            }
            else
            {
                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    // First update of this induction variable: unroll count 1.
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 1, isIncr };
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                    if (sym->m_id != inductionSymID)
                    {
                        // Backwards pass uses this bit-vector to lookup upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
                        loop->memOpInfo->inductionVariableChangeInfoMap->Add(sym->m_id, inductionVariableChangeInfo);
                    }
                }
                else
                {
                    // Subsequent update: bump the unroll count unless it was
                    // previously invalidated.
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
                    inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(inductionSymID, inductionVariableChangeInfo);

                    // If inductionVariableChangeInfo.unroll has been invalidated, do
                    // not modify the Js::Constants::InvalidLoopUnrollFactor value
                    if (inductionVariableChangeInfo.unroll != Js::Constants::InvalidLoopUnrollFactor)
                    {
                        inductionVariableChangeInfo.unroll++;
                    }
                    inductionVariableChangeInfo.isIncremental = isIncr;
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                    if (sym->m_id != inductionSymID)
                    {
                        // Backwards pass uses this bit-vector to lookup upwardExposedUsed/bytecodeUpwardExposedUsed symbols, which are not necessarily vars. Just add both.
                        loop->memOpInfo->inductionVariableChangeInfoMap->Item(sym->m_id, inductionVariableChangeInfo);
                    }
                }
            }
            break;
        }
        // Fallthrough if not an induction variable
    }
    default:
        // Any other instruction: re-vet the whole range for memop legality and
        // make sure the pending memcopy transfer sym is not consumed early.
        FOREACH_INSTR_IN_RANGE(chkInstr, instrBegin->m_next, instr)
        {
            if (IsInstrInvalidForMemOp(chkInstr, loop, src1Val, src2Val))
            {
                loop->doMemOp = false;
                return false;
            }

            // Make sure this instruction doesn't use the memcopy transfer sym before it is checked by StElemI
            if (loop->memOpInfo && !loop->memOpInfo->candidates->Empty())
            {
                Loop::MemOpCandidate* prevCandidate = loop->memOpInfo->candidates->Head();
                if (prevCandidate->IsMemCopy())
                {
                    Loop::MemCopyCandidate* memcopyCandidate = prevCandidate->AsMemCopy();
                    if (memcopyCandidate->base == Js::Constants::InvalidSymID)
                    {
                        if (chkInstr->HasSymUse(memcopyCandidate->transferSym))
                        {
                            loop->doMemOp = false;
                            TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, chkInstr, _u("Found illegal use of LdElemI value(s%d)"), GetVarSymID(memcopyCandidate->transferSym));
                            return false;
                        }
                    }
                }
            }
        }
        NEXT_INSTR_IN_RANGE;

        IR::Instr* prevInstr = instr->m_prev;

        // If an instr where the dst is an induction variable (and thus is being written to) is not caught by a case in the above
        // switch statement (which implies that this instr does not contributes to a inductionVariableChangeInfo) and in the default
        // case does not set doMemOp to false (which implies that this instr does not invalidate this MemOp), then FailFast as we
        // should not be performing a MemOp under these conditions.
        AssertOrFailFast(!instr->GetDst() || instr->m_opcode == Js::OpCode::IncrLoopBodyCount || !loop->memOpInfo ||

            // Refer to "Case #2" described above in this function. For the following IR:
            // Line #1: s4(s2) = Add_I4 s3(s1) 1
            // Line #2: s3(s1) = Ld_A s4(s2)
            // do not consider line #2 as a violating instr
            (instr->m_opcode == Js::OpCode::Ld_I4 &&
                prevInstr && (prevInstr->m_opcode == Js::OpCode::Add_I4 || prevInstr->m_opcode == Js::OpCode::Sub_I4) &&
                instr->GetSrc1()->IsRegOpnd() &&
                instr->GetDst()->IsRegOpnd() &&
                prevInstr->GetDst()->IsRegOpnd() &&
                instr->GetDst()->GetStackSym() == prevInstr->GetSrc1()->GetStackSym() &&
                instr->GetSrc1()->GetStackSym() == prevInstr->GetDst()->GetStackSym()) ||

            !loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(GetVarSymID(instr->GetDst()->GetStackSym())));
    }
    return true;
}
- bool
- GlobOpt::IsInstrInvalidForMemOp(IR::Instr *instr, Loop *loop, Value *src1Val, Value *src2Val)
- {
- // List of instruction that are valid with memop (ie: instr that gets removed if memop is emitted)
- if (
- this->currentBlock != loop->GetHeadBlock() &&
- !instr->IsLabelInstr() &&
- instr->IsRealInstr() &&
- instr->m_opcode != Js::OpCode::IncrLoopBodyCount &&
- instr->m_opcode != Js::OpCode::StLoopBodyCount &&
- instr->m_opcode != Js::OpCode::Ld_A &&
- instr->m_opcode != Js::OpCode::Ld_I4 &&
- !(instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
- )
- {
- TRACE_MEMOP_VERBOSE(loop, instr, _u("Instruction not accepted for memop"));
- return true;
- }
- // Check prev instr because it could have been added by an optimization and we won't see it here.
- if (OpCodeAttr::FastFldInstr(instr->m_opcode) || (instr->m_prev && OpCodeAttr::FastFldInstr(instr->m_prev->m_opcode)))
- {
- // Refuse any operations interacting with Fields
- TRACE_MEMOP_VERBOSE(loop, instr, _u("Field interaction detected"));
- return true;
- }
- if (Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementSlot)
- {
- // Refuse any operations interacting with slots
- TRACE_MEMOP_VERBOSE(loop, instr, _u("Slot interaction detected"));
- return true;
- }
- if (this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val))
- {
- TRACE_MEMOP_VERBOSE(loop, instr, _u("Implicit call bailout detected"));
- return true;
- }
- return false;
- }
// Rewrites "LdLen_A o.length" (a SymOpnd form): when the object is likely an
// array, a string, or the arguments object — cases with a dedicated length
// fast path — the SymOpnd is converted to a RegOpnd over the object; otherwise
// the instruction is demoted to a plain LdFld property load.
void
GlobOpt::TryReplaceLdLen(IR::Instr *& instr)
{
    // Change LdLen on objects other than arrays, strings, and 'arguments' to LdFld. Otherwise, convert the SymOpnd to a RegOpnd here.
    if (instr->m_opcode == Js::OpCode::LdLen_A && instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd())
    {
        IR::SymOpnd * opnd = instr->GetSrc1()->AsSymOpnd();
        Sym *sym = opnd->m_sym;
        Assert(sym->IsPropertySym());
        PropertySym *originalPropertySym = sym->AsPropertySym();
        // Build a RegOpnd over the object (the base of the .length access).
        IR::RegOpnd* newopnd = IR::RegOpnd::New(originalPropertySym->m_stackSym, IRType::TyVar, instr->m_func);
        // NOTE(review): FindValue's result is dereferenced unchecked here —
        // presumably the object sym always has a value at this point; confirm.
        ValueInfo *const objectValueInfo = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym)->GetValueInfo();
        // things we'd emit a fast path for
        if (
            objectValueInfo->IsLikelyAnyArray() ||
            objectValueInfo->HasHadStringTag() ||
            objectValueInfo->IsLikelyString() ||
            newopnd->IsArgumentsObject() ||
            (CurrentBlockData()->argObjSyms && CurrentBlockData()->IsArgumentsOpnd(newopnd))
            )
        {
            // We need to properly transfer over the information from the old operand, which is
            // a SymOpnd, to the new one, which is a RegOpnd. Unfortunately, the types mean the
            // normal copy methods won't work here, so we're going to directly copy data.
            newopnd->SetIsJITOptimizedReg(opnd->GetIsJITOptimizedReg());
            newopnd->SetValueType(objectValueInfo->Type());
            newopnd->SetIsDead(opnd->GetIsDead());

            // Now that we have the operand we need, we can go ahead and make the replacement
            instr->ReplaceSrc1(newopnd);
        }
        else
        {
            // otherwise, change the instruction to an LdFld here.
            instr->m_opcode = Js::OpCode::LdFld;
        }
    }
}
// Main per-instruction driver of the global optimizer: value-numbers the sources and dst,
// applies copy prop / CSE / peeps / type specialization, tracks byte-code uses, hoists
// invariants, collects memop info, and inserts implicit-call / lazy bailouts as needed.
// Returns the next instruction to process. Sets *isInstrRemoved when 'instr' was deleted
// from the block, in which case the caller must not touch 'instr' again.
IR::Instr *
GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
{
    Assert(instr->m_func->IsTopFunc() || instr->m_func->isGetterSetter || instr->m_func->callSiteIdInParentFunc != UINT16_MAX);

    IR::Opnd *src1, *src2;
    Value *src1Val = nullptr, *src2Val = nullptr, *dstVal = nullptr;
    Value *src1IndirIndexVal = nullptr, *dstIndirIndexVal = nullptr;
    // Captured before any transformation so removals/insertions around 'instr' are visible.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *instrNext = instr->m_next;

    // Keep the current EH region in sync as we cross region-boundary labels.
    if (instr->IsLabelInstr() && this->func->HasTry() && this->func->DoOptimizeTry())
    {
        this->currentRegion = instr->AsLabelInstr()->GetRegion();
        Assert(this->currentRegion);
    }

    if(PrepareForIgnoringIntOverflow(instr))
    {
        // Int-overflow boundary marker: carries no runtime semantics, so outside the
        // prepass it is simply removed.
        if(!IsLoopPrePass())
        {
            *isInstrRemoved = true;
            currentBlock->RemoveInstr(instr);
        }
        return instrNext;
    }

    if (instr->m_opcode == Js::OpCode::Yield)
    {
        // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
        this->ProcessKills(instr);
    }

    // Pseudo-instructions and Conv_Bool are left untouched by the optimizer.
    if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
    {
        return instrNext;
    }

    if (!IsLoopPrePass())
    {
        // Change LdLen on objects other than arrays, strings, and 'arguments' to LdFld.
        this->TryReplaceLdLen(instr);
    }

    // Consider: Do we ever get post-op bailout here, and if so is the FillBailOutInfo call in the right place?
    if (instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        this->FillBailOutInfo(this->currentBlock, instr);
    }

    this->instrCountSinceLastCleanUp++;

    instr = this->PreOptPeep(instr);

    this->OptArguments(instr);

    //StackArguments Optimization - We bail out if the index is out of range of actuals.
    if ((instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
        instr->DoStackArgsOpt() && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, IR::BailOnStackArgsOutOfActualsRange);
    }

#if DBG
    // Snapshot the byte-code syms used before optimization so we can verify (below) that
    // optimization did not change the set when no byteCodeUses record was created.
    PropertySym *propertySymUseBefore = nullptr;
    Assert(this->byteCodeUses == nullptr);
    this->byteCodeUsesBeforeOpt->ClearAll();
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
    Assert(noImplicitCallUsesToInsert->Count() == 0);
#endif

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    src1 = instr->GetSrc1();
    src2 = instr->GetSrc2();

    if (src1)
    {
        src1Val = this->OptSrc(src1, &instr, &src1IndirIndexVal);
        GOPT_TRACE_VALUENUMBER(_u("[src1] "), instr->GetSrc1(), _u("%d"), src1Val ? src1Val->GetValueNumber() : -1);
        instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
        if (src2)
        {
            src2Val = this->OptSrc(src2, &instr);
            GOPT_TRACE_VALUENUMBER(_u("[src2] "), instr->GetSrc2(), _u("%d"), src2Val ? src2Val->GetValueNumber() : -1);
        }
    }
    // An indir dst is also a "use" of its base/index operands; value-number those here.
    if(instr->GetDst() && instr->GetDst()->IsIndirOpnd())
    {
        this->OptSrc(instr->GetDst(), &instr, &dstIndirIndexVal);
    }

    MarkArgumentsUsedForBranch(instr);
    CSEOptimize(this->currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal);
    OptimizeChecks(instr);
    OptArraySrc(&instr, &src1Val, &src2Val);
    OptNewScObject(&instr, src1Val);
    OptStackArgLenAndConst(instr, &src1Val);

    instr = this->OptPeep(instr, src1Val, src2Val);

    // Remove instructions that became no-ops: explicit Nops, and CheckThis on a sym already
    // proven to be a safe 'this'.
    if (instr->m_opcode == Js::OpCode::Nop ||
        (instr->m_opcode == Js::OpCode::CheckThis &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc1()->AsRegOpnd()->m_sym->m_isSafeThis))
    {
        instrNext = instr->m_next;
        InsertNoImplicitCallUses(instr);
        if (this->byteCodeUses)
        {
            this->InsertByteCodeUses(instr);
        }
        *isInstrRemoved = true;
        this->currentBlock->RemoveInstr(instr);
        return instrNext;
    }
    else if (instr->m_opcode == Js::OpCode::GetNewScObject && !this->IsLoopPrePass() && src1Val->GetValueInfo()->IsPrimitive())
    {
        // Constructor returned (src1) a primitive value, so fold this into "dst = Ld_A src2", where src2 is the new object that
        // was passed into the constructor as its 'this' parameter
        instr->FreeSrc1();
        instr->SetSrc1(instr->UnlinkSrc2());
        instr->m_opcode = Js::OpCode::Ld_A;
        src1Val = src2Val;
        src2Val = nullptr;
    }
    else if ((instr->m_opcode == Js::OpCode::TryCatch && this->func->DoOptimizeTry()) || (instr->m_opcode == Js::OpCode::TryFinally && this->func->DoOptimizeTry()))
    {
        ProcessTryHandler(instr);
    }
    else if (instr->m_opcode == Js::OpCode::BrOnException || instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        if (this->ProcessExceptionHandlingEdges(instr))
        {
            *isInstrRemoved = true;
            return instrNext;
        }
    }

    bool isAlreadyTypeSpecialized = false;
    if (!IsLoopPrePass() && instr->HasBailOutInfo())
    {
        if (instr->GetBailOutKind() == IR::BailOutExpectingInteger)
        {
            isAlreadyTypeSpecialized = TypeSpecializeBailoutExpectedInteger(instr, src1Val, &dstVal);
        }
        else if (instr->GetBailOutKind() == IR::BailOutExpectingString)
        {
            if (instr->GetSrc1()->IsRegOpnd())
            {
                if (!src1Val || !src1Val->GetValueInfo()->IsLikelyString())
                {
                    // Disable SwitchOpt if the source is definitely not a string - This may be realized only in Globopt
                    Assert(IsSwitchOptEnabled());
                    throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
                }
            }
        }
    }

    bool forceInvariantHoisting = false;
    const bool ignoreIntOverflowInRangeForInstr = instr->ignoreIntOverflowInRange; // Save it since the instr can change

    if (!isAlreadyTypeSpecialized)
    {
        bool redoTypeSpec;
        instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);

        // Type specialization may request a second pass (e.g. after it rewrote the instr);
        // the second pass must converge.
        if(redoTypeSpec && instr->m_opcode != Js::OpCode::Nop)
        {
            forceInvariantHoisting = false;
            instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);
            Assert(!redoTypeSpec);
        }
        if (instr->m_opcode == Js::OpCode::Nop)
        {
            // Type specialization reduced the instruction to a no-op; remove it.
            InsertNoImplicitCallUses(instr);
            if (this->byteCodeUses)
            {
                this->InsertByteCodeUses(instr);
            }
            instrNext = instr->m_next;
            *isInstrRemoved = true;
            this->currentBlock->RemoveInstr(instr);
            return instrNext;
        }
    }

    if (ignoreIntOverflowInRangeForInstr)
    {
        VerifyIntSpecForIgnoringIntOverflow(instr);
    }

    // Track calls after any pre-op bailouts have been inserted before the call, because they will need to restore out params.
    this->TrackCalls(instr);

    if (instr->GetSrc1())
    {
        this->UpdateObjPtrValueType(instr->GetSrc1(), instr);
    }
    IR::Opnd *dst = instr->GetDst();

    if (dst)
    {
        // Copy prop dst uses and mark live/available type syms before tracking kills.
        CopyPropDstUses(dst, instr, src1Val);
    }

    // Track mark temp object before we process the dst so we can generate pre-op bailout
    instr = this->TrackMarkTempObject(instrPrev->m_next, instr);

    bool removed = OptTagChecks(instr);
    if (removed)
    {
        *isInstrRemoved = true;
        return instrNext;
    }

    dstVal = this->OptDst(&instr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);
    if (dst)
    {
        GOPT_TRACE_VALUENUMBER(_u("[dst] "), instr->GetDst(), _u("%d\n"), dstVal ? dstVal->GetValueNumber() : -1);
    }

    // OptDst may have replaced the instruction/dst; re-read both.
    dst = instr->GetDst();

    instrNext = instr->m_next;
    if (dst)
    {
        if (this->func->HasTry() && this->func->DoOptimizeTry())
        {
            this->InsertToVarAtDefInTryRegion(instr, dst);
        }
        instr = this->SetTypeCheckBailOut(dst, instr, nullptr);
        this->UpdateObjPtrValueType(dst, instr);
    }

    BVSparse<JitArenaAllocator> instrByteCodeStackSymUsedAfter(this->alloc);
    PropertySym *propertySymUseAfter = nullptr;
    if (this->byteCodeUses != nullptr)
    {
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
    }
#if DBG
    else
    {
        // No uses were recorded as removed; verify the used-sym set really is unchanged.
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
        instrByteCodeStackSymUsedAfter.Equal(this->byteCodeUsesBeforeOpt);
        Assert(propertySymUseAfter == propertySymUseBefore);
    }
#endif

    bool isHoisted = false;
    if (this->currentBlock->loop && !this->IsLoopPrePass())
    {
        isHoisted = this->TryHoistInvariant(instr, this->currentBlock, dstVal, src1Val, src2Val, true, false, forceInvariantHoisting);
    }

    src1 = instr->GetSrc1();
    if (!this->IsLoopPrePass() && src1)
    {
        // instr  const, nonConst   =>  canonicalize by swapping operands
        // This simplifies lowering. (somewhat machine dependent)
        // Note that because of Var overflows, src1 may not have been constant prop'd to an IntConst
        this->PreLowerCanonicalize(instr, &src1Val, &src2Val);
    }

    // Collect candidate info for the memset/memcopy optimization while inside a loop body.
    if (!PHASE_OFF(Js::MemOpPhase, this->func) &&
        !isHoisted &&
        !(instr->IsJitProfilingInstr()) &&
        this->currentBlock->loop && !IsLoopPrePass() &&
        !func->IsJitInDebugMode() &&
        !func->IsMemOpDisabled() &&
        this->currentBlock->loop->doMemOp)
    {
        CollectMemOpInfo(instrPrev, instr, src1Val, src2Val);
    }

    InsertNoImplicitCallUses(instr);
    if (this->byteCodeUses != nullptr)
    {
        // Optimization removed some uses from the instruction.
        // Need to insert fake uses so we can get the correct live register to restore in bailout.
        this->byteCodeUses->Minus(&instrByteCodeStackSymUsedAfter);
        if (this->propertySymUse == propertySymUseAfter)
        {
            this->propertySymUse = nullptr;
        }
        this->InsertByteCodeUses(instr);
    }

    if (!this->IsLoopPrePass() && !isHoisted && this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val))
    {
        IR::BailOutKind kind = IR::BailOutOnImplicitCalls;
        if(instr->HasBailOutInfo())
        {
            // Fold the implicit-call pre-op bailout into the existing bailout record.
            Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if((bailOutKind & ~IR::BailOutKindBits) != IR::BailOutOnImplicitCallsPreOp)
            {
                Assert(!(bailOutKind & ~IR::BailOutKindBits));
                instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
            }
        }
        else if (instr->forcePreOpBailOutIfNeeded || this->isRecursiveCallOnLandingPad)
        {
            // We can't have a byte code reg slot as dst to generate a
            // pre-op implicit call after we have processed the dst.
            // Consider: This might miss an opportunity to use a copy prop sym to restore
            // some other byte code reg if the dst is that copy prop that we already killed.
            Assert(!instr->GetDst()
                || !instr->GetDst()->IsRegOpnd()
                || instr->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg()
                || !instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot());
            this->GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
        }
        else
        {
            // Capture value of the bailout after the operation is done.
            this->GenerateBailAfterOperation(&instr, kind);
        }
    }

    if (this->IsLazyBailOutCurrentlyNeeded(instr, src1Val, src2Val, isHoisted))
    {
        this->GenerateLazyBailOut(instr);
    }

    if (CurrentBlockData()->capturedValuesCandidate && !this->IsLoopPrePass())
    {
        this->CommitCapturedValuesCandidate();
    }

#if DBG
    if (CONFIG_FLAG(ValidateIntRanges) && !IsLoopPrePass())
    {
        if (instr->ShouldEmitIntRangeCheck())
        {
            this->EmitIntRangeChecks(instr);
        }
    }
#endif

    return instrNext;
}
- bool
- GlobOpt::IsNonNumericRegOpnd(IR::RegOpnd* opnd, bool inGlobOpt, bool* isSafeToTransferInPrepass /*=nullptr*/) const
- {
- if (opnd == nullptr)
- {
- return false;
- }
- if (opnd->m_sym->m_isNotNumber)
- {
- return true;
- }
- if (!inGlobOpt)
- {
- return false;
- }
- if (opnd->GetValueType().IsNumber() || currentBlock->globOptData.IsTypeSpecialized(opnd->m_sym))
- {
- if (!this->IsLoopPrePass())
- {
- return false;
- }
- Value * opndValue = this->currentBlock->globOptData.FindValue(opnd->m_sym);
- ValueInfo * opndValueInfo = opndValue ? opndValue->GetValueInfo() : nullptr;
- if (!opndValueInfo)
- {
- return true;
- }
- bool isSafeToTransfer = this->IsSafeToTransferInPrepass(opnd->m_sym, opndValueInfo);
- if (isSafeToTransferInPrepass != nullptr)
- {
- *isSafeToTransferInPrepass = isSafeToTransfer;
- }
- if (this->prePassLoop->preservesNumberValue->Test(opnd->m_sym->m_id))
- {
- return false;
- }
- return !isSafeToTransfer;
- }
- return true;
- }
// Use tracked value info of an object sym to optimize tag checks:
// - A BailOnNotObject whose operand can no longer be a tagged value is removed outright.
// - For field accesses on a value that can still be tagged (but has never been a number),
//   split out an explicit BailOnNotObject/BailOutOnTaggedValue check before the access and
//   strengthen the value type to "cannot be tagged".
// Returns true only when 'instr' itself was removed from the block.
bool
GlobOpt::OptTagChecks(IR::Instr *instr)
{
    if (PHASE_OFF(Js::OptTagChecksPhase, this->func) || !this->DoTagChecks())
    {
        return false;
    }

    // Locate the object stack sym whose tag state we are reasoning about, depending on
    // where the instruction holds it (property-sym src, reg src, or property-sym dst).
    StackSym *stackSym = nullptr;
    IR::SymOpnd *symOpnd = nullptr;
    IR::RegOpnd *regOpnd = nullptr;

    switch(instr->m_opcode)
    {
    case Js::OpCode::LdFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::CheckFixedFld:
    case Js::OpCode::CheckPropertyGuardAndLoadType:
        symOpnd = instr->GetSrc1()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;

    case Js::OpCode::BailOnNotObject:
    case Js::OpCode::BailOnNotArray:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            regOpnd = instr->GetSrc1()->AsRegOpnd();
            stackSym = regOpnd->m_sym;
        }
        break;

    case Js::OpCode::StFld:
        symOpnd = instr->GetDst()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;
    }

    if (stackSym)
    {
        Value *value = CurrentBlockData()->FindValue(stackSym);
        if (value)
        {
            ValueInfo *valInfo = value->GetValueInfo();
            // Byte-code constant table syms are handled elsewhere; leave them alone.
            if (valInfo->GetSymStore() && valInfo->GetSymStore()->IsStackSym() && valInfo->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable())
            {
                return false;
            }
            ValueType valueType = value->GetValueInfo()->Type();
            if (instr->m_opcode == Js::OpCode::BailOnNotObject)
            {
                if (valueType.CanBeTaggedValue())
                {
                    // We're not adding new information to the value other than changing the value type. Preserve any existing
                    // information and just change the value type.
                    ChangeValueType(nullptr, value, valueType.SetCanBeTaggedValue(false), true /*preserveSubClassInfo*/);
                    return false;
                }
                // The value is already known not to be tagged: the check is redundant.
                // Remove it (outside the prepass), preserving byte-code uses for bailout.
                if (!this->IsLoopPrePass())
                {
                    if (this->byteCodeUses)
                    {
                        this->InsertByteCodeUses(instr);
                    }
                    this->currentBlock->RemoveInstr(instr);
                }
                return true;
            }

            // Field-access case: only split out a tag check if the value has never been
            // seen as a number (otherwise the check would likely fail at run time).
            if (valueType.CanBeTaggedValue() &&
                !valueType.HasBeenNumber() &&
                !this->IsLoopPrePass())
            {
                ValueType newValueType = valueType.SetCanBeTaggedValue(false);

                // Split out the tag check as a separate instruction.
                IR::Instr *bailOutInstr;
                bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, IR::BailOutOnTaggedValue, instr, instr->m_func);
                if (!this->IsLoopPrePass())
                {
                    FillBailOutInfo(this->currentBlock, bailOutInstr);
                }
                IR::RegOpnd *srcOpnd = regOpnd;
                if (!srcOpnd)
                {
                    // Property-sym case: materialize a reg operand for the object sym.
                    srcOpnd = IR::RegOpnd::New(stackSym, stackSym->GetType(), instr->m_func);
                    AnalysisAssert(symOpnd);
                    if (symOpnd->GetIsJITOptimizedReg())
                    {
                        srcOpnd->SetIsJITOptimizedReg(true);
                    }
                }
                bailOutInstr->SetSrc1(srcOpnd);
                bailOutInstr->GetSrc1()->SetValueType(valueType);
                bailOutInstr->SetByteCodeOffset(instr);
                instr->InsertBefore(bailOutInstr);
                if (this->currentBlock->loop)
                {
                    // Try hoisting the BailOnNotObject instr.
                    // But since this isn't the current instr being optimized, we need to play tricks with
                    // the byteCodeUse fields...
                    TrackByteCodeUsesForInstrAddedInOptInstr(bailOutInstr, [&]()
                    {
                        if (TryHoistInvariant(bailOutInstr, this->currentBlock, nullptr, value, nullptr, true, false, false, IR::BailOutOnTaggedValue))
                        {
                            // The check now guards the loop entry, so the landing-pad
                            // value can also be marked as not-taggable.
                            Value* landingPadValue = this->currentBlock->loop->landingPad->globOptData.FindValue(stackSym);
                            ValueType newLandingPadValueType = landingPadValue->GetValueInfo()->Type().SetCanBeTaggedValue(false);
                            ChangeValueType(nullptr, landingPadValue, newLandingPadValueType, false);
                        }
                    });
                }

                // Propagate the strengthened type to the operand and the tracked value.
                if (symOpnd)
                {
                    symOpnd->SetPropertyOwnerValueType(newValueType);
                }
                else
                {
                    regOpnd->SetValueType(newValueType);
                }
                ChangeValueType(nullptr, value, newValueType, false);
            }
        }
    }

    return false;
}
- bool
- GlobOpt::TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal)
- {
- bool isAlreadyTypeSpecialized = false;
- if(instr->GetSrc1()->IsRegOpnd())
- {
- if (!src1Val || !src1Val->GetValueInfo()->IsLikelyInt() || instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber)
- {
- Assert(IsSwitchOptEnabledForIntTypeSpec());
- throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
- }
- // Attach the BailOutExpectingInteger to FromVar and Remove the bail out info on the Ld_A (Begin Switch) instr.
- this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, TyInt32, IR::BailOutExpectingInteger, false, instr);
- //TypeSpecialize the dst of Ld_A
- TypeSpecializeIntDst(instr, instr->m_opcode, src1Val, src1Val, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, dstVal);
- isAlreadyTypeSpecialized = true;
- }
- instr->ClearBailOutInfo();
- return isAlreadyTypeSpecialized;
- }
// Optimize the destination of '*pInstr': finish property-op optimization, adjust/remove
// bailouts for stores the lowerer won't fast-path, process kills, value-number the dst,
// prefer back-edge-live syms as sym stores inside loops, and feed the instruction to CSE.
// Returns the dst value (may be the incoming 'dstVal' or a newly computed one).
Value*
GlobOpt::OptDst(
    IR::Instr ** pInstr,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    Value *dstIndirIndexVal,
    Value *src1IndirIndexVal)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *opnd = instr->GetDst();

    if (opnd)
    {
        if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
        }

        if (opnd->IsIndirOpnd() && !this->IsLoopPrePass())
        {
            IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
            const ValueType baseValueType(baseOpnd->GetValueType());
            // Var stores into native arrays (or, on x86 without SSE2, float typed arrays)
            // don't get a fast path from the lowerer; fix up the bailouts accordingly.
            if ((
                    baseValueType.IsLikelyNativeArray() ||
                #ifdef _M_IX86
                    (
                        !AutoSystemInfo::Data.SSE2Available() &&
                        baseValueType.IsLikelyObject() &&
                        (
                            baseValueType.GetObjectType() == ObjectType::Float32Array ||
                            baseValueType.GetObjectType() == ObjectType::Float64Array
                        )
                    )
                #else
                    false
                #endif
                ) &&
                instr->GetSrc1()->IsVar())
            {
                if(instr->m_opcode == Js::OpCode::StElemC)
                {
                    // StElemC has different code that handles native array conversion or missing value stores. Add a bailout
                    // for those cases.
                    Assert(baseValueType.IsLikelyNativeArray());
                    Assert(!instr->HasBailOutInfo());
                    GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
                }
                else if(instr->HasBailOutInfo())
                {
                    // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
                    // path. Note that the removed bailouts should not be necessary for correctness. Bailout on native array
                    // conversion will be handled automatically as normal.
                    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                    if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
                    {
                        bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
                    }
                    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
                    {
                        bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
                    }
                    if(bailOutKind)
                    {
                        instr->SetBailOutKind(bailOutKind);
                    }
                    else
                    {
                        instr->ClearBailOutInfo();
                    }
                }
            }
        }
    }

    this->ProcessKills(instr);

    if (opnd)
    {
        if (dstVal == nullptr)
        {
            dstVal = ValueNumberDst(pInstr, src1Val, src2Val);
        }
        if (this->IsLoopPrePass())
        {
            // Keep track of symbols defined in the loop.
            if (opnd->IsRegOpnd())
            {
                StackSym *symDst = opnd->AsRegOpnd()->m_sym;
                rootLoopPrePass->symsDefInLoop->Set(symDst->m_id);
            }
        }
        else if (dstVal)
        {
            opnd->SetValueType(dstVal->GetValueInfo()->Type());

            if (currentBlock->loop &&
                !IsLoopPrePass() &&
                (instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::Ld_I4) &&
                instr->GetSrc1()->IsRegOpnd() &&
                !func->IsJitInDebugMode())
            {
                // Look for the following patterns:
                //
                // Pattern 1:
                //     s1[liveOnBackEdge] = s3[dead]
                //
                // Pattern 2:
                //     s3 = operation(s1[liveOnBackEdge], s2)
                //     s1[liveOnBackEdge] = s3
                //
                // In both patterns, s1 and s3 have the same value by the end. Prefer to use s1 as the sym store instead of s3
                // since s1 is live on back-edge, as otherwise, their lifetimes overlap, requiring two registers to hold the
                // value instead of one.
                do
                {
                    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
                    StackSym *srcVarSym = src->m_sym;
                    if(srcVarSym->IsTypeSpec())
                    {
                        // Compare/record sym stores on the var-equivalent sym.
                        srcVarSym = srcVarSym->GetVarEquivSym(nullptr);
                        Assert(srcVarSym);
                    }
                    if(dstVal->GetValueInfo()->GetSymStore() != srcVarSym)
                    {
                        break;
                    }

                    IR::RegOpnd *const dst = opnd->AsRegOpnd();
                    StackSym *dstVarSym = dst->m_sym;
                    if(dstVarSym->IsTypeSpec())
                    {
                        dstVarSym = dstVarSym->GetVarEquivSym(nullptr);
                        Assert(dstVarSym);
                    }
                    if(!currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(dstVarSym->m_id))
                    {
                        break;
                    }

                    Value *const srcValue = CurrentBlockData()->FindValue(srcVarSym);
                    if(srcValue->GetValueNumber() != dstVal->GetValueNumber())
                    {
                        break;
                    }

                    if(!src->GetIsDead())
                    {
                        // Pattern 2: require the previous instruction to have produced the
                        // src from the dst sym, so the two really carry the same value.
                        IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                        IR::Opnd *const prevDst = prevInstr->GetDst();
                        if(!prevDst ||
                            !src->IsEqualInternal(prevDst) ||
                            !(
                                (prevInstr->GetSrc1() && dst->IsEqual(prevInstr->GetSrc1())) ||
                                (prevInstr->GetSrc2() && dst->IsEqual(prevInstr->GetSrc2()))
                            ))
                        {
                            break;
                        }
                    }

                    // Prefer the back-edge-live dst sym as the value's sym store.
                    this->SetSymStoreDirect(dstVal->GetValueInfo(), dstVarSym);
                } while(false);
            }
        }

        this->ValueNumberObjectType(opnd, instr);
    }

    this->CSEAddInstr(this->currentBlock, *pInstr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);

    return dstVal;
}
- void
- GlobOpt::CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val)
- {
- if (opnd->IsSymOpnd())
- {
- IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
- if (symOpnd->m_sym->IsPropertySym())
- {
- PropertySym * originalPropertySym = symOpnd->m_sym->AsPropertySym();
- Value *const objectValue = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym);
- symOpnd->SetPropertyOwnerValueType(objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
- this->CopyPropPropertySymObj(symOpnd, instr);
- }
- }
- }
// Field PRE (prepass only): give 'propertySym' an initial value at the loop head so a
// field load inside the loop can be treated as available, provided the field is not
// killed in the loop and the object pointer has the same value as in the landing pad.
// The initial value is recorded in loop->initialValueFieldMap, installed in the landing
// pad and all already-processed blocks of the loop, and bound to a fresh sym store.
void
GlobOpt::SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym)
{
    Value *initialValue = nullptr;
    StackSym *symStore;

    // A field killed anywhere in the loop cannot be assumed available on entry.
    if (loop->allFieldsKilled || loop->fieldKilled->Test(originalPropertySym->m_id) || loop->fieldKilled->Test(propertySym->m_id))
    {
        return;
    }

    // Value already exists
    if (CurrentBlockData()->FindValue(propertySym))
    {
        return;
    }

    // If this initial value was already added, we would find it in the current value table.
    Assert(!loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue));

    // If propertySym is live in landingPad, we don't need an initial value.
    if (loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
    {
        return;
    }

    StackSym * objectSym = propertySym->m_stackSym;
    Value *landingPadObjPtrVal, *currentObjPtrVal;
    landingPadObjPtrVal = loop->landingPad->globOptData.FindValue(objectSym);
    currentObjPtrVal = CurrentBlockData()->FindValue(objectSym);

    // The initial value is only valid if the object pointer seen here is provably the same
    // one as at loop entry: either the value numbers match, or (when the landing pad has no
    // value) the object sym's single-def chain bottoms out in a property-sym load.
    auto CanSetInitialValue = [&]() -> bool {
        if (!currentObjPtrVal)
        {
            return false;
        }
        if (landingPadObjPtrVal)
        {
            return currentObjPtrVal->GetValueNumber() == landingPadObjPtrVal->GetValueNumber();
        }
        else
        {
            if (!objectSym->IsSingleDef())
            {
                return false;
            }
            // Walk the single-def chain of reg operands until a property-sym source is found.
            IR::Instr * defInstr = objectSym->GetInstrDef();
            IR::Opnd * src1 = defInstr->GetSrc1();
            while (!(src1 && src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsPropertySym()))
            {
                if (src1 && src1->IsRegOpnd() && src1->AsRegOpnd()->GetStackSym()->IsSingleDef())
                {
                    defInstr = src1->AsRegOpnd()->GetStackSym()->GetInstrDef();
                    src1 = defInstr->GetSrc1();
                }
                else
                {
                    return false;
                }
            }
            return true;
            // Todo: allow other kinds of operands as src1 of instr def of the object sym of the current propertySym
            //       SymOpnd, but not PropertySymOpnd - LdSlotArr, some LdSlots (?)
            //       nullptr - NewScObject
        }
    };

    if (!CanSetInitialValue())
    {
        // objPtr has a different value in the landing pad.
        return;
    }

    // The opnd's value type has not yet been initialized. Since the property sym doesn't have a value, it effectively has an
    // Uninitialized value type. Use the profiled value type from the instruction.
    const ValueType profiledValueType =
        instr->IsProfiledInstr() ? instr->AsProfiledInstr()->u.FldInfo().valueType : ValueType::Uninitialized;
    Assert(!profiledValueType.IsDefinite()); // Hence the values created here don't need to be tracked for kills
    initialValue = this->NewGenericValue(profiledValueType, propertySym);
    symStore = StackSym::New(this->func);

    initialValue->GetValueInfo()->SetSymStore(symStore);
    loop->initialValueFieldMap.Add(propertySym, initialValue->Copy(this->alloc, initialValue->GetValueNumber()));

    // Copy the initial value into the landing pad, but without a symStore
    Value *landingPadInitialValue = Value::New(this->alloc, initialValue->GetValueNumber(),
        ValueInfo::New(this->alloc, initialValue->GetValueInfo()->Type()));
    loop->landingPad->globOptData.SetValue(landingPadInitialValue, propertySym);
    loop->landingPad->globOptData.liveFields->Set(propertySym->m_id);

#if DBG_DUMP
    if (PHASE_TRACE(Js::FieldPREPhase, this->func))
    {
        Output::Print(_u("** TRACE: Field PRE initial value for loop head #%d. Val:%d symStore:"),
            loop->GetHeadBlock()->GetBlockNum(), initialValue->GetValueNumber());
        symStore->Dump();
        Output::Print(_u("\n    Instr: "));
        instr->Dump();
        Output::Flush();
    }
#endif

    // Add initial value to all the previous blocks in the loop.
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, this->currentBlock->GetPrev(), loop->GetHeadBlock())
    {
        if (block->GetDataUseCount() == 0)
        {
            // All successor blocks have been processed, no point in adding the value.
            continue;
        }
        Value *newValue = initialValue->Copy(this->alloc, initialValue->GetValueNumber());
        block->globOptData.SetValue(newValue, propertySym);
        block->globOptData.liveFields->Set(propertySym->m_id);
        block->globOptData.SetValue(newValue, symStore);
        block->globOptData.liveVarSyms->Set(symStore->m_id);
    } NEXT_BLOCK_BACKWARD_IN_RANGE;

    // Finally make the value (and its sym store) live in the current block.
    CurrentBlockData()->SetValue(initialValue, symStore);
    CurrentBlockData()->liveVarSyms->Set(symStore->m_id);
    CurrentBlockData()->liveFields->Set(propertySym->m_id);
}
- // Examine src, apply copy prop and value number it
- Value*
- GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, IR::IndirOpnd *parentIndirOpnd)
- {
- IR::Instr * &instr = *pInstr;
- Assert(!indirIndexValRef || !*indirIndexValRef);
- Assert(
- parentIndirOpnd
- ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
- : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
- Sym *sym;
- Value *val;
- PropertySym *originalPropertySym = nullptr;
- switch(opnd->GetKind())
- {
- case IR::OpndKindIntConst:
- val = this->GetIntConstantValue(opnd->AsIntConstOpnd()->AsInt32(), instr);
- opnd->SetValueType(val->GetValueInfo()->Type());
- return val;
- case IR::OpndKindInt64Const:
- val = this->GetIntConstantValue(opnd->AsInt64ConstOpnd()->GetValue(), instr);
- opnd->SetValueType(val->GetValueInfo()->Type());
- return val;
- case IR::OpndKindFloatConst:
- {
- const FloatConstType floatValue = opnd->AsFloatConstOpnd()->m_value;
- int32 int32Value;
- if(Js::JavascriptNumber::TryGetInt32Value(floatValue, &int32Value))
- {
- val = GetIntConstantValue(int32Value, instr);
- }
- else
- {
- val = NewFloatConstantValue(floatValue);
- }
- opnd->SetValueType(val->GetValueInfo()->Type());
- return val;
- }
- case IR::OpndKindAddr:
- {
- IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
- if (addrOpnd->m_isFunction)
- {
- AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func), "Fixed function address operand with fixed method calls phase disabled?");
- val = NewFixedFunctionValue((Js::JavascriptFunction *)addrOpnd->m_address, addrOpnd);
- opnd->SetValueType(val->GetValueInfo()->Type());
- return val;
- }
- else if (addrOpnd->IsVar() && Js::TaggedInt::Is(addrOpnd->m_address))
- {
- val = this->GetIntConstantValue(Js::TaggedInt::ToInt32(addrOpnd->m_address), instr);
- opnd->SetValueType(val->GetValueInfo()->Type());
- return val;
- }
- val = this->GetVarConstantValue(addrOpnd);
- return val;
- }
- case IR::OpndKindSym:
- {
- // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
- // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
- // value if available, before returning from this function.
- opnd->SetValueType(ValueType::Uninitialized);
- sym = opnd->AsSymOpnd()->m_sym;
- // Don't create a new value for ArgSlots and don't copy prop them away.
- if (sym->IsStackSym() && sym->AsStackSym()->IsArgSlotSym())
- {
- return nullptr;
- }
- // Unless we have profile info, don't create a new value for ArgSlots and don't copy prop them away.
- if (sym->IsStackSym() && sym->AsStackSym()->IsParamSlotSym())
- {
- if (!instr->m_func->IsLoopBody() && instr->m_func->HasProfileInfo())
- {
- // Skip "this" pointer.
- int paramSlotNum = sym->AsStackSym()->GetParamSlotNum() - 2;
- if (paramSlotNum >= 0)
- {
- const auto parameterType = instr->m_func->GetReadOnlyProfileInfo()->GetParameterInfo(static_cast<Js::ArgSlot>(paramSlotNum));
- val = NewGenericValue(parameterType);
- opnd->SetValueType(val->GetValueInfo()->Type());
- return val;
- }
- }
- return nullptr;
- }
- if (!sym->IsPropertySym())
- {
- break;
- }
- originalPropertySym = sym->AsPropertySym();
- // Don't give a value to 'arguments' property sym to prevent field copy prop of 'arguments'
- if (originalPropertySym->AsPropertySym()->m_propertyId == Js::PropertyIds::arguments &&
- originalPropertySym->AsPropertySym()->m_fieldKind == PropertyKindData)
- {
- if (opnd->AsSymOpnd()->IsPropertySymOpnd())
- {
- this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
- }
- return nullptr;
- }
- Value *const objectValue = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym);
- opnd->AsSymOpnd()->SetPropertyOwnerValueType(
- objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
-
- sym = this->CopyPropPropertySymObj(opnd->AsSymOpnd(), instr);
- if (!DoFieldCopyProp())
- {
- if (opnd->AsSymOpnd()->IsPropertySymOpnd())
- {
- this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
- }
- return nullptr;
- }
- switch (instr->m_opcode)
- {
- // These need the symbolic reference to the field, don't copy prop the value of the field
- case Js::OpCode::DeleteFld:
- case Js::OpCode::DeleteRootFld:
- case Js::OpCode::DeleteFldStrict:
- case Js::OpCode::DeleteRootFldStrict:
- case Js::OpCode::ScopedDeleteFld:
- case Js::OpCode::ScopedDeleteFldStrict:
- case Js::OpCode::LdMethodFromFlags:
- case Js::OpCode::BrOnNoProperty:
- case Js::OpCode::BrOnNoLocalProperty:
- case Js::OpCode::BrOnHasProperty:
- case Js::OpCode::BrOnHasLocalProperty:
- case Js::OpCode::LdMethodFldPolyInlineMiss:
- case Js::OpCode::StSlotChkUndecl:
- case Js::OpCode::ScopedLdInst:
- return nullptr;
- };
- if (instr->CallsGetter())
- {
- return nullptr;
- }
- if (this->IsLoopPrePass() && this->DoFieldPRE(this->rootLoopPrePass))
- {
- if (!this->prePassLoop->allFieldsKilled && !this->prePassLoop->fieldKilled->Test(sym->m_id))
- {
- this->SetLoopFieldInitialValue(this->rootLoopPrePass, instr, sym->AsPropertySym(), originalPropertySym);
- }
- if (this->IsPREInstrCandidateLoad(instr->m_opcode))
- {
- // Foreach property sym, remember the first instruction that loads it.
- // Can this be done in one call?
- if (!this->prePassInstrMap->ContainsKey(sym->m_id))
- {
- this->prePassInstrMap->AddNew(sym->m_id, instr->CopyWithoutDst());
- }
- }
- }
- break;
- }
- case IR::OpndKindReg:
- // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
- // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
- // value if available, before returning from this function.
- opnd->SetValueType(ValueType::Uninitialized);
- sym = opnd->AsRegOpnd()->m_sym;
- CurrentBlockData()->MarkTempLastUse(instr, opnd->AsRegOpnd());
- if (sym->AsStackSym()->IsTypeSpec())
- {
- sym = sym->AsStackSym()->GetVarEquivSym(this->func);
- }
- break;
- case IR::OpndKindIndir:
- this->OptimizeIndirUses(opnd->AsIndirOpnd(), &instr, indirIndexValRef);
- return nullptr;
- default:
- return nullptr;
- }
- val = CurrentBlockData()->FindValue(sym);
- if (val)
- {
- Assert(CurrentBlockData()->IsLive(sym) || (sym->IsPropertySym()));
- if (instr)
- {
- opnd = this->CopyProp(opnd, instr, val, parentIndirOpnd);
- }
- // Check if we freed the operand.
- if (opnd == nullptr)
- {
- return nullptr;
- }
- // In a loop prepass, determine stack syms that are used before they are defined in the root loop for which the prepass
- // is being done. This information is used to do type specialization conversions in the landing pad where appropriate.
- if(IsLoopPrePass() &&
- sym->IsStackSym() &&
- !rootLoopPrePass->symsUsedBeforeDefined->Test(sym->m_id) &&
- rootLoopPrePass->landingPad->globOptData.IsLive(sym) && !isAsmJSFunc) // no typespec in asmjs and hence skipping this
- {
- Value *const landingPadValue = rootLoopPrePass->landingPad->globOptData.FindValue(sym);
- if(landingPadValue && val->GetValueNumber() == landingPadValue->GetValueNumber())
- {
- rootLoopPrePass->symsUsedBeforeDefined->Set(sym->m_id);
- ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
- if(landingPadValueInfo->IsLikelyNumber())
- {
- rootLoopPrePass->likelyNumberSymsUsedBeforeDefined->Set(sym->m_id);
- if(DoAggressiveIntTypeSpec() ? landingPadValueInfo->IsLikelyInt() : landingPadValueInfo->IsInt())
- {
- // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
- // specialization is enabled.
- rootLoopPrePass->likelyIntSymsUsedBeforeDefined->Set(sym->m_id);
- }
- }
- }
- }
- }
- else if ((instr->TransfersSrcValue() || OpCodeAttr::CanCSE(instr->m_opcode)) && (opnd == instr->GetSrc1() || opnd == instr->GetSrc2()))
- {
- if (sym->IsPropertySym())
- {
- val = this->CreateFieldSrcValue(sym->AsPropertySym(), originalPropertySym, &opnd, instr);
- }
- else
- {
- val = this->NewGenericValue(ValueType::Uninitialized, opnd);
- }
- }
- if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
- {
- TryOptimizeInstrWithFixedDataProperty(&instr);
- this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
- }
- if (val)
- {
- ValueType valueType(val->GetValueInfo()->Type());
- // This block uses per-instruction profile information on array types to optimize using the best available profile
- // information and to prevent infinite bailouts by ensuring array type information is updated on bailouts.
- if (valueType.IsLikelyArray() && !valueType.IsDefinite() && !valueType.IsObject() && instr->IsProfiledInstr())
- {
- // See if we have profile data for the array type
- IR::ProfiledInstr *const profiledInstr = instr->AsProfiledInstr();
- ValueType profiledArrayType;
- bool useAggressiveSpecialization = true;
- switch(instr->m_opcode)
- {
- case Js::OpCode::LdElemI_A:
- if(instr->GetSrc1()->IsIndirOpnd() && opnd == instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd())
- {
- profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
- useAggressiveSpecialization = !profiledInstr->u.ldElemInfo->IsAggressiveSpecializationDisabled();
- }
- break;
- case Js::OpCode::StElemI_A:
- case Js::OpCode::StElemI_A_Strict:
- case Js::OpCode::StElemC:
- if(instr->GetDst()->IsIndirOpnd() && opnd == instr->GetDst()->AsIndirOpnd()->GetBaseOpnd())
- {
- profiledArrayType = profiledInstr->u.stElemInfo->GetArrayType();
- useAggressiveSpecialization = !profiledInstr->u.stElemInfo->IsAggressiveSpecializationDisabled();
- }
- break;
- case Js::OpCode::LdLen_A:
- if(instr->GetSrc1()->IsRegOpnd() && opnd == instr->GetSrc1())
- {
- profiledArrayType = profiledInstr->u.LdLenInfo().GetArrayType();
- useAggressiveSpecialization = !profiledInstr->u.LdLenInfo().IsAggressiveSpecializationDisabled();
- }
- break;
- case Js::OpCode::IsIn:
- if (instr->GetSrc2()->IsRegOpnd() && opnd == instr->GetSrc2())
- {
- profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
- useAggressiveSpecialization = !profiledInstr->u.ldElemInfo->IsAggressiveSpecializationDisabled();
- }
- break;
- }
- if (profiledArrayType.IsLikelyObject())
- {
- // Ideally we want to use the most specialized type seen by this path, but when that causes bailouts use the least specialized type instead.
- if (useAggressiveSpecialization &&
- profiledArrayType.GetObjectType() == valueType.GetObjectType() &&
- !valueType.IsLikelyNativeIntArray() &&
- (
- profiledArrayType.HasIntElements() || (valueType.HasVarElements() && profiledArrayType.HasFloatElements())
- ))
- {
- // use the more specialized type profiled by the instruction.
- valueType = profiledArrayType.SetHasNoMissingValues(valueType.HasNoMissingValues());
- ChangeValueType(this->currentBlock, CurrentBlockData()->FindValue(opnd->AsRegOpnd()->m_sym), valueType, false);
- }
- else if (!useAggressiveSpecialization &&
- (profiledArrayType.GetObjectType() != valueType.GetObjectType() ||
- (
- valueType.IsLikelyNativeArray() &&
- (
- profiledArrayType.HasVarElements() || (valueType.HasIntElements() && profiledArrayType.HasFloatElements())
- )
- )
- ))
- {
- // Merge array type we pulled from profile with type propagated by dataflow.
- if (profiledArrayType.IsLikelyArray())
- {
- valueType = valueType.Merge(profiledArrayType).SetHasNoMissingValues(valueType.HasNoMissingValues());
- }
- else
- {
- valueType = valueType.Merge(profiledArrayType);
- }
- ChangeValueType(this->currentBlock, CurrentBlockData()->FindValue(opnd->AsRegOpnd()->m_sym), valueType, false, true);
- }
- }
- }
- opnd->SetValueType(valueType);
- if(!IsLoopPrePass() && opnd->IsSymOpnd() && (valueType.IsDefinite() || valueType.IsNotTaggedValue()))
- {
- if (opnd->AsSymOpnd()->m_sym->IsPropertySym())
- {
- // A property sym can only be guaranteed to have a definite value type when implicit calls are disabled from the
- // point where the sym was defined with the definite value type. Insert an instruction to indicate to the
- // dead-store pass that implicit calls need to be kept disabled until after this instruction.
- Assert(DoFieldCopyProp());
- CaptureNoImplicitCallUses(opnd, false, instr);
- }
- }
- }
- else
- {
- opnd->SetValueType(ValueType::Uninitialized);
- }
- return val;
- }
- /*
- * GlobOpt::TryOptimizeInstrWithFixedDataProperty
- * Converts Ld[Root]Fld instr to
- * * CheckFixedFld
- * * Dst = Ld_A <int Constant value>
- * This API assumes that the source operand is a Sym/PropertySym kind.
- */
- void
- GlobOpt::TryOptimizeInstrWithFixedDataProperty(IR::Instr ** const pInstr)
- {
- Assert(pInstr);
- IR::Instr * &instr = *pInstr;
- IR::Opnd * src1 = instr->GetSrc1();
- Assert(src1 && src1->IsSymOpnd() && src1->AsSymOpnd()->IsPropertySymOpnd());
- if(PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func))
- {
- return;
- }
- if (!this->IsLoopPrePass() && !this->isRecursiveCallOnLandingPad &&
- OpCodeAttr::CanLoadFixedFields(instr->m_opcode))
- {
- instr->TryOptimizeInstrWithFixedDataProperty(&instr, this);
- }
- }
- // Constant prop if possible, otherwise if this value already resides in another
- // symbol, reuse this previous symbol. This should help register allocation.
- IR::Opnd *
- GlobOpt::CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd)
- {
- Assert(
- parentIndirOpnd
- ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
- : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
- if (this->IsLoopPrePass())
- {
- // Transformations are not legal in prepass...
- return opnd;
- }
- if (instr->m_opcode == Js::OpCode::CheckFixedFld || instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType)
- {
- // Don't copy prop into CheckFixedFld or CheckPropertyGuardAndLoadType
- return opnd;
- }
- // Don't copy-prop link operands of ExtendedArgs
- if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc2())
- {
- return opnd;
- }
- // Don't copy-prop operand of SIMD instr with ExtendedArg operands. Each instr should have its exclusive EA sequence.
- if (
- Js::IsSimd128Opcode(instr->m_opcode) &&
- instr->GetSrc1() != nullptr &&
- instr->GetSrc1()->IsRegOpnd() &&
- instr->GetSrc2() == nullptr
- )
- {
- StackSym *sym = instr->GetSrc1()->GetStackSym();
- if (sym && sym->IsSingleDef() && sym->GetInstrDef()->m_opcode == Js::OpCode::ExtendArg_A)
- {
- return opnd;
- }
- }
- ValueInfo *valueInfo = val->GetValueInfo();
- if (this->func->HasFinally())
- {
- // s0 = undefined was added on functions with early exit in try-finally functions, that can get copy-proped and case incorrect results
- if (instr->m_opcode == Js::OpCode::ArgOut_A_Inline && valueInfo->GetSymStore() &&
- valueInfo->GetSymStore()->m_id == 0)
- {
- // We don't want to copy-prop s0 (return symbol) into inlinee code
- return opnd;
- }
- }
- // Constant prop?
- int32 intConstantValue;
- int64 int64ConstantValue;
- if (valueInfo->TryGetIntConstantValue(&intConstantValue))
- {
- if (PHASE_OFF(Js::ConstPropPhase, this->func))
- {
- return opnd;
- }
- if ((
- instr->m_opcode == Js::OpCode::StElemI_A ||
- instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
- instr->m_opcode == Js::OpCode::StElemC
- ) && instr->GetSrc1() == opnd)
- {
- // Disabling prop to src of native array store, because we were losing the chance to type specialize.
- // Is it possible to type specialize this src if we allow constants, etc., to be prop'd here?
- if (instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray())
- {
- return opnd;
- }
- }
- if(opnd != instr->GetSrc1() && opnd != instr->GetSrc2())
- {
- if(PHASE_OFF(Js::IndirCopyPropPhase, instr->m_func))
- {
- return opnd;
- }
- // Const-prop an indir opnd's constant index into its offset
- IR::Opnd *srcs[] = { instr->GetSrc1(), instr->GetSrc2(), instr->GetDst() };
- for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]); ++i)
- {
- const auto src = srcs[i];
- if(!src || !src->IsIndirOpnd())
- {
- continue;
- }
- const auto indir = src->AsIndirOpnd();
- if ((int64)indir->GetOffset() + intConstantValue > INT32_MAX)
- {
- continue;
- }
- if(opnd == indir->GetIndexOpnd())
- {
- Assert(indir->GetScale() == 0);
- GOPT_TRACE_OPND(opnd, _u("Constant prop indir index into offset (value: %d)\n"), intConstantValue);
- this->CaptureByteCodeSymUses(instr);
- indir->SetOffset(indir->GetOffset() + intConstantValue);
- indir->SetIndexOpnd(nullptr);
- }
- }
- return opnd;
- }
- if (Js::TaggedInt::IsOverflow(intConstantValue))
- {
- return opnd;
- }
- IR::Opnd *constOpnd;
- if (opnd->IsVar())
- {
- IR::AddrOpnd *addrOpnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked((int)intConstantValue), IR::AddrOpndKindConstantVar, instr->m_func);
- GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), addrOpnd->m_address, intConstantValue);
- constOpnd = addrOpnd;
- }
- else
- {
- // Note: Jit loop body generates some i32 operands...
- Assert(opnd->IsInt32() || opnd->IsInt64() || opnd->IsUInt32());
- IRType opndType;
- IntConstType constVal;
- if (opnd->IsUInt32())
- {
- // avoid sign extension
- constVal = (uint32)intConstantValue;
- opndType = TyUint32;
- }
- else
- {
- constVal = intConstantValue;
- opndType = TyInt32;
- }
- IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(constVal, opndType, instr->m_func);
- GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), intOpnd->GetImmediateValue(instr->m_func), intConstantValue);
- constOpnd = intOpnd;
- }
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
- if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
- {
- instr->DumpFieldCopyPropTestTrace(this->isRecursiveCallOnLandingPad);
- }
- #endif
- this->CaptureByteCodeSymUses(instr);
- opnd = instr->ReplaceSrc(opnd, constOpnd);
- switch (instr->m_opcode)
- {
- case Js::OpCode::LdSlot:
- case Js::OpCode::LdSlotArr:
- case Js::OpCode::LdFld:
- case Js::OpCode::LdFldForTypeOf:
- case Js::OpCode::LdRootFldForTypeOf:
- case Js::OpCode::LdFldForCallApplyTarget:
- case Js::OpCode::LdRootFld:
- case Js::OpCode::LdMethodFld:
- case Js::OpCode::LdRootMethodFld:
- case Js::OpCode::LdMethodFromFlags:
- case Js::OpCode::ScopedLdMethodFld:
- case Js::OpCode::ScopedLdFld:
- case Js::OpCode::ScopedLdFldForTypeOf:
- instr->m_opcode = Js::OpCode::Ld_A;
- case Js::OpCode::Ld_A:
- {
- IR::Opnd * dst = instr->GetDst();
- if (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsSingleDef())
- {
- dst->AsRegOpnd()->m_sym->SetIsIntConst((int)intConstantValue);
- }
- break;
- }
- case Js::OpCode::ArgOut_A:
- case Js::OpCode::ArgOut_A_Inline:
- case Js::OpCode::ArgOut_A_FixupForStackArgs:
- case Js::OpCode::ArgOut_A_InlineBuiltIn:
- if (instr->GetDst()->IsRegOpnd())
- {
- Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
- instr->GetDst()->AsRegOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
- }
- else
- {
- instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
- }
- break;
- case Js::OpCode::TypeofElem:
- instr->m_opcode = Js::OpCode::Typeof;
- break;
- case Js::OpCode::StSlotChkUndecl:
- if (instr->GetSrc2() == opnd)
- {
- // Src2 here should refer to the same location as the Dst operand, which we need to keep live
- // due to the implicit read for ChkUndecl.
- instr->m_opcode = Js::OpCode::StSlot;
- instr->FreeSrc2();
- opnd = nullptr;
- }
- break;
- }
- return opnd;
- }
- else if (valueInfo->TryGetIntConstantValue(&int64ConstantValue, false))
- {
- if (PHASE_OFF(Js::ConstPropPhase, this->func) || !PHASE_ON(Js::Int64ConstPropPhase, this->func))
- {
- return opnd;
- }
- Assert(this->func->GetJITFunctionBody()->IsWasmFunction());
- if (this->func->GetJITFunctionBody()->IsWasmFunction() && opnd->IsInt64())
- {
- IR::Int64ConstOpnd *intOpnd = IR::Int64ConstOpnd::New(int64ConstantValue, opnd->GetType(), instr->m_func);
- GOPT_TRACE_OPND(opnd, _u("Constant prop %lld (value:%lld)\n"), intOpnd->GetImmediateValue(instr->m_func), int64ConstantValue);
- this->CaptureByteCodeSymUses(instr);
- opnd = instr->ReplaceSrc(opnd, intOpnd);
- }
- return opnd;
- }
- Sym *opndSym = nullptr;
- if (opnd->IsRegOpnd())
- {
- IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
- opndSym = regOpnd->m_sym;
- }
- else if (opnd->IsSymOpnd())
- {
- IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
- opndSym = symOpnd->m_sym;
- }
- if (!opndSym)
- {
- return opnd;
- }
- if (PHASE_OFF(Js::CopyPropPhase, this->func))
- {
- this->SetSymStoreDirect(valueInfo, opndSym);
- return opnd;
- }
- StackSym *copySym = CurrentBlockData()->GetCopyPropSym(opndSym, val);
- if (copySym != nullptr)
- {
- Assert(!opndSym->IsStackSym() || copySym->GetSymSize() == opndSym->AsStackSym()->GetSymSize());
- // Copy prop.
- return CopyPropReplaceOpnd(instr, opnd, copySym, parentIndirOpnd);
- }
- else
- {
- if (valueInfo->GetSymStore() && instr->m_opcode == Js::OpCode::Ld_A && instr->GetDst()->IsRegOpnd()
- && valueInfo->GetSymStore() == instr->GetDst()->AsRegOpnd()->m_sym)
- {
- // Avoid resetting symStore after fieldHoisting:
- // t1 = LdFld field <- set symStore to fieldHoistSym
- // fieldHoistSym = Ld_A t1 <- we're looking at t1 now, but want to copy-prop fieldHoistSym forward
- return opnd;
- }
- this->SetSymStoreDirect(valueInfo, opndSym);
- }
- return opnd;
- }
// Replaces 'opnd' on 'instr' with a reference to 'copySym', which is known to hold
// the same value in the current block. Reg operands are retargeted in place; sym
// (field) operands are replaced with a new reg operand, possibly after materializing
// a standalone object-type check to preserve downstream type-check coverage.
// Returns the resulting reg operand, or nullptr if the operand was removed
// (StSlotChkUndecl src2 case).
IR::Opnd *
GlobOpt::CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd)
{
    // 'opnd' must belong to 'instr' itself or to the indir operand being processed.
    Assert(
        parentIndirOpnd
            ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
            : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
    Assert(CurrentBlockData()->IsLive(copySym));

    IR::RegOpnd *regOpnd;
    StackSym *newSym = copySym;

    GOPT_TRACE_OPND(opnd, _u("Copy prop s%d\n"), newSym->m_id);
#if ENABLE_DEBUG_CONFIG_OPTIONS
    //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
    if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
    {
        instr->DumpFieldCopyPropTestTrace(this->isRecursiveCallOnLandingPad);
    }
#endif

    this->CaptureByteCodeSymUses(instr);
    if (opnd->IsRegOpnd())
    {
        // Retarget the existing reg operand to the copy-prop sym.
        regOpnd = opnd->AsRegOpnd();
        regOpnd->m_sym = newSym;
        regOpnd->SetIsJITOptimizedReg(true);

        // The dead bit on the opnd is specific to the sym it is referencing. Since we replaced the sym, the bit is reset.
        regOpnd->SetIsDead(false);

        if(parentIndirOpnd)
        {
            // Base/index of an indir: no opcode simplification applies; done.
            return regOpnd;
        }
    }
    else
    {
        // If this is an object type specialized field load inside a loop, and it produces a type value which wasn't live
        // before, make sure the type check is left in the loop, because it may be the last type check in the loop protecting
        // other fields which are not hoistable and are lexically upstream in the loop. If the check is not ultimately
        // needed, the dead store pass will remove it.
        if (this->currentBlock->loop != nullptr && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            IR::PropertySymOpnd* propertySymOpnd = opnd->AsPropertySymOpnd();
            if (CheckIfPropOpEmitsTypeCheck(instr, propertySymOpnd))
            {
                // We only set guarded properties in the dead store pass, so they shouldn't be set here yet. If they were
                // we would need to move them from this operand to the operand which is being copy propagated.
                Assert(propertySymOpnd->GetGuardedPropOps() == nullptr);

                // We're creating a copy of this operand to be reused in the same spot in the flow, so we can copy all
                // flow sensitive fields. However, we will do only a type check here (no property access) and only for
                // the sake of downstream instructions, so the flags pertaining to this property access are irrelevant.
                IR::PropertySymOpnd* checkObjTypeOpnd = CreateOpndForTypeCheckOnly(propertySymOpnd, instr->m_func);
                IR::Instr* checkObjTypeInstr = IR::Instr::New(Js::OpCode::CheckObjType, instr->m_func);
                checkObjTypeInstr->SetSrc1(checkObjTypeOpnd);
                checkObjTypeInstr->SetByteCodeOffset(instr);
                instr->InsertBefore(checkObjTypeInstr);

                // Since we inserted this instruction before the one that is being processed in natural flow, we must process
                // it for object type spec explicitly here.
                FinishOptPropOp(checkObjTypeInstr, checkObjTypeOpnd);
                Assert(!propertySymOpnd->IsTypeChecked());
                checkObjTypeInstr = this->SetTypeCheckBailOut(checkObjTypeOpnd, checkObjTypeInstr, nullptr);
                Assert(checkObjTypeInstr->HasBailOutInfo());

                if (this->currentBlock->loop && !this->IsLoopPrePass())
                {
                    // Try hoisting this checkObjType.
                    // But since this isn't the current instr being optimized, we need to play tricks with
                    // the byteCodeUse fields...
                    TrackByteCodeUsesForInstrAddedInOptInstr(checkObjTypeInstr, [&]()
                    {
                        TryHoistInvariant(checkObjTypeInstr, this->currentBlock, NULL, CurrentBlockData()->FindValue(copySym), NULL, true);
                    });
                }
            }
        }

        if (opnd->IsSymOpnd() && opnd->GetIsDead())
        {
            // Take the property sym out of the live fields set
            this->EndFieldLifetime(opnd->AsSymOpnd());
        }
        // Replace the field operand with a fresh reg operand referencing the copy sym.
        regOpnd = IR::RegOpnd::New(newSym, opnd->GetType(), instr->m_func);
        regOpnd->SetIsJITOptimizedReg(true);
        instr->ReplaceSrc(opnd, regOpnd);
    }

    // Now that the source is a plain register, simplify or remove the instruction where possible.
    switch (instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
        // Self-assignment after copy prop: the instruction is a no-op.
        if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
            instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
        {
            this->InsertByteCodeUses(instr, true);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;

    case Js::OpCode::LdSlot:
    case Js::OpCode::LdSlotArr:
        if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
            instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
        {
            this->InsertByteCodeUses(instr, true);
            instr->m_opcode = Js::OpCode::Nop;
        }
        else
        {
            // Slot load became a register move.
            instr->m_opcode = Js::OpCode::Ld_A;
        }
        break;

    case Js::OpCode::StSlotChkUndecl:
        if (instr->GetSrc2()->IsRegOpnd())
        {
            // Src2 here should refer to the same location as the Dst operand, which we need to keep live
            // due to the implicit read for ChkUndecl.
            instr->m_opcode = Js::OpCode::StSlot;
            instr->FreeSrc2();
            return nullptr;
        }
        break;

    case Js::OpCode::LdFld:
    case Js::OpCode::LdFldForTypeOf:
    case Js::OpCode::LdRootFldForTypeOf:
    case Js::OpCode::LdFldForCallApplyTarget:
    case Js::OpCode::LdRootFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::LdRootMethodFld:
    case Js::OpCode::ScopedLdMethodFld:
    case Js::OpCode::ScopedLdFld:
    case Js::OpCode::ScopedLdFldForTypeOf:
        // Field load became a register move.
        instr->m_opcode = Js::OpCode::Ld_A;
        break;

    case Js::OpCode::LdMethodFromFlags:
        // The bailout is checked on the loop top and we don't need to check bailout again in loop.
        instr->m_opcode = Js::OpCode::Ld_A;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::TypeofElem:
        instr->m_opcode = Js::OpCode::Typeof;
        break;
    }
    CurrentBlockData()->MarkTempLastUse(instr, regOpnd);
    return regOpnd;
}
- ValueNumber
- GlobOpt::NewValueNumber()
- {
- ValueNumber valueNumber = this->currentValue++;
- if (valueNumber == 0)
- {
- Js::Throw::OutOfMemory();
- }
- return valueNumber;
- }
- Value *GlobOpt::NewValue(ValueInfo *const valueInfo)
- {
- return NewValue(NewValueNumber(), valueInfo);
- }
- Value *GlobOpt::NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo)
- {
- Assert(valueInfo);
- return Value::New(alloc, valueNumber, valueInfo);
- }
- Value *GlobOpt::CopyValue(Value const *const value)
- {
- return CopyValue(value, NewValueNumber());
- }
- Value *GlobOpt::CopyValue(Value const *const value, const ValueNumber valueNumber)
- {
- Assert(value);
- return value->Copy(alloc, valueNumber);
- }
- Value *
- GlobOpt::NewGenericValue(const ValueType valueType)
- {
- return NewGenericValue(valueType, static_cast<IR::Opnd *>(nullptr));
- }
- Value *
- GlobOpt::NewGenericValue(const ValueType valueType, IR::Opnd *const opnd)
- {
- // Shouldn't assign a likely-int value to something that is definitely not an int
- Assert(!(valueType.IsLikelyInt() && opnd && opnd->IsNotInt()));
- ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
- Value *val = NewValue(valueInfo);
- TrackNewValueForKills(val);
- CurrentBlockData()->InsertNewValue(val, opnd);
- return val;
- }
- Value *
- GlobOpt::NewGenericValue(const ValueType valueType, Sym *const sym)
- {
- ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
- Value *val = NewValue(valueInfo);
- TrackNewValueForKills(val);
- CurrentBlockData()->SetValue(val, sym);
- return val;
- }
// Returns a value representing the int32 constant 'intConst', preferring a value
// already live in the current block (found via the global constant cache) so that
// equal constants share a value number; otherwise creates a fresh value. The value
// is associated with 'opnd' (may be null) in the current block before returning.
Value *
GlobOpt::GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd)
{
    Value *value = nullptr;
    Value *const cachedValue = this->intConstantToValueMap->Lookup(intConst, nullptr);

    if(cachedValue)
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
        if (symStore && CurrentBlockData()->IsLive(symStore))
        {
            Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
            int32 symStoreIntConstantValue;
            // Reuse only if the sym store's current value still is this exact constant.
            if (symStoreValue &&
                symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
                symStoreValue->GetValueInfo()->TryGetIntConstantValue(&symStoreIntConstantValue) &&
                symStoreIntConstantValue == intConst)
            {
                value = symStoreValue;
            }
        }
    }

    if (!value)
    {
        // Taggability determines whether the constant-load hoisting path is taken.
        value = NewIntConstantValue(intConst, instr, !Js::TaggedInt::IsOverflow(intConst));
    }

    return CurrentBlockData()->InsertNewValue(value, opnd);
}
- Value *
- GlobOpt::GetIntConstantValue(const int64 intConst, IR::Instr * instr, IR::Opnd *const opnd)
- {
- Assert(instr->m_func->GetJITFunctionBody()->IsWasmFunction());
- Value *value = nullptr;
- Value *const cachedValue = this->int64ConstantToValueMap->Lookup(intConst, nullptr);
- if (cachedValue)
- {
- // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
- // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
- // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
- // way to determine if a value with the same value number exists for this block. So the best we can do with a global
- // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
- // Otherwise, we have to create a new value with a new value number.
- Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
- if (symStore && this->currentBlock->globOptData.IsLive(symStore))
- {
- Value *const symStoreValue = this->currentBlock->globOptData.FindValue(symStore);
- int64 symStoreIntConstantValue;
- if (symStoreValue &&
- symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
- symStoreValue->GetValueInfo()->TryGetInt64ConstantValue(&symStoreIntConstantValue, false) &&
- symStoreIntConstantValue == intConst)
- {
- value = symStoreValue;
- }
- }
- }
- if (!value)
- {
- value = NewInt64ConstantValue(intConst, instr);
- }
- return this->currentBlock->globOptData.InsertNewValue(value, opnd);
- }
// Creates a new value for a 64-bit int constant and records it in the global
// int64-constant cache (used on the Wasm path — see GetIntConstantValue(int64)).
// If the defining instruction is a plain constant load, the destination sym
// becomes the value's sym store so downstream uses of the same constant can share
// this value.
Value *
GlobOpt::NewInt64ConstantValue(const int64 intConst, IR::Instr* instr)
{
    Value * value = NewValue(Int64ConstantValueInfo::New(this->alloc, intConst));
    this->int64ConstantToValueMap->Item(intConst, value);

    if (!value->GetValueInfo()->GetSymStore() &&
        (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
    {
        StackSym * sym = instr->GetDst()->GetStackSym();
        // Int64 destination syms are expected not to be type-specialized here.
        Assert(sym && !sym->IsTypeSpec());
        this->currentBlock->globOptData.SetValue(value, sym);
        this->currentBlock->globOptData.liveVarSyms->Set(sym->m_id);
    }
    return value;
}
// Creates a new value for the int32 constant 'intConst' and records it in the
// global constant cache. For taggable constants (unless the phase is off), calls
// HoistConstantLoadAndPropagateValueBackward and, when the value still lacks a sym
// store and the instruction is a plain constant load, binds the destination sym as
// the sym store so later uses of the same constant can share this value.
Value *
GlobOpt::NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable)
{
    Value * value = NewValue(IntConstantValueInfo::New(this->alloc, intConst));
    this->intConstantToValueMap->Item(intConst, value);

    if (isTaggable &&
        !PHASE_OFF(Js::HoistConstIntPhase, this->func))
    {
        // When creating a new int constant value, make sure it gets a symstore. If the int const doesn't have a symstore,
        // any downstream instruction using the same int will have to create a new value (object) for the int.
        // This gets in the way of CSE.
        value = HoistConstantLoadAndPropagateValueBackward(Js::TaggedInt::ToVarUnchecked(intConst), instr, value);
        if (!value->GetValueInfo()->GetSymStore() &&
            (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
        {
            StackSym * sym = instr->GetDst()->GetStackSym();
            Assert(sym);
            if (sym->IsTypeSpec())
            {
                // Type-specialized int32 dst: bind the value to the equivalent var sym
                // and mark the int32 sym live.
                Assert(sym->IsInt32());
                StackSym * varSym = sym->GetVarEquivSym(instr->m_func);
                CurrentBlockData()->SetValue(value, varSym);
                CurrentBlockData()->liveInt32Syms->Set(varSym->m_id);
            }
            else
            {
                CurrentBlockData()->SetValue(value, sym);
                CurrentBlockData()->liveVarSyms->Set(sym->m_id);
            }
        }
    }
    return value;
}
- ValueInfo *
- GlobOpt::NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout)
- {
- return ValueInfo::NewIntRangeValueInfo(this->alloc, min, max, wasNegativeZeroPreventedByBailout);
- }
- ValueInfo *GlobOpt::NewIntRangeValueInfo(
- const ValueInfo *const originalValueInfo,
- const int32 min,
- const int32 max) const
- {
- Assert(originalValueInfo);
- ValueInfo *valueInfo;
- if(min == max)
- {
- // Since int constant values are const-propped, negative zero tracking does not track them, and so it's okay to ignore
- // 'wasNegativeZeroPreventedByBailout'
- valueInfo = IntConstantValueInfo::New(alloc, min);
- }
- else
- {
- valueInfo =
- IntRangeValueInfo::New(
- alloc,
- min,
- max,
- min <= 0 && max >= 0 && originalValueInfo->WasNegativeZeroPreventedByBailout());
- }
- valueInfo->SetSymStore(originalValueInfo->GetSymStore());
- return valueInfo;
- }
- Value *
- GlobOpt::NewIntRangeValue(
- const int32 min,
- const int32 max,
- const bool wasNegativeZeroPreventedByBailout,
- IR::Opnd *const opnd)
- {
- ValueInfo *valueInfo = this->NewIntRangeValueInfo(min, max, wasNegativeZeroPreventedByBailout);
- Value *val = NewValue(valueInfo);
- if (opnd)
- {
- GOPT_TRACE_OPND(opnd, _u("Range %d (0x%X) to %d (0x%X)\n"), min, min, max, max);
- }
- CurrentBlockData()->InsertNewValue(val, opnd);
- return val;
- }
- IntBoundedValueInfo *GlobOpt::NewIntBoundedValueInfo(
- const ValueInfo *const originalValueInfo,
- const IntBounds *const bounds) const
- {
- Assert(originalValueInfo);
- bounds->Verify();
- IntBoundedValueInfo *const valueInfo =
- IntBoundedValueInfo::New(
- originalValueInfo->Type(),
- bounds,
- (
- bounds->ConstantLowerBound() <= 0 &&
- bounds->ConstantUpperBound() >= 0 &&
- originalValueInfo->WasNegativeZeroPreventedByBailout()
- ),
- alloc);
- valueInfo->SetSymStore(originalValueInfo->GetSymStore());
- return valueInfo;
- }
- Value *GlobOpt::NewIntBoundedValue(
- const ValueType valueType,
- const IntBounds *const bounds,
- const bool wasNegativeZeroPreventedByBailout,
- IR::Opnd *const opnd)
- {
- Value *const value = NewValue(IntBoundedValueInfo::New(valueType, bounds, wasNegativeZeroPreventedByBailout, alloc));
- CurrentBlockData()->InsertNewValue(value, opnd);
- return value;
- }
- Value *
- GlobOpt::NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd)
- {
- FloatConstantValueInfo *valueInfo = FloatConstantValueInfo::New(this->alloc, floatValue);
- Value *val = NewValue(valueInfo);
- CurrentBlockData()->InsertNewValue(val, opnd);
- return val;
- }
// Returns a value for the given address constant. Reuses a cached value when the
// global cache entry provably corresponds to a live value in the current block;
// for string constants, additionally tries to reuse a value for a different string
// instance with identical characters. Falls back to creating a new var-constant
// value, and refreshes the operand's value type before returning.
Value *
GlobOpt::GetVarConstantValue(IR::AddrOpnd *addrOpnd)
{
    bool isVar = addrOpnd->IsVar();
    // NOTE(review): string detection relies on m_localAddress being available
    // (presumably the JIT-process-local copy of the address) — confirm with IR::AddrOpnd.
    bool isString = isVar && addrOpnd->m_localAddress && JITJavascriptString::Is(addrOpnd->m_localAddress);
    Value *val = nullptr;
    Value *cachedValue = nullptr;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && CurrentBlockData()->IsLive(symStore))
        {
            Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                // Reuse only if the live value still denotes this exact address constant.
                if(symStoreValueInfo->IsVarConstant() && symStoreValueInfo->AsVarConstant()->VarValue() == addrOpnd->m_address)
                {
                    val = symStoreValue;
                }
            }
        }
    }
    else if (isString)
    {
        // No address match; for strings, look up by content in the string-constant cache.
        JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
        Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
        if (this->stringConstantToValueMap->TryGetValue(internalString, &cachedValue))
        {
            // Same liveness/value-number protocol as the address cache above.
            Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
            if (symStore && CurrentBlockData()->IsLive(symStore))
            {
                Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
                if (symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
                {
                    ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                    if (symStoreValueInfo->IsVarConstant())
                    {
                        // Compare actual characters, since the cached value may refer to a
                        // different string object with the same contents.
                        JITJavascriptString * cachedString = JITJavascriptString::FromVar(symStoreValue->GetValueInfo()->AsVarConstant()->VarValue(true));
                        Js::InternalString cachedInternalString(cachedString->GetString(), cachedString->GetLength());
                        if (Js::InternalStringComparer::Equals(internalString, cachedInternalString))
                        {
                            val = symStoreValue;
                        }
                    }
                }
            }
        }
    }
    if(!val)
    {
        val = NewVarConstantValue(addrOpnd, isString);
    }

    addrOpnd->SetValueType(val->GetValueInfo()->Type());
    return val;
}
- Value *
- GlobOpt::NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString)
- {
- VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, addrOpnd->m_address, addrOpnd->GetValueType(), false, addrOpnd->m_localAddress);
- Value * value = NewValue(valueInfo);
- this->addrConstantToValueMap->Item(addrOpnd->m_address, value);
- if (isString)
- {
- JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
- Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
- this->stringConstantToValueMap->Item(internalString, value);
- }
- return value;
- }
// Hoists a load of a constant (currently only tagged ints) to the top of the
// function, and propagates its value backward from the current block toward
// the entry block so the value is available at merges.
//
// Returns the (possibly re-inserted) value for the constant. No hoisting is
// done during the loop prepass, or when already in the entry block with an
// instruction that transfers its src value; in those cases 'value' is
// returned unchanged.
Value *
GlobOpt::HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value)
{
    if (this->IsLoopPrePass() ||
        ((this->currentBlock == this->func->m_fg->blockList) &&
        origInstr->TransfersSrcValue()))
    {
        return value;
    }

    // Only hoisting taggable int const loads for now. Could be extended to other constants (floats, strings, addr opnds) if we see some benefit.
    Assert(Js::TaggedInt::Is(varConst));

    // Insert a load of the constant at the top of the function
    StackSym * dstSym = StackSym::New(this->func);
    IR::RegOpnd * constRegOpnd = IR::RegOpnd::New(dstSym, TyVar, this->func);
    IR::Instr * loadInstr = IR::Instr::NewConstantLoad(constRegOpnd, (intptr_t)varConst, ValueType::GetInt(true), this->func);
    this->func->m_fg->blockList->GetFirstInstr()->InsertAfter(loadInstr);

    // Type-spec the load (Support for floats needs to be added when we start hoisting float constants).
    bool typeSpecedToInt = false;
    if (Js::TaggedInt::Is(varConst) && !IsTypeSpecPhaseOff(this->func))
    {
        typeSpecedToInt = true;
        loadInstr->m_opcode = Js::OpCode::Ld_I4;
        ToInt32Dst(loadInstr, loadInstr->GetDst()->AsRegOpnd(), this->currentBlock);
        loadInstr->GetDst()->GetStackSym()->SetIsConst();
    }
    else
    {
        CurrentBlockData()->liveVarSyms->Set(dstSym->m_id);
    }

    // Add the value (object) to the current block's symToValueMap and propagate the value backward to all relevant blocks so it is available on merges.
    value = CurrentBlockData()->InsertNewValue(value, constRegOpnd);

    // Pointer-to-member selecting which per-block liveness bit-vector to
    // update, depending on whether the load was int-specialized above.
    BVSparse<JitArenaAllocator>* GlobOptBlockData::*bv;
    bv = typeSpecedToInt ? &GlobOptBlockData::liveInt32Syms : &GlobOptBlockData::liveVarSyms; // Will need to be expanded when we start hoisting float constants.

    if (this->currentBlock != this->func->m_fg->blockList)
    {
        // Walk backward from the current block to the entry block: mark the
        // new sym live, and install a copy of the value where none exists yet.
        for (InvariantBlockBackwardIterator it(this, this->currentBlock, this->func->m_fg->blockList, nullptr);
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock * block = it.Block();
            (block->globOptData.*bv)->Set(dstSym->m_id);
            if (!block->globOptData.FindValue(dstSym))
            {
                // Same value number in every block so merges recognize it as invariant.
                Value *const valueCopy = CopyValue(value, value->GetValueNumber());
                block->globOptData.SetValue(valueCopy, dstSym);
            }
        }
    }

    return value;
}
// Returns a value for a fixed-function address constant, reusing a cached
// value when it is still live (and still a function constant with the same
// address) in the current block; otherwise creates a new function-flagged
// VarConstant value and caches it. The value is then inserted for addrOpnd
// in the current block.
Value *
GlobOpt::NewFixedFunctionValue(Js::JavascriptFunction *function, IR::AddrOpnd *addrOpnd)
{
    Assert(function != nullptr);

    Value *val = nullptr;
    Value *cachedValue = nullptr;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && CurrentBlockData()->IsLive(symStore))
        {
            Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                if(symStoreValueInfo->IsVarConstant())
                {
                    VarConstantValueInfo *const symStoreVarConstantValueInfo = symStoreValueInfo->AsVarConstant();
                    // Reuse only if it is the same address AND was created as a function value.
                    if(symStoreVarConstantValueInfo->VarValue() == addrOpnd->m_address &&
                        symStoreVarConstantValueInfo->IsFunction())
                    {
                        val = symStoreValue;
                    }
                }
            }
        }
    }
    if(!val)
    {
        // 'true' marks this VarConstant as a function value.
        VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, function, addrOpnd->GetValueType(), true, addrOpnd->m_localAddress);
        val = NewValue(valueInfo);
        this->addrConstantToValueMap->AddNew(addrOpnd->m_address, val);
    }

    CurrentBlockData()->InsertNewValue(val, addrOpnd);
    return val;
}
- StackSym *GlobOpt::GetTaggedIntConstantStackSym(const int32 intConstantValue) const
- {
- Assert(!Js::TaggedInt::IsOverflow(intConstantValue));
- return intConstantToStackSymMap->Lookup(intConstantValue, nullptr);
- }
- StackSym *GlobOpt::GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const
- {
- StackSym *stackSym = GetTaggedIntConstantStackSym(intConstantValue);
- if(stackSym)
- {
- return stackSym;
- }
- stackSym = StackSym::New(TyVar,func);
- intConstantToStackSymMap->Add(intConstantValue, stackSym);
- return stackSym;
- }
- Sym *
- GlobOpt::SetSymStore(ValueInfo *valueInfo, Sym *sym)
- {
- if (sym->IsStackSym())
- {
- StackSym *stackSym = sym->AsStackSym();
- if (stackSym->IsTypeSpec())
- {
- stackSym = stackSym->GetVarEquivSym(this->func);
- sym = stackSym;
- }
- }
- if (valueInfo->GetSymStore() == nullptr || valueInfo->GetSymStore()->IsPropertySym())
- {
- SetSymStoreDirect(valueInfo, sym);
- }
- return sym;
- }
- void
- GlobOpt::SetSymStoreDirect(ValueInfo * valueInfo, Sym * sym)
- {
- Sym * prevSymStore = valueInfo->GetSymStore();
- CurrentBlockData()->SetChangedSym(prevSymStore);
- valueInfo->SetSymStore(sym);
- }
// Figure out the Value of this dst.
//
// Computes (and usually installs in the current block's value table) the
// value for instr's dst, given the values already computed for its sources.
// Returns nullptr when the dst should not be tracked (setter calls, no dst,
// indirect dsts, untracked field stores). May mutate the instruction:
// opcode strengthening (e.g. Conv_PrimStr -> Conv_Str -> Ld_A), freeing
// src2, adjusting bailout kinds, and improving src value types from profile
// data.
Value *
GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *dst = instr->GetDst();
    Value *dstVal = nullptr;
    Sym *sym;

    if (instr->CallsSetter())
    {
        // A setter can run arbitrary script; assume nothing about the dst.
        return nullptr;
    }
    if (dst == nullptr)
    {
        return nullptr;
    }

    // Extract the dst sym; also maintain the isTempSrc tracking for reg dsts.
    switch (dst->GetKind())
    {
    case IR::OpndKindSym:
        sym = dst->AsSymOpnd()->m_sym;
        break;

    case IR::OpndKindReg:
        sym = dst->AsRegOpnd()->m_sym;

        if (OpCodeAttr::TempNumberProducing(instr->m_opcode))
        {
            CurrentBlockData()->isTempSrc->Set(sym->m_id);
        }
        else if (OpCodeAttr::TempNumberTransfer(instr->m_opcode))
        {
            IR::Opnd *src1 = instr->GetSrc1();

            if (src1->IsRegOpnd() && CurrentBlockData()->isTempSrc->Test(src1->AsRegOpnd()->m_sym->m_id))
            {
                StackSym *src1Sym = src1->AsRegOpnd()->m_sym;
                // isTempSrc is used for marking isTempLastUse, which is used to generate AddLeftDead()
                // calls instead of the normal Add helpers. It tells the runtime that concats can use string
                // builders.
                // We need to be careful in the case where src1 points to a string builder and is getting aliased.
                // Clear the bit on src and dst of the transfer instr in this case, unless we can prove src1
                // isn't pointing at a string builder, like if it is single def and the def instr is not an Add,
                // but TempProducing.
                if (src1Sym->IsSingleDef() && src1Sym->m_instrDef->m_opcode != Js::OpCode::Add_A
                    && OpCodeAttr::TempNumberProducing(src1Sym->m_instrDef->m_opcode))
                {
                    CurrentBlockData()->isTempSrc->Set(sym->m_id);
                }
                else
                {
                    CurrentBlockData()->isTempSrc->Clear(src1->AsRegOpnd()->m_sym->m_id);
                    CurrentBlockData()->isTempSrc->Clear(sym->m_id);
                }
            }
            else
            {
                CurrentBlockData()->isTempSrc->Clear(sym->m_id);
            }
        }
        else
        {
            CurrentBlockData()->isTempSrc->Clear(sym->m_id);
        }
        break;

    case IR::OpndKindIndir:
        return nullptr;

    default:
        return nullptr;
    }

    int32 min1, max1, min2, max2, newMin, newMax;
    ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
    ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);

    // Per-opcode value computation. Several cases intentionally fall through
    // after strengthening the opcode.
    switch (instr->m_opcode)
    {
    case Js::OpCode::Conv_PrimStr:
        AssertMsg(instr->GetDst()->GetValueType().IsString(),
            "Creator of this instruction should have set the type");
        if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsPrimitive())
        {
            break;
        }
        // Src is known primitive: the primitive-conversion step is a no-op.
        instr->m_opcode = Js::OpCode::Conv_Str;
        // fall-through

    case Js::OpCode::Conv_Str:
    // This opcode is commented out since we don't track regex information in GlobOpt now.
    //case Js::OpCode::Coerce_Regex:
    case Js::OpCode::Coerce_Str:
        AssertMsg(instr->GetDst()->GetValueType().IsString(),
            "Creator of this instruction should have set the type");
        // Due to fall through and the fact that Ld_A only takes one source,
        // free the other source here.
        if (instr->GetSrc2() && !(this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsString()))
        {
            instr->FreeSrc2();
        }
        // fall-through

    case Js::OpCode::Coerce_StrOrRegex:
        // We don't set the ValueType of src1 for Coerce_StrOrRegex, hence skip the ASSERT
        if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsString())
        {
            break;
        }
        // Src is known string: the coercion degenerates to a plain transfer.
        instr->m_opcode = Js::OpCode::Ld_A;
        // fall-through

    case Js::OpCode::BytecodeArgOutCapture:
    case Js::OpCode::LdAsmJsFunc:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
        // Propagate sym attributes across the reg copy.
        if (!this->IsLoopPrePass() && instr->GetSrc1()->IsRegOpnd())
        {
            if (dst->AsRegOpnd()->m_sym->IsSingleDef())
            {
                dst->AsRegOpnd()->m_sym->CopySymAttrs(instr->GetSrc1()->AsRegOpnd()->m_sym);
            }
        }

        if (instr->IsProfiledInstr())
        {
            const ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
            if(!(
                    profiledValueType.IsLikelyInt() &&
                    (
                        (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotNumber) ||
                        (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber)
                    )
                ))
            {
                if(!src1ValueInfo)
                {
                    dstVal = this->NewGenericValue(profiledValueType, dst);
                }
                else if(src1ValueInfo->IsUninitialized())
                {
                    if(IsLoopPrePass())
                    {
                        dstVal = this->NewGenericValue(profiledValueType, dst);
                    }
                    else
                    {
                        // Assuming the profile data gives more precise value types based on the path it took at runtime, we
                        // can improve the original value type.
                        src1ValueInfo->Type() = profiledValueType;
                        instr->GetSrc1()->SetValueType(profiledValueType);
                    }
                }
            }
        }
        if (dstVal == nullptr)
        {
            // Ld_A is just transferring the value
            dstVal = this->ValueNumberTransferDst(instr, src1Val);
        }
        break;

    case Js::OpCode::ExtendArg_A:
    {
        // SIMD_JS
        // We avoid transforming EAs to Lds to keep the IR shape consistent and avoid CSEing of EAs.
        // CSEOptimize only assigns a Value to the EA dst, and doesn't turn it to a Ld. If this happened, we shouldn't assign a new Value here.
        if (DoCSE())
        {
            IR::Opnd * currDst = instr->GetDst();
            Value * currDstVal = CurrentBlockData()->FindValue(currDst->GetStackSym());
            if (currDstVal != nullptr)
            {
                return currDstVal;
            }
        }
        break;
    }

    case Js::OpCode::CheckFixedFld:
        AssertMsg(false, "CheckFixedFld doesn't have a dst, so we should never get here");
        break;

    case Js::OpCode::LdSlot:
    case Js::OpCode::LdSlotArr:
    case Js::OpCode::LdFld:
    case Js::OpCode::LdFldForTypeOf:
    case Js::OpCode::LdFldForCallApplyTarget:
    // Do not transfer value type on LdRootFldForTypeOf to prevent copy-prop to LdRootFld in case the field doesn't exist since LdRootFldForTypeOf does not throw.
    // Same goes for ScopedLdFldForTypeOf as we'll end up loading the property from the root object if the property is not in the scope chain.
    //case Js::OpCode::LdRootFldForTypeOf:
    //case Js::OpCode::ScopedLdFldForTypeOf:
    case Js::OpCode::LdRootFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::LdRootMethodFld:
    case Js::OpCode::ScopedLdMethodFld:
    case Js::OpCode::LdMethodFromFlags:
    case Js::OpCode::ScopedLdFld:
        if (instr->IsProfiledInstr())
        {
            ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
            if(!(profiledValueType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotNumber))
            {
                if(!src1ValueInfo)
                {
                    dstVal = this->NewGenericValue(profiledValueType, dst);
                }
                else if(src1ValueInfo->IsUninitialized())
                {
                    if(IsLoopPrePass() && (!dst->IsRegOpnd() || !dst->AsRegOpnd()->m_sym->IsSingleDef()))
                    {
                        dstVal = this->NewGenericValue(profiledValueType, dst);
                    }
                    else
                    {
                        // Assuming the profile data gives more precise value types based on the path it took at runtime, we
                        // can improve the original value type.
                        src1ValueInfo->Type() = profiledValueType;
                        instr->GetSrc1()->SetValueType(profiledValueType);
                    }
                }
            }
        }
        if (dstVal == nullptr)
        {
            dstVal = this->ValueNumberTransferDst(instr, src1Val);
        }

        if(!this->IsLoopPrePass())
        {
            // We cannot transfer value if the field hasn't been copy prop'd because we don't generate
            // an implicit call bailout between those values if we don't have "live fields" unless, we are hoisting the field.
            ValueInfo *dstValueInfo = (dstVal ? dstVal->GetValueInfo() : nullptr);

            // Update symStore if it isn't a stackSym
            if (dstVal && (!dstValueInfo->GetSymStore() || !dstValueInfo->GetSymStore()->IsStackSym()))
            {
                Assert(dst->IsRegOpnd());
                this->SetSymStoreDirect(dstValueInfo, dst->AsRegOpnd()->m_sym);
            }
            if (src1Val != dstVal)
            {
                CurrentBlockData()->SetValue(dstVal, instr->GetSrc1());
            }
        }
        break;

    case Js::OpCode::LdC_A_R8:
    case Js::OpCode::LdC_A_I4:
    case Js::OpCode::ArgIn_A:
        dstVal = src1Val;
        break;

    case Js::OpCode::LdStr:
        if (src1Val == nullptr)
        {
            src1Val = NewGenericValue(ValueType::String, dst);
        }
        dstVal = src1Val;
        break;

    // LdElemUndef only assign undef if the field doesn't exist.
    // So we don't actually know what the value is, so we can't really copy prop it.
    //case Js::OpCode::LdElemUndef:

    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StFldStrict:
    case Js::OpCode::StRootFldStrict:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitComputedProperty:
        if (DoFieldCopyProp())
        {
            if (src1Val == nullptr)
            {
                // src1 may have no value if it's not a valid var, e.g., NULL for let/const initialization.
                // Consider creating generic values for such things.
                return nullptr;
            }
            AssertMsg(!src2Val, "Bad src Values...");

            Assert(sym->IsPropertySym());
            SymID symId = sym->m_id;
            Assert(instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl || !CurrentBlockData()->liveFields->Test(symId));
            CurrentBlockData()->liveFields->Set(symId);
            if (!this->IsLoopPrePass() && dst->GetIsDead())
            {
                // Take the property sym out of the live fields set (with special handling for loops).
                this->EndFieldLifetime(dst->AsSymOpnd());
            }

            dstVal = this->ValueNumberTransferDst(instr, src1Val);
        }
        else
        {
            return nullptr;
        }
        break;

    case Js::OpCode::Conv_Num:
        if(src1ValueInfo->IsNumber())
        {
            dstVal = ValueNumberTransferDst(instr, src1Val);
        }
        else
        {
            return NewGenericValue(src1ValueInfo->Type().ToDefiniteAnyNumber().SetCanBeTaggedValue(true), dst);
        }
        break;

    case Js::OpCode::Not_A:
    {
        if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
        {
            min1 = INT32_MIN;
            max1 = INT32_MAX;
        }

        this->PropagateIntRangeForNot(min1, max1, &newMin, &newMax);
        return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
    }

    case Js::OpCode::Xor_A:
    case Js::OpCode::Or_A:
    case Js::OpCode::And_A:
    case Js::OpCode::Shl_A:
    case Js::OpCode::Shr_A:
    case Js::OpCode::ShrU_A:
    {
        if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
        {
            min1 = INT32_MIN;
            max1 = INT32_MAX;
        }
        if (!src2Val || !src2ValueInfo->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec()))
        {
            min2 = INT32_MIN;
            max2 = INT32_MAX;
        }

        if (instr->m_opcode == Js::OpCode::ShrU_A &&
            min1 < 0 &&
            IntConstantBounds(min2, max2).And_0x1f().Contains(0))
        {
            // Src1 may be too large to represent as a signed int32, and src2 may be zero.
            // Since the result can therefore be too large to represent as a signed int32,
            // include Number in the value type.
            return CreateDstUntransferredValue(
                ValueType::AnyNumber.SetCanBeTaggedValue(true), instr, src1Val, src2Val);
        }

        this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
        return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
    }

    case Js::OpCode::Incr_A:
    case Js::OpCode::Decr_A:
    {
        ValueType valueType;
        if(src1Val)
        {
            valueType = src1Val->GetValueInfo()->Type().ToDefiniteAnyNumber();
        }
        else
        {
            valueType = ValueType::Number;
        }
        return CreateDstUntransferredValue(valueType.SetCanBeTaggedValue(true), instr, src1Val, src2Val);
    }

    case Js::OpCode::Add_A:
    {
        ValueType valueType;
        if (src1Val && src1ValueInfo->IsLikelyNumber() && src2Val && src2ValueInfo->IsLikelyNumber())
        {
            if(src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())
            {
                // When doing aggressiveIntType, just assume the result is likely going to be int
                // if both input is int.
                const bool isLikelyTagged = src1ValueInfo->IsLikelyTaggedInt() && src2ValueInfo->IsLikelyTaggedInt();
                if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
                {
                    // If both of them are numbers then we can definitely say that the result is a number.
                    valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
                }
                else
                {
                    // This is only likely going to be int but can be a string as well.
                    valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
                }
            }
            else
            {
                // We can only be certain of any thing if both of them are numbers.
                // Otherwise, the result could be string.
                if (src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
                {
                    if (src1ValueInfo->IsFloat() || src2ValueInfo->IsFloat())
                    {
                        // If one of them is a float, the result probably is a float instead of just int
                        // but should always be a number.
                        valueType = ValueType::Float.SetCanBeTaggedValue(true);
                    }
                    else
                    {
                        // Could be int, could be number
                        valueType = ValueType::Number.SetCanBeTaggedValue(true);
                    }
                }
                else if (src1ValueInfo->IsLikelyFloat() || src2ValueInfo->IsLikelyFloat())
                {
                    // Result is likely a float (but can be anything)
                    valueType = ValueType::Float.ToLikely();
                }
                else
                {
                    // Otherwise it is a likely int or float (but can be anything)
                    valueType = ValueType::Number.ToLikely();
                }
            }
        }
        else if((src1Val && src1ValueInfo->IsString()) || (src2Val && src2ValueInfo->IsString()))
        {
            // String + anything should always result in a string
            valueType = ValueType::String;
        }
        else if((src1Val && src1ValueInfo->IsNotString() && src1ValueInfo->IsPrimitive())
            && (src2Val && src2ValueInfo->IsNotString() && src2ValueInfo->IsPrimitive()))
        {
            // If src1 and src2 are not strings and primitive, add should yield a number.
            valueType = ValueType::Number.SetCanBeTaggedValue(true);
        }
        else if((src1Val && src1ValueInfo->IsLikelyString()) || (src2Val && src2ValueInfo->IsLikelyString()))
        {
            // likelystring + anything should always result in a likelystring
            valueType = ValueType::String.ToLikely();
        }
        else
        {
            // Number or string. Could make the value a merge of Number and String, but Uninitialized is more useful at the moment.
            Assert(valueType.IsUninitialized());
        }

        return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
    }

    case Js::OpCode::Div_A:
    {
        ValueType divValueType = GetDivValueType(instr, src1Val, src2Val, false);
        if (divValueType.IsLikelyInt() || divValueType.IsFloat())
        {
            return CreateDstUntransferredValue(divValueType.SetCanBeTaggedValue(true), instr, src1Val, src2Val);
        }
    }
    // fall-through

    case Js::OpCode::Sub_A:
    case Js::OpCode::Mul_A:
    case Js::OpCode::Rem_A:
    {
        ValueType valueType;
        if( src1Val &&
            src1ValueInfo->IsLikelyInt() &&
            src2Val &&
            src2ValueInfo->IsLikelyInt() &&
            instr->m_opcode != Js::OpCode::Div_A)
        {
            const bool isLikelyTagged =
                src1ValueInfo->IsLikelyTaggedInt() && (src2ValueInfo->IsLikelyTaggedInt() || instr->m_opcode == Js::OpCode::Rem_A);
            if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
            {
                valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
            }
            else
            {
                valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
            }
        }
        else if ((src1Val && src1ValueInfo->IsLikelyFloat()) || (src2Val && src2ValueInfo->IsLikelyFloat()))
        {
            // This should ideally be NewNumberAndLikelyFloatValue since we know the result is a number but not sure if it will
            // be a float value. However, that Number/LikelyFloat value type doesn't exist currently and all the necessary
            // checks are done for float values (tagged int checks, etc.) so it's sufficient to just create a float value here.
            valueType = ValueType::Float.SetCanBeTaggedValue(true);
        }
        else
        {
            valueType = ValueType::Number.SetCanBeTaggedValue(true);
        }

        return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
    }

    case Js::OpCode::CallI:
        Assert(dst->IsRegOpnd());
        return NewGenericValue(dst->AsRegOpnd()->GetValueType(), dst);

    case Js::OpCode::LdElemI_A:
    {
        dstVal = ValueNumberLdElemDst(pInstr, src1Val);
        const ValueType baseValueType(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
        if( (
                baseValueType.IsLikelyNativeArray() ||
            #ifdef _M_IX86
                (
                    !AutoSystemInfo::Data.SSE2Available() &&
                    baseValueType.IsLikelyObject() &&
                    (
                        baseValueType.GetObjectType() == ObjectType::Float32Array ||
                        baseValueType.GetObjectType() == ObjectType::Float64Array
                    )
                )
            #else
                false
            #endif
            ) &&
            instr->GetDst()->IsVar() &&
            instr->HasBailOutInfo())
        {
            // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
            // path. Note that the removed bailouts should not be necessary for correctness.
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
            {
                bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
            }
            if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
            {
                bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
            }
            if(bailOutKind)
            {
                instr->SetBailOutKind(bailOutKind);
            }
            else
            {
                instr->ClearBailOutInfo();
            }
        }
        return dstVal;
    }

    case Js::OpCode::LdMethodElem:
        // Not worth profiling this, just assume it's likely object (should be likely function but ValueType does not track
        // functions currently, so using ObjectType::Object instead)
        dstVal = NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely(), dst);
        if(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray() && instr->HasBailOutInfo())
        {
            // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
            // path. Note that the removed bailouts should not be necessary for correctness.
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
            {
                bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
            }
            if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
            {
                bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
            }
            if(bailOutKind)
            {
                instr->SetBailOutKind(bailOutKind);
            }
            else
            {
                instr->ClearBailOutInfo();
            }
        }
        return dstVal;

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
        dstVal = this->ValueNumberTransferDst(instr, src1Val);
        break;

    case Js::OpCode::LdLen_A:
        if (instr->IsProfiledInstr())
        {
            const ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
            if(!(profiledValueType.IsLikelyInt() && dst->AsRegOpnd()->m_sym->m_isNotNumber))
            {
                return this->NewGenericValue(profiledValueType, dst);
            }
        }
        break;

    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        Assert(dst->IsRegOpnd());
        Assert(dst->GetValueType().IsString());
        return this->NewGenericValue(ValueType::String, dst);

    case Js::OpCode::IsInst:
    case Js::OpCode::LdTrue:
    case Js::OpCode::LdFalse:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmLe_A:
    case Js::OpCode::CmUnLe_A:
    case Js::OpCode::CmLt_A:
    case Js::OpCode::CmUnLt_A:
    case Js::OpCode::CmGe_A:
    case Js::OpCode::CmUnGe_A:
    case Js::OpCode::CmGt_A:
    case Js::OpCode::CmUnGt_A:
        return this->NewGenericValue(ValueType::Boolean, dst);

    case Js::OpCode::LdUndef:
        return this->NewGenericValue(ValueType::Undefined, dst);

    case Js::OpCode::LdC_A_Null:
        return this->NewGenericValue(ValueType::Null, dst);

    case Js::OpCode::LdThis:
        if (!PHASE_OFF(Js::OptTagChecksPhase, this->func) &&
            (src1ValueInfo == nullptr || src1ValueInfo->IsUninitialized()))
        {
            return this->NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely().SetCanBeTaggedValue(false), dst);
        }
        break;

    case Js::OpCode::Typeof:
    case Js::OpCode::TypeofElem:
        return this->NewGenericValue(ValueType::String, dst);

    case Js::OpCode::InitLocalClosure:
        Assert(instr->GetDst());
        Assert(instr->GetDst()->IsRegOpnd());
        IR::RegOpnd *regOpnd = instr->GetDst()->AsRegOpnd();
        StackSym *opndStackSym = regOpnd->m_sym;
        Assert(opndStackSym != nullptr);
        ObjectSymInfo *objectSymInfo = opndStackSym->m_objectInfo;
        Assert(objectSymInfo != nullptr);
        // Record every property sym hanging off the closure object as a slot sym.
        for (PropertySym *localVarSlotList = objectSymInfo->m_propertySymList; localVarSlotList; localVarSlotList = localVarSlotList->m_nextInStackSymList)
        {
            this->slotSyms->Set(localVarSlotList->m_id);
        }
        break;
    }

    if (dstVal == nullptr)
    {
        // No opcode-specific value was produced: fall back to a generic value
        // based on the dst's declared value type.
        return this->NewGenericValue(dst->GetValueType(), dst);
    }

    return CurrentBlockData()->SetValue(dstVal, dst);
}
- Value *
- GlobOpt::ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal)
- {
- IR::Instr *&instr = *pInstr;
- IR::Opnd *dst = instr->GetDst();
- Value *dstVal = nullptr;
- int32 newMin, newMax;
- ValueInfo *srcValueInfo = (srcVal ? srcVal->GetValueInfo() : nullptr);
- ValueType profiledElementType;
- if (instr->IsProfiledInstr())
- {
- profiledElementType = instr->AsProfiledInstr()->u.ldElemInfo->GetElementType();
- if(!(profiledElementType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotNumber) &&
- srcVal &&
- srcValueInfo->IsUninitialized())
- {
- if(IsLoopPrePass())
- {
- dstVal = NewGenericValue(profiledElementType, dst);
- }
- else
- {
- // Assuming the profile data gives more precise value types based on the path it took at runtime, we
- // can improve the original value type.
- srcValueInfo->Type() = profiledElementType;
- instr->GetSrc1()->SetValueType(profiledElementType);
- }
- }
- }
-
- IR::IndirOpnd *src = instr->GetSrc1()->AsIndirOpnd();
- const ValueType baseValueType(src->GetBaseOpnd()->GetValueType());
- if (instr->DoStackArgsOpt() ||
- !(
- baseValueType.IsLikelyOptimizedTypedArray() ||
- (baseValueType.IsLikelyNativeArray() && instr->IsProfiledInstr()) // Specialized native array lowering for LdElem requires that it is profiled.
- ) ||
- (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
- // Don't do type spec on native array with a history of accessing gaps, as this is a bailout
- (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
- !ShouldExpectConventionalArrayIndexValue(src))
- {
- if(DoTypedArrayTypeSpec() && !IsLoopPrePass())
- {
- GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access.\n"));
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because %s.\n"),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
- baseValueTypeStr,
- instr->DoStackArgsOpt() ? _u("instruction uses the arguments object") :
- baseValueType.IsLikelyOptimizedTypedArray() ? _u("index is negative or likely not int") : _u("of array type"));
- Output::Flush();
- }
- }
- if(!dstVal)
- {
- if(srcVal)
- {
- dstVal = this->ValueNumberTransferDst(instr, srcVal);
- }
- else
- {
- dstVal = NewGenericValue(profiledElementType, dst);
- }
- }
- return dstVal;
- }
- Assert(instr->GetSrc1()->IsIndirOpnd());
- IRType toType = TyVar;
- IR::BailOutKind bailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;
- switch(baseValueType.GetObjectType())
- {
- case ObjectType::Int8Array:
- case ObjectType::Int8VirtualArray:
- case ObjectType::Int8MixedArray:
- newMin = Int8ConstMin;
- newMax = Int8ConstMax;
- goto IntArrayCommon;
- case ObjectType::Uint8Array:
- case ObjectType::Uint8VirtualArray:
- case ObjectType::Uint8MixedArray:
- case ObjectType::Uint8ClampedArray:
- case ObjectType::Uint8ClampedVirtualArray:
- case ObjectType::Uint8ClampedMixedArray:
- newMin = Uint8ConstMin;
- newMax = Uint8ConstMax;
- goto IntArrayCommon;
- case ObjectType::Int16Array:
- case ObjectType::Int16VirtualArray:
- case ObjectType::Int16MixedArray:
- newMin = Int16ConstMin;
- newMax = Int16ConstMax;
- goto IntArrayCommon;
- case ObjectType::Uint16Array:
- case ObjectType::Uint16VirtualArray:
- case ObjectType::Uint16MixedArray:
- newMin = Uint16ConstMin;
- newMax = Uint16ConstMax;
- goto IntArrayCommon;
- case ObjectType::Int32Array:
- case ObjectType::Int32VirtualArray:
- case ObjectType::Int32MixedArray:
- case ObjectType::Uint32Array: // int-specialized loads from uint32 arrays will bail out on values that don't fit in an int32
- case ObjectType::Uint32VirtualArray:
- case ObjectType::Uint32MixedArray:
- Int32Array:
- newMin = Int32ConstMin;
- newMax = Int32ConstMax;
- goto IntArrayCommon;
- IntArrayCommon:
- Assert(dst->IsRegOpnd());
- // If int type spec is disabled, it is ok to load int values as they can help float type spec, and merging int32 with float64 => float64.
- // But if float type spec is also disabled, we'll have problems because float64 merged with var => float64...
- if (!this->DoAggressiveIntTypeSpec() && !this->DoFloatTypeSpec())
- {
- if (!dstVal)
- {
- if (srcVal)
- {
- dstVal = this->ValueNumberTransferDst(instr, srcVal);
- }
- else
- {
- dstVal = NewGenericValue(profiledElementType, dst);
- }
- }
- return dstVal;
- }
- if (!this->IsLoopPrePass())
- {
- if (instr->HasBailOutInfo())
- {
- const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
- Assert(
- (
- !(oldBailOutKind & ~IR::BailOutKindBits) ||
- (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
- ) &&
- !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
- if (bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
- {
- // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
- // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
- // bails out for the right reason.
- instr->SetBailOutKind(
- bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
- }
- else
- {
- // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
- // calls to occur, so it must be merged in to eliminate generating the helper call
- Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
- instr->SetBailOutKind(oldBailOutKind | bailOutKind);
- }
- }
- else
- {
- GenerateBailAtOperation(&instr, bailOutKind);
- }
- }
- TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, bailOutKind, newMin, newMax, &dstVal);
- toType = TyInt32;
- break;
- case ObjectType::Float32Array:
- case ObjectType::Float32VirtualArray:
- case ObjectType::Float32MixedArray:
- case ObjectType::Float64Array:
- case ObjectType::Float64VirtualArray:
- case ObjectType::Float64MixedArray:
- Float64Array:
- Assert(dst->IsRegOpnd());
- // If float type spec is disabled, don't load float64 values
- if (!this->DoFloatTypeSpec())
- {
- if (!dstVal)
- {
- if (srcVal)
- {
- dstVal = this->ValueNumberTransferDst(instr, srcVal);
- }
- else
- {
- dstVal = NewGenericValue(profiledElementType, dst);
- }
- }
- return dstVal;
- }
- if (!this->IsLoopPrePass())
- {
- if (instr->HasBailOutInfo())
- {
- const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
- Assert(
- (
- !(oldBailOutKind & ~IR::BailOutKindBits) ||
- (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
- ) &&
- !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
- if (bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
- {
- // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
- // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
- // bails out for the right reason.
- instr->SetBailOutKind(
- bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
- }
- else
- {
- // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
- // calls to occur, so it must be merged in to eliminate generating the helper call
- Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
- instr->SetBailOutKind(oldBailOutKind | bailOutKind);
- }
- }
- else
- {
- GenerateBailAtOperation(&instr, bailOutKind);
- }
- }
- TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, &dstVal);
- toType = TyFloat64;
- break;
- default:
- Assert(baseValueType.IsLikelyNativeArray());
- bailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
- if(baseValueType.HasIntElements())
- {
- goto Int32Array;
- }
- Assert(baseValueType.HasFloatElements());
- goto Float64Array;
- }
- if(!dstVal)
- {
- dstVal = NewGenericValue(profiledElementType, dst);
- }
- Assert(toType != TyVar);
- GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- char dstValTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- dstVal->GetValueInfo()->Type().ToString(dstValTypeStr);
- Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s producing %S"),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
- baseValueTypeStr,
- toType == TyInt32 ? _u("int32") : _u("float64"),
- dstValTypeStr);
- #if DBG_DUMP
- Output::Print(_u(" ("));
- dstVal->Dump();
- Output::Print(_u(").\n"));
- #else
- Output::Print(_u(".\n"));
- #endif
- Output::Flush();
- }
- return dstVal;
- }
- ValueType
- GlobOpt::GetPrepassValueTypeForDst(
- const ValueType desiredValueType,
- IR::Instr *const instr,
- Value *const src1Value,
- Value *const src2Value,
- bool const isValueInfoPrecise,
- bool const isSafeToTransferInPrepass) const
- {
- // Values with definite types can be created in the loop prepass only when it is guaranteed that the value type will be the
- // same on any iteration of the loop. The heuristics currently used are:
- // - If the source sym is not live on the back-edge, then it acquires a new value for each iteration of the loop, so
- // that value type can be definite
- // - Consider: A better solution for this is to track values that originate in this loop, which can have definite value
- // types. That catches more cases, should look into that in the future.
- // - If the source sym has a constant value that doesn't change for the duration of the function
- // - The operation always results in a definite value type. For instance, signed bitwise operations always result in an
- // int32, conv_num and ++ always result in a number, etc.
- // - For operations that always result in an int32, the resulting int range is precise only if the source syms pass
- // the above heuristics. Otherwise, the range must be expanded to the full int32 range.
- Assert(IsLoopPrePass());
- Assert(instr);
- if(!isValueInfoPrecise)
- {
- if(!desiredValueType.IsDefinite())
- {
- return isSafeToTransferInPrepass ? desiredValueType : desiredValueType.SetCanBeTaggedValue(true);
- }
- // If the desired value type is not precise, the value type of the destination is derived from the value types of the
- // sources. Since the value type of a source sym is not definite, the destination value type also cannot be definite.
- if(desiredValueType.IsInt() && OpCodeAttr::IsInt32(instr->m_opcode))
- {
- // The op always produces an int32, but not always a tagged int
- return ValueType::GetInt(desiredValueType.IsLikelyTaggedInt());
- }
- if(desiredValueType.IsNumber() && OpCodeAttr::ProducesNumber(instr->m_opcode))
- {
- // The op always produces a number, but not always an int
- return desiredValueType.ToDefiniteAnyNumber();
- }
- // Note: ToLikely() also sets CanBeTaggedValue
- return desiredValueType.ToLikely();
- }
- return desiredValueType;
- }
- bool
- GlobOpt::IsPrepassSrcValueInfoPrecise(IR::Instr *const instr, Value *const src1Value, Value *const src2Value, bool * isSafeToTransferInPrepass) const
- {
- return
- (!instr->GetSrc1() || IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Value, isSafeToTransferInPrepass)) &&
- (!instr->GetSrc2() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Value, isSafeToTransferInPrepass));
- }
- bool
- GlobOpt::IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue, bool * isSafeToTransferInPrepass) const
- {
- Assert(IsLoopPrePass());
- Assert(src);
- if (isSafeToTransferInPrepass)
- {
- *isSafeToTransferInPrepass = false;
- }
- if (src->IsAddrOpnd() &&
- srcValue->GetValueInfo()->GetSymStore() &&
- srcValue->GetValueInfo()->GetSymStore()->IsStackSym() &&
- srcValue->GetValueInfo()->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable())
- {
- if (isSafeToTransferInPrepass)
- {
- *isSafeToTransferInPrepass = false;
- }
- return true;
- }
- if (!src->IsRegOpnd() || !srcValue)
- {
- return false;
- }
- ValueInfo *const srcValueInfo = srcValue->GetValueInfo();
- bool isValueInfoDefinite = srcValueInfo->IsDefinite();
- StackSym * srcSym = src->AsRegOpnd()->m_sym;
- bool isSafeToTransfer = IsSafeToTransferInPrepass(srcSym, srcValueInfo);
- if (isSafeToTransferInPrepass)
- {
- *isSafeToTransferInPrepass = isSafeToTransfer;
- }
- return isValueInfoDefinite && isSafeToTransfer;
- }
- bool
- GlobOpt::IsSafeToTransferInPrepass(StackSym * const srcSym, ValueInfo *const srcValueInfo) const
- {
- int32 intConstantValue;
- return
- srcSym->IsFromByteCodeConstantTable() ||
- (
- srcValueInfo->TryGetIntConstantValue(&intConstantValue) &&
- !Js::TaggedInt::IsOverflow(intConstantValue) &&
- GetTaggedIntConstantStackSym(intConstantValue) == srcSym
- ) ||
- !currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(srcSym->m_id) ||
- !currentBlock->loop->IsSymAssignedToInSelfOrParents(srcSym);
- }
- bool
- GlobOpt::SafeToCopyPropInPrepass(StackSym * const originalSym, StackSym * const copySym, Value *const value) const
- {
- Assert(this->currentBlock->globOptData.GetCopyPropSym(originalSym, value) == copySym);
- // In the following example, to copy-prop s2 into s1, it is not enough to check if s1 and s2 are safe to transfer.
- // In fact, both s1 and s2 are safe to transfer, but it is not legal to copy prop s2 into s1.
- //
- // s1 = s2
- // $Loop:
- // s3 = s1
- // s2 = s4
- // Br $Loop
- //
- // In general, requirements for copy-propping in prepass are more restricted than those for transferring values.
- // For copy prop in prepass, if the original sym is live on back-edge, then the copy-prop sym should not be written to
- // in the loop (or its parents)
-
- ValueInfo* const valueInfo = value->GetValueInfo();
- return IsSafeToTransferInPrepass(originalSym, valueInfo) &&
- IsSafeToTransferInPrepass(copySym, valueInfo) &&
- (!currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(originalSym->m_id) || !currentBlock->loop->IsSymAssignedToInSelfOrParents(copySym));
- }
// Creates a new (non-transferred) value for the instruction's dst, known to be an int32 in [min, max].
// Outside the loop prepass a precise int-range value is created; in the prepass the type may be weakened
// based on how precise the source values are, in which case a generic value is created instead.
Value *GlobOpt::CreateDstUntransferredIntValue(
    const int32 min,
    const int32 max,
    IR::Instr *const instr,
    Value *const src1Value,
    Value *const src2Value)
{
    Assert(instr);
    Assert(instr->GetDst());

    // Add_A only qualifies here when both sources are known numbers; other opcodes must themselves
    // guarantee a numeric result.
    Assert(OpCodeAttr::ProducesNumber(instr->m_opcode)
        || (instr->m_opcode == Js::OpCode::Add_A && src1Value->GetValueInfo()->IsNumber()
        && src2Value->GetValueInfo()->IsNumber()));

    ValueType valueType(ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
    Assert(valueType.IsInt());
    bool isValueInfoPrecise;
    if(IsLoopPrePass())
    {
        isValueInfoPrecise = IsPrepassSrcValueInfoPrecise(instr, src1Value, src2Value);
        valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, isValueInfoPrecise);
    }
    else
    {
        isValueInfoPrecise = true;
    }

    IR::Opnd *const dst = instr->GetDst();
    if(isValueInfoPrecise)
    {
        // The range is exact, so an int-range value can be used rather than a generic int value.
        Assert(valueType == ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
        Assert(!(dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsTypeSpec()));
        return NewIntRangeValue(min, max, false, dst);
    }
    return NewGenericValue(valueType, dst);
}
- Value *
- GlobOpt::CreateDstUntransferredValue(
- const ValueType desiredValueType,
- IR::Instr *const instr,
- Value *const src1Value,
- Value *const src2Value)
- {
- Assert(instr);
- Assert(instr->GetDst());
- Assert(!desiredValueType.IsInt()); // use CreateDstUntransferredIntValue instead
- ValueType valueType(desiredValueType);
- if(IsLoopPrePass())
- {
- valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, IsPrepassSrcValueInfoPrecise(instr, src1Value, src2Value));
- }
- return NewGenericValue(valueType, instr->GetDst());
- }
// Transfers src1's value to the instruction's dst. Outside the loop prepass the value is transferred
// as-is; in the prepass it may instead be copied or weakened (see ValueNumberTransferDstInPrepass).
Value *
GlobOpt::ValueNumberTransferDst(IR::Instr *const instr, Value * src1Val)
{
    Value *dstVal = this->IsLoopPrePass() ? this->ValueNumberTransferDstInPrepass(instr, src1Val) : src1Val;

    // Don't copy-prop a temp over a user symbol. This is likely to extend the temp's lifetime, as the user symbol
    // is more likely to already have later references.
    // REVIEW: Enabling this does cause perf issues...
#if 0
    // NOTE: this disabled block references `dst`, which is not defined in this function; it would need
    // fixing up before re-enabling.
    if (dstVal != src1Val)
    {
        return dstVal;
    }

    Sym *dstSym = dst->GetStackSym();

    if (dstVal && dstSym && dstSym->IsStackSym() && !dstSym->AsStackSym()->m_isBytecodeTmp)
    {
        Sym *dstValSym = dstVal->GetValueInfo()->GetSymStore();
        if (dstValSym && dstValSym->AsStackSym()->m_isBytecodeTmp /* src->GetIsDead()*/)
        {
            dstVal->GetValueInfo()->SetSymStore(dstSym);
        }
    }
#endif

    return dstVal;
}
- bool
- GlobOpt::IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue)
- {
- if (src->IsRegOpnd())
- {
- StackSym *srcSym = src->AsRegOpnd()->m_sym;
- if (srcSym->IsFromByteCodeConstantTable())
- {
- return true;
- }
- ValueInfo *srcValueInfo = srcValue->GetValueInfo();
- int32 srcIntConstantValue;
- if (srcValueInfo->TryGetIntConstantValue(&srcIntConstantValue) && !Js::TaggedInt::IsOverflow(srcIntConstantValue)
- && GetTaggedIntConstantStackSym(srcIntConstantValue) == srcSym)
- {
- return true;
- }
- }
- return false;
- }
// Prepass version of value transfer for dst. Since src1's value could change on a later loop iteration,
// the value is either transferred directly (when precise or safe to transfer), copied under a new value
// number, or replaced with a fresh generic value of a weakened type. Returns null when src1 has no value.
Value *
GlobOpt::ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val)
{
    Value *dstVal = nullptr;

    if (!src1Val)
    {
        return nullptr;
    }

    bool isValueInfoPrecise;
    ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();

    // TODO: This conflicts with new values created by the type specialization code
    // We should re-enable if we change that code to avoid the new values.
#if 0
    if (this->IsSafeToTransferInPrePass(instr->GetSrc1(), src1Val))
    {
        return src1Val;
    }

    if (this->IsPREInstrCandidateLoad(instr->m_opcode) && instr->GetDst())
    {
        StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;

        for (Loop *curLoop = this->currentBlock->loop; curLoop; curLoop = curLoop->parent)
        {
            if (curLoop->fieldPRESymStore->Test(dstSym->m_id))
            {
                return src1Val;
            }
        }
    }

    if (instr->GetDst()->IsRegOpnd())
    {
        StackSym *stackSym = instr->GetDst()->AsRegOpnd()->m_sym;
        if (stackSym->IsSingleDef() || this->IsLive(stackSym, this->prePassLoop->landingPad))
        {
            IntConstantBounds src1IntConstantBounds;
            if (src1ValueInfo->TryGetIntConstantBounds(&src1IntConstantBounds) &&
                !(
                    src1IntConstantBounds.LowerBound() == INT32_MIN &&
                    src1IntConstantBounds.UpperBound() == INT32_MAX
                ))
            {
                const ValueType valueType(
                    GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
                if (isValueInfoPrecise)
                {
                    return src1Val;
                }
            }
            else
            {
                return src1Val;
            }
        }
    }
#endif

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // first pass when transferring a value that is live on the back-edge.
    // In prepass we are going to copy the value but with a different value number
    // for aggressive int type spec.
    bool isSafeToTransferInPrepass = false;
    isValueInfoPrecise = IsPrepassSrcValueInfoPrecise(instr, src1Val, nullptr, &isSafeToTransferInPrepass);

    const ValueType valueType(GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, isValueInfoPrecise, isSafeToTransferInPrepass));
    if(isValueInfoPrecise || isSafeToTransferInPrepass)
    {
        // Safe to reuse the same value (same value number) across iterations, unless the
        // AVTInPrePass phase is disabled, in which case a copy with a new value number is made.
        Assert(valueType == src1ValueInfo->Type());
        if (!PHASE_OFF1(Js::AVTInPrePassPhase))
        {
            dstVal = src1Val;
        }
        else
        {
            dstVal = CopyValue(src1Val);
            TrackCopiedValueForKills(dstVal);
        }
    }
    else if (valueType == src1ValueInfo->Type() && src1ValueInfo->IsGeneric()) // this else branch is probably not needed
    {
        // Type unchanged and generic: copy under a new value number.
        Assert(valueType == src1ValueInfo->Type());
        dstVal = CopyValue(src1Val);
        TrackCopiedValueForKills(dstVal);
    }
    else
    {
        // The type was weakened: create a fresh value but keep the sym store association.
        dstVal = NewGenericValue(valueType);
        dstVal->GetValueInfo()->SetSymStore(src1ValueInfo->GetSymStore());
    }
    return dstVal;
}
- void
- GlobOpt::PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32* pNewMax)
- {
- int32 tmp;
- Int32Math::Not(minimum, pNewMin);
- *pNewMax = *pNewMin;
- Int32Math::Not(maximum, &tmp);
- *pNewMin = min(*pNewMin, tmp);
- *pNewMax = max(*pNewMax, tmp);
- }
// Computes a conservative int32 range [*pNewMin, *pNewMax] for the result of a binary bitwise/shift
// operation, given the ranges [min1, max1] and [min2, max2] of its two sources. Opcodes not handled by the
// switch fall through and produce the full int32 range.
void
GlobOpt::PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
    int32 min2, int32 max2, int32 *pNewMin, int32* pNewMax)
{
    int32 min, max, tmp, tmp2;

    min = INT32_MIN;
    max = INT32_MAX;

    switch (instr->m_opcode)
    {
    case Js::OpCode::Xor_A:
    case Js::OpCode::Or_A:
        // Find range with highest high order bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp > (uint32)tmp2)
        {
            max = tmp;
        }
        else
        {
            max = tmp2;
        }

        if (max < 0)
        {
            min = INT32_MIN;  // REVIEW: conservative...
            max = INT32_MAX;
        }
        else
        {
            // Turn values like 0x1010 into 0x1111
            max = 1 << Math::Log2(max);
            max = (uint32)(max << 1) - 1;
            min = 0;
        }

        break;

    case Js::OpCode::And_A:

        if (min1 == INT32_MIN && min2 == INT32_MIN)
        {
            // Shortcut
            break;
        }

        // Find range with lowest higher bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp < (uint32)tmp2)
        {
            min = min1;
            max = max1;
        }
        else
        {
            min = min2;
            max = max2;
        }

        // To compute max, look if min has higher high bit
        if ((uint32)min > (uint32)max)
        {
            max = min;
        }

        // If max is negative, max let's assume it could be -1, so result in MAX_INT
        if (max < 0)
        {
            max = INT32_MAX;
        }

        // If min is positive, the resulting min is zero
        if (min >= 0)
        {
            min = 0;
        }
        else
        {
            min = INT32_MIN;
        }
        break;

    case Js::OpCode::Shl_A:
        {
            // Shift count: normalize to [0, 31], widening to the full shift range when the source range
            // straddles a wrap-around of the 5-bit count.
            if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
            {
                min2 = 0;
                max2 = 0x1F;
            }
            else
            {
                min2 &= 0x1F;
                max2 &= 0x1F;
            }

            int32 min1FreeTopBitCount = min1 ? (sizeof(int32) * 8) - (Math::Log2(min1) + 1) : (sizeof(int32) * 8);
            int32 max1FreeTopBitCount = max1 ? (sizeof(int32) * 8) - (Math::Log2(max1) + 1) : (sizeof(int32) * 8);
            if (min1FreeTopBitCount <= max2 || max1FreeTopBitCount <= max2)
            {
                // If the shift is going to touch the sign bit return the max range
                min = INT32_MIN;
                max = INT32_MAX;
            }
            else
            {
                // Compute max
                // Turn values like 0x1010 into 0x1111
                if (min1)
                {
                    min1 = 1 << Math::Log2(min1);
                    min1 = (min1 << 1) - 1;
                }

                if (max1)
                {
                    max1 = 1 << Math::Log2(max1);
                    max1 = (uint32)(max1 << 1) - 1;
                }

                if (max1 > 0)
                {
                    int32 nrTopBits = (sizeof(int32) * 8) - Math::Log2(max1);
                    if (nrTopBits < ::min(max2, 30))
                        max = INT32_MAX;
                    else
                        max = ::max((max1 << ::min(max2, 30)) & ~0x80000000, (min1 << min2) & ~0x80000000);
                }
                else
                {
                    max = (max1 << min2) & ~0x80000000;
                }
                // Compute min

                if (min1 < 0)
                {
                    min = ::min(min1 << max2, max1 << max2);
                }
                else
                {
                    min = ::min(min1 << min2, max1 << max2);
                }
                // Turn values like 0x1110 into 0x1000
                if (min)
                {
                    min = 1 << Math::Log2(min);
                }
            }
        }
        break;

    case Js::OpCode::Shr_A:
        // Shift count: same normalization as Shl_A above.
        if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
        {
            min2 = 0;
            max2 = 0x1F;
        }
        else
        {
            min2 &= 0x1F;
            max2 &= 0x1F;
        }

        // Compute max
        if (max1 < 0)
        {
            max = max1 >> max2;
        }
        else
        {
            max = max1 >> min2;
        }

        // Compute min
        if (min1 < 0)
        {
            min = min1 >> min2;
        }
        else
        {
            min = min1 >> max2;
        }
        break;

    case Js::OpCode::ShrU_A:

        // shift count is constant zero
        if ((min2 == max2) && (max2 & 0x1f) == 0)
        {
            // We can't encode uint32 result, so it has to be used as int32 only or the original value is positive.
            Assert(instr->ignoreIntOverflow || min1 >= 0);
            // We can transfer the signed int32 range.
            min = min1;
            max = max1;
            break;
        }

        const IntConstantBounds src2NewBounds = IntConstantBounds(min2, max2).And_0x1f();
        // Zero is only allowed if result is always a signed int32 or always used as a signed int32
        Assert(min1 >= 0 || instr->ignoreIntOverflow || !src2NewBounds.Contains(0));
        min2 = src2NewBounds.LowerBound();
        max2 = src2NewBounds.UpperBound();

        Assert(min2 <= max2);
        // zero shift count is only allowed if result is used as int32 and/or value is positive
        Assert(min2 > 0 || instr->ignoreIntOverflow || min1 >= 0);

        uint32 umin1 = (uint32)min1;
        uint32 umax1 = (uint32)max1;

        if (umin1 > umax1)
        {
            uint32 temp = umax1;
            umax1 = umin1;
            umin1 = temp;
        }

        Assert(min2 >= 0 && max2 < 32);

        // Compute max
        if (min1 < 0)
        {
            umax1 = UINT32_MAX;
        }
        max = umax1 >> min2;

        // Compute min
        if (min1 <= 0 && max1 >=0)
        {
            min = 0;
        }
        else
        {
            min = umin1 >> max2;
        }

        // We should be able to fit uint32 range as int32
        Assert(instr->ignoreIntOverflow || (min >= 0 && max >= 0) );
        if (min > max)
        {
            // can only happen if shift count can be zero
            Assert(min2 == 0 && (instr->ignoreIntOverflow || min1 >= 0));
            min = Int32ConstMin;
            max = Int32ConstMax;
        }

        break;
    }

    *pNewMin = min;
    *pNewMax = max;
}
// Main type-specialization dispatch for one instruction. Attempts, in order: constant folding
// (unary/binary/wasm/branch), unary/binary type specialization, and constant peeps; if nothing applies,
// ensures all sources and the dst are unspecialized (var). Returns the (possibly replaced) instruction.
// *redoTypeSpecRef is set when the caller must re-run type specialization for this instruction.
IR::Instr *
GlobOpt::TypeSpecialization(
    IR::Instr *instr,
    Value **pSrc1Val,
    Value **pSrc2Val,
    Value **pDstVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Value *&src1Val = *pSrc1Val;
    Value *&src2Val = *pSrc2Val;
    *redoTypeSpecRef = false;
    Assert(!*forceInvariantHoistingRef);

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    // - Int32 values that can't be tagged are created as float constant values instead because a JavascriptNumber var is needed
    //   for that value at runtime. For the purposes of type specialization, recover the int32 values so that they will be
    //   treated as ints.
    // - If int overflow does not matter for the instruction, we can additionally treat uint32 values as int32 values because
    //   the value resulting from the operation will eventually be converted to int32 anyway
    Value *const src1OriginalVal = src1Val;
    Value *const src2OriginalVal = src2Val;
    if(!instr->ShouldCheckForIntOverflow())
    {
        if(src1Val && src1Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                src1Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                &int32Value,
                &isInt32))
            {
                src1Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
        if(src2Val && src2Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                src2Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                &int32Value,
                &isInt32))
            {
                src2Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
    }
    // Restore the original (possibly float-constant) values when this function returns.
    const AutoRestoreVal autoRestoreSrc1Val(src1OriginalVal, &src1Val);
    const AutoRestoreVal autoRestoreSrc2Val(src2OriginalVal, &src2Val);

    if (src1Val && instr->GetSrc2() == nullptr)
    {
        // Unary
        // Note make sure that native array StElemI gets to TypeSpecializeStElem. Do this for typed arrays, too?
        int32 intConstantValue;
        if (!this->IsLoopPrePass() &&
            !instr->IsBranchInstr() &&
            src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) &&
            !(
                // Nothing to fold for element stores. Go into type specialization to see if they can at least be specialized.
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC ||
                instr->m_opcode == Js::OpCode::MultiBr ||
                instr->m_opcode == Js::OpCode::InlineArrayPop
            ))
        {
            if (OptConstFoldUnary(&instr, intConstantValue, src1Val == src1OriginalVal, pDstVal))
            {
                return instr;
            }
        }
        else if (this->TypeSpecializeUnary(
                    &instr,
                    &src1Val,
                    pDstVal,
                    src1OriginalVal,
                    redoTypeSpecRef,
                    forceInvariantHoistingRef))
        {
            return instr;
        }
        else if(*redoTypeSpecRef)
        {
            return instr;
        }
    }
    else if (instr->GetSrc2() && !instr->IsBranchInstr())
    {
        // Binary
        if (!this->IsLoopPrePass())
        {
            if (GetIsAsmJSFunc())
            {
                if (CONFIG_FLAG(WasmFold))
                {
                    bool success = instr->GetSrc1()->IsInt64() ?
                        this->OptConstFoldBinaryWasm<int64>(&instr, src1Val, src2Val, pDstVal) :
                        this->OptConstFoldBinaryWasm<int>(&instr, src1Val, src2Val, pDstVal);
                    if (success)
                    {
                        return instr;
                    }
                }
            }
            else
            {
                // OptConstFoldBinary doesn't do type spec, so only deal with things we are sure are int (IntConstant and IntRange)
                // and not just likely ints TypeSpecializeBinary will deal with type specializing them and fold them again
                IntConstantBounds src1IntConstantBounds, src2IntConstantBounds;
                if (src1Val && src1Val->GetValueInfo()->TryGetIntConstantBounds(&src1IntConstantBounds))
                {
                    if (src2Val && src2Val->GetValueInfo()->TryGetIntConstantBounds(&src2IntConstantBounds))
                    {
                        if (this->OptConstFoldBinary(&instr, src1IntConstantBounds, src2IntConstantBounds, pDstVal))
                        {
                            return instr;
                        }
                    }
                }
            }
        }
    }
    if (instr->GetSrc2() && this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2OriginalVal, redoTypeSpecRef))
    {
        // Binary type spec succeeded; try to peep away an identity operand (e.g. x + 0, x * 1).
        if (!this->IsLoopPrePass() &&
            instr->m_opcode != Js::OpCode::Nop &&
            instr->m_opcode != Js::OpCode::Br && // We may have const fold a branch

            // Cannot const-peep if the result of the operation is required for a bailout check
            !(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnResultConditions))
        {
            if (src1Val && src1Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc1(), pDstVal, src1Val->GetValueInfo()))
                {
                    return instr;
                }
            }
            else if (src2Val && src2Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc2(), pDstVal, src2Val->GetValueInfo()))
                {
                    return instr;
                }
            }
        }
        return instr;
    }
    else if(*redoTypeSpecRef)
    {
        return instr;
    }

    if (instr->IsBranchInstr() && !this->IsLoopPrePass())
    {
        if (this->OptConstFoldBranch(instr, src1Val, src2Val, pDstVal))
        {
            return instr;
        }
    }

    // We didn't type specialize, make sure the srcs are unspecialized
    IR::Opnd *src1 = instr->GetSrc1();
    if (src1)
    {
        instr = this->ToVarUses(instr, src1, false, src1Val);

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            instr = this->ToVarUses(instr, src2, false, src2Val);
        }
    }

    IR::Opnd *dst = instr->GetDst();
    if (dst)
    {
        instr = this->ToVarUses(instr, dst, true, nullptr);

        // Handling for instructions other than built-ins that may require only dst type specialization
        // should be added here.
        if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode) && !GetIsAsmJSFunc()) // don't need to do typespec for asmjs
        {
            this->TypeSpecializeInlineBuiltInDst(&instr, pDstVal);
            return instr;
        }

        // Clear the int specialized bit on the dst.
        if (dst->IsRegOpnd())
        {
            IR::RegOpnd *dstRegOpnd = dst->AsRegOpnd();
            if (!dstRegOpnd->m_sym->IsTypeSpec())
            {
                this->ToVarRegOpnd(dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsInt32())
            {
                this->ToInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsUInt32() && GetIsAsmJSFunc())
            {
                this->ToUInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsFloat64())
            {
                this->ToFloat64Dst(instr, dstRegOpnd, this->currentBlock);
            }
        }
        else if (dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym())
        {
            this->ToVarStackSym(dst->AsSymOpnd()->m_sym->AsStackSym(), this->currentBlock);
        }
    }

    return instr;
}
// Constant peephole: when one source of the instruction is a known int constant that makes the operation an
// identity (x + 0, x * 1, x | 0, x & -1, x >> 0, ...) or a constant (x | -1, x & 0), rewrites the
// instruction into a simple Ld_A of the surviving operand. Returns true when the rewrite was performed.
// `constSrc` is the operand known to be constant; `valuInfo` is its value info.
bool
GlobOpt::OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *valuInfo)
{
    int32 value;
    IR::Opnd *src;
    IR::Opnd *nonConstSrc = (constSrc == instr->GetSrc1() ? instr->GetSrc2() : instr->GetSrc1());

    // Try to find the value from value info first
    if (valuInfo->TryGetIntConstantValue(&value))
    {
    }
    else if (constSrc->IsAddrOpnd())
    {
        IR::AddrOpnd *addrOpnd = constSrc->AsAddrOpnd();
#ifdef _M_X64
        Assert(addrOpnd->IsVar() || Math::FitsInDWord((size_t)addrOpnd->m_address));
#else
        Assert(sizeof(value) == sizeof(addrOpnd->m_address));
#endif

        if (addrOpnd->IsVar())
        {
            value = Js::TaggedInt::ToInt32(addrOpnd->m_address);
        }
        else
        {
            // We asserted that the address will fit in a DWORD above
            value = ::Math::PointerCastToIntegral<int32>(constSrc->AsAddrOpnd()->m_address);
        }
    }
    else if (constSrc->IsIntConstOpnd())
    {
        value = constSrc->AsIntConstOpnd()->AsInt32();
    }
    else
    {
        return false;
    }

    // `src` is set to the operand whose value becomes the result of the rewritten Ld_A.
    switch(instr->m_opcode)
    {
        // Can't do all Add_A because of string concats.
        // Sub_A cannot be transformed to a NEG_A because 0 - 0 != -0
    case Js::OpCode::Add_A:
        src = nonConstSrc;

        if (!src->GetValueType().IsInt())
        {
            // 0 + -0 != -0
            // "Foo" + 0 != "Foo
            return false;
        }
        // fall-through

    case Js::OpCode::Add_I4:
        if (value != 0)
        {
            return false;
        }
        if (constSrc == instr->GetSrc1())
        {
            src = instr->GetSrc2();
        }
        else
        {
            src = instr->GetSrc1();
        }
        break;

    case Js::OpCode::Mul_A:
    case Js::OpCode::Mul_I4:
        if (value == 0)
        {
            // -0 * 0 != 0
            return false;
        }
        else if (value == 1)
        {
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Div_A:
        if (value == 1 && constSrc == instr->GetSrc2())
        {
            src = instr->GetSrc1();
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Or_I4:
        if (value == -1)
        {
            // x | -1 == -1
            src = constSrc;
        }
        else if (value == 0)
        {
            // x | 0 == x
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::And_I4:
        if (value == -1)
        {
            // x & -1 == x
            src = nonConstSrc;
        }
        else if (value == 0)
        {
            // x & 0 == 0
            src = constSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        // Only a zero shift count (on src2) is an identity.
        if (value != 0 || constSrc != instr->GetSrc2())
        {
            return false;
        }
        src = instr->GetSrc1();
        break;

    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    // Keep `src` as src1 and free the other operand, then turn the instruction into a load.
    if (src == instr->GetSrc1())
    {
        instr->FreeSrc2();
    }
    else
    {
        Assert(src == instr->GetSrc2());
        instr->ReplaceSrc1(instr->UnlinkSrc2());
    }

    instr->m_opcode = Js::OpCode::Ld_A;

    InvalidateInductionVariables(instr);

    return true;
}
// Attempts to recover a compile-time-known Js::Var for the operand: from its var-constant value info, from
// an address/int/float constant operand, or from the single-def instruction of a reg operand whose value
// info pins the type (boolean, undefined, null, or - with FLOATVAR - a float constant). Returns nullptr
// when no constant var can be determined.
Js::Var // TODO: michhol OOP JIT, shouldn't play with Vars
GlobOpt::GetConstantVar(IR::Opnd *opnd, Value *val)
{
    ValueInfo *valueInfo = val->GetValueInfo();

    if (valueInfo->IsVarConstant() && valueInfo->IsPrimitive())
    {
        return valueInfo->AsVarConstant()->VarValue();
    }
    if (opnd->IsAddrOpnd())
    {
        IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
        if (addrOpnd->IsVar())
        {
            return addrOpnd->m_address;
        }
    }
    else if (opnd->IsIntConstOpnd())
    {
        if (!Js::TaggedInt::IsOverflow(opnd->AsIntConstOpnd()->AsInt32()))
        {
            return Js::TaggedInt::ToVarUnchecked(opnd->AsIntConstOpnd()->AsInt32());
        }
    }
#if FLOATVAR
    else if (opnd->IsFloatConstOpnd())
    {
        return Js::JavascriptNumber::ToVar(opnd->AsFloatConstOpnd()->m_value);
    }
#endif
    else if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->IsSingleDef())
    {
        if (valueInfo->IsBoolean())
        {
            // A boolean-typed single-def sym must be defined by Ld_A of the true/false address.
            IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
            if (defInstr->m_opcode != Js::OpCode::Ld_A || !defInstr->GetSrc1()->IsAddrOpnd())
            {
                return nullptr;
            }
            Assert(defInstr->GetSrc1()->AsAddrOpnd()->IsVar());
            return defInstr->GetSrc1()->AsAddrOpnd()->m_address;
        }
        else if (valueInfo->IsUndefined())
        {
            return (Js::Var)this->func->GetScriptContextInfo()->GetUndefinedAddr();
        }
        else if (valueInfo->IsNull())
        {
            return (Js::Var)this->func->GetScriptContextInfo()->GetNullAddr();
        }
#if FLOATVAR
        else if (valueInfo->IsFloat())
        {
            IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
            if ((defInstr->m_opcode == Js::OpCode::LdC_F8_R8 || defInstr->m_opcode == Js::OpCode::LdC_A_R8) && defInstr->GetSrc1()->IsFloatConstOpnd())
            {
                return Js::JavascriptNumber::ToVar(defInstr->GetSrc1()->AsFloatConstOpnd()->m_value);
            }
        }
#endif
    }

    return nullptr;
}
// File-local helpers used by CanProveConditionalBranch to statically evaluate
// equality comparisons between constant vars at JIT time.
namespace
{
    // Compares a tagged-int 'left' against a (possibly) boxed number 'right'.
    // On success stores the == outcome in *result and returns true; returns
    // false when 'right' is not a number, leaving the comparison to runtime.
    bool TryCompIntAndFloat(bool * result, Js::Var left, Js::Var right)
    {
        if (Js::TaggedInt::Is(left))
        {
            // If both are tagged ints we should not get here.
            Assert(!Js::TaggedInt::Is(right));
            if (Js::JavascriptNumber::Is_NoTaggedIntCheck(right))
            {
                double value = Js::JavascriptNumber::GetValue(right);
                *result = (Js::TaggedInt::ToInt32(left) == value);
                return true;
            }
        }
        return false;
    }

    // Tries to statically evaluate src1 ==/=== src2 (isStrict selects which).
    // On success stores the outcome in *result and returns true; returns false
    // when the answer must be left to the runtime (e.g. object identity with
    // possible valueOf/toString, or a possibly-NaN float on non-FLOATVAR builds).
    bool Op_JitEq(bool * result, Value * src1Val, Value * src2Val, Js::Var src1Var, Js::Var src2Var, Func * func, bool isStrict)
    {
        Assert(src1Val != nullptr && src2Val != nullptr);
        Assert(src1Var != nullptr && src2Var != nullptr);

        if (src1Var == src2Var)
        {
            // Identical tagged ints are trivially equal.
            if (Js::TaggedInt::Is(src1Var))
            {
                *result = true;
                return true;
            }
            if (!isStrict && src1Val->GetValueInfo()->IsNotFloat())
            {
                // If the vars are equal and they are not NaN, non-strict equal returns true. Not float guarantees not NaN.
                *result = true;
                return true;
            }
#if FLOATVAR
            // NaN-boxed build: a var equal to itself is equal unless it is NaN.
            if (Js::JavascriptNumber::Is_NoTaggedIntCheck(src1Var))
            {
                *result = !Js::JavascriptNumber::IsNan(Js::JavascriptNumber::GetValue(src1Var));
                return true;
            }
#endif
            // The singletons true/false/null/undefined are equal to themselves.
            if (src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetTrueAddr()) ||
                src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetFalseAddr()) ||
                src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetNullAddr()) ||
                src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetUndefinedAddr()))
            {
                *result = true;
                return true;
            }
            // Other var comparisons require the runtime to prove.
            return false;
        }

#if FLOATVAR
        // Different var bits: int vs boxed float can still compare numerically equal.
        if (TryCompIntAndFloat(result, src1Var, src2Var) || TryCompIntAndFloat(result, src2Var, src1Var))
        {
            return true;
        }
#endif
        return false;
    }

    // Negated form of Op_JitEq: proves !=/!== when the equality is provable.
    bool Op_JitNeq(bool * result, Value * src1Val, Value * src2Val, Js::Var src1Var, Js::Var src2Var, Func * func, bool isStrict)
    {
        if (Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, func, isStrict))
        {
            *result = !*result;
            return true;
        }
        return false;
    }

    // Detects a statically-provable loose-equality mismatch between a known
    // number (other than 0/1, which coerce to booleans) and a boolean value.
    // Either side may supply the constant var; returns true when == is provably false.
    bool BoolAndIntStaticAndTypeMismatch(Value* src1Val, Value* src2Val, Js::Var src1Var, Js::Var src2Var)
    {
        ValueInfo *src1ValInfo = src1Val->GetValueInfo();
        ValueInfo *src2ValInfo = src2Val->GetValueInfo();
        return (src1ValInfo->IsNumber() && src1Var && src2ValInfo->IsBoolean() && src1Var != Js::TaggedInt::ToVarUnchecked(0) && src1Var != Js::TaggedInt::ToVarUnchecked(1)) ||
            (src2ValInfo->IsNumber() && src2Var && src1ValInfo->IsBoolean() && src2Var != Js::TaggedInt::ToVarUnchecked(0) && src2Var != Js::TaggedInt::ToVarUnchecked(1));
    }
}
- bool
- GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2Val, Js::Var src1Var, Js::Var src2Var, bool *result)
- {
- auto AreSourcesEqual = [&](Value * val1, Value * val2, bool undefinedCmp) -> bool
- {
- // NaN !== NaN, and objects can have valueOf/toString
- if (val1->IsEqualTo(val2))
- {
- if (val1->GetValueInfo()->IsUndefined())
- {
- return undefinedCmp;
- }
- ValueInfo * valInfo = val1->GetValueInfo();
- return !valInfo->HasBeenUndefined() && valInfo->IsPrimitive() && valInfo->IsNotFloat();
- }
- return false;
- };
- // Make sure GetConstantVar only returns primitives.
- // TODO: OOP JIT, enabled these asserts
- //Assert(!src1Var || !Js::JavascriptOperators::IsObject(src1Var));
- //Assert(!src2Var || !Js::JavascriptOperators::IsObject(src2Var));
- int64 left64, right64;
- int32 left, right;
- int32 constVal;
- switch (instr->m_opcode)
- {
- #define BRANCHSIGNED(OPCODE,CMP,TYPE,UNSIGNEDNESS,UNDEFINEDCMP) \
- case Js::OpCode::##OPCODE: \
- if (src1Val && src2Val) \
- { \
- if (src1Val->GetValueInfo()->TryGetIntConstantValue(&left, UNSIGNEDNESS) && \
- src2Val->GetValueInfo()->TryGetIntConstantValue(&right, UNSIGNEDNESS)) \
- { \
- *result = (TYPE)left CMP(TYPE)right; \
- } \
- if (src1Val->GetValueInfo()->TryGetInt64ConstantValue(&left64, UNSIGNEDNESS) && \
- src2Val->GetValueInfo()->TryGetInt64ConstantValue(&right64, UNSIGNEDNESS)) \
- { \
- *result = (TYPE)left64 CMP(TYPE)right64; \
- } \
- else if (AreSourcesEqual(src1Val, src2Val, UNDEFINEDCMP)) \
- { \
- *result = 0 CMP 0; \
- } \
- else \
- { \
- return false; \
- } \
- } \
- else \
- { \
- return false; \
- } \
- break;
- BRANCHSIGNED(BrEq_I4, == , int64, false, true)
- BRANCHSIGNED(BrGe_I4, >= , int64, false, false)
- BRANCHSIGNED(BrGt_I4, > , int64, false, false)
- BRANCHSIGNED(BrLt_I4, < , int64, false, false)
- BRANCHSIGNED(BrLe_I4, <= , int64, false, false)
- BRANCHSIGNED(BrNeq_I4, != , int64, false, false)
- BRANCHSIGNED(BrUnGe_I4, >= , uint64, true, false)
- BRANCHSIGNED(BrUnGt_I4, > , uint64, true, false)
- BRANCHSIGNED(BrUnLt_I4, < , uint64, true, false)
- BRANCHSIGNED(BrUnLe_I4, <= , uint64, true, false)
- #undef BRANCHSIGNED
- #define BRANCH(OPCODE,CMP,VARCMPFUNC,UNDEFINEDCMP) \
- case Js::OpCode::##OPCODE: \
- if (src1Val && src2Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&left) && \
- src2Val->GetValueInfo()->TryGetIntConstantValue(&right)) \
- { \
- *result = left CMP right; \
- } \
- else if (src1Val && src2Val && AreSourcesEqual(src1Val, src2Val, UNDEFINEDCMP)) \
- { \
- *result = 0 CMP 0; \
- } \
- else if (src1Var && src2Var) \
- { \
- if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts)) \
- { \
- return false; \
- } \
- *result = VARCMPFUNC(src1Var, src2Var, this->func->GetScriptContext()); \
- } \
- else \
- { \
- return false; \
- } \
- break;
- BRANCH(BrGe_A, >= , Js::JavascriptOperators::GreaterEqual, /*undefinedEquality*/ false)
- BRANCH(BrNotGe_A, <, !Js::JavascriptOperators::GreaterEqual, false)
- BRANCH(BrLt_A, <, Js::JavascriptOperators::Less, false)
- BRANCH(BrNotLt_A, >= , !Js::JavascriptOperators::Less, false)
- BRANCH(BrGt_A, >, Js::JavascriptOperators::Greater, false)
- BRANCH(BrNotGt_A, <= , !Js::JavascriptOperators::Greater, false)
- BRANCH(BrLe_A, <= , Js::JavascriptOperators::LessEqual, false)
- BRANCH(BrNotLe_A, >, !Js::JavascriptOperators::LessEqual, false)
- #undef BRANCH
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrNotNeq_A:
- if (src1Val && src2Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&left) &&
- src2Val->GetValueInfo()->TryGetIntConstantValue(&right))
- {
- *result = left == right;
- }
- else if (src1Val && src2Val && AreSourcesEqual(src1Val, src2Val, true))
- {
- *result = true;
- }
- else if (!src1Var || !src2Var)
- {
- if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
- {
- *result = false;
- }
- else
- {
- return false;
- }
- }
- else
- {
- if (!Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, this->func, false /* isStrict */))
- {
- return false;
- }
- }
- break;
- case Js::OpCode::BrNeq_A:
- case Js::OpCode::BrNotEq_A:
- if (src1Val && src2Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&left) &&
- src2Val->GetValueInfo()->TryGetIntConstantValue(&right))
- {
- *result = left != right;
- }
- else if (src1Val && src2Val && AreSourcesEqual(src1Val, src2Val, true))
- {
- *result = false;
- }
- else if (!src1Var || !src2Var)
- {
- if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
- {
- *result = true;
- }
- else
- {
- return false;
- }
- }
- else
- {
- if (!Op_JitNeq(result, src1Val, src2Val, src1Var, src2Var, this->func, false /* isStrict */))
- {
- return false;
- }
- }
- break;
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- if (!src1Var || !src2Var)
- {
- ValueInfo *src1ValInfo = src1Val->GetValueInfo();
- ValueInfo *src2ValInfo = src2Val->GetValueInfo();
- if (
- (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
- (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
- (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
- (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
- (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||
- (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
- (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
- (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
- (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
- (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
- )
- {
- *result = false;
- }
- else if (AreSourcesEqual(src1Val, src2Val, true))
- {
- *result = true;
- }
- else
- {
- return false;
- }
- }
- else
- {
- if (!Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, this->func, true /* isStrict */))
- {
- return false;
- }
- }
- break;
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- if (!src1Var || !src2Var)
- {
- ValueInfo *src1ValInfo = src1Val->GetValueInfo();
- ValueInfo *src2ValInfo = src2Val->GetValueInfo();
- if (
- (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
- (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
- (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
- (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
- (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||
- (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
- (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
- (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
- (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
- (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
- )
- {
- *result = true;
- }
- else if (AreSourcesEqual(src1Val, src2Val, true))
- {
- *result = false;
- }
- else
- {
- return false;
- }
- }
- else
- {
- if (!Op_JitNeq(result, src1Val, src2Val, src1Var, src2Var, this->func, true /* isStrict */))
- {
- return false;
- }
- }
- break;
- case Js::OpCode::BrFalse_A:
- case Js::OpCode::BrTrue_A:
- {
- ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
- if (src1ValueInfo->IsNull() || src1ValueInfo->IsUndefined())
- {
- *result = instr->m_opcode == Js::OpCode::BrFalse_A;
- break;
- }
- if (src1ValueInfo->IsObject() && src1ValueInfo->GetObjectType() > ObjectType::Object)
- {
- // Specific object types that are tracked are equivalent to 'true'
- *result = instr->m_opcode == Js::OpCode::BrTrue_A;
- break;
- }
- if (!src1Var)
- {
- return false;
- }
- // Set *result = (evaluates true) and negate it later for BrFalse
- if (src1Var == reinterpret_cast<Js::Var>(this->func->GetScriptContextInfo()->GetTrueAddr()))
- {
- *result = true;
- }
- else if (src1Var == reinterpret_cast<Js::Var>(this->func->GetScriptContextInfo()->GetFalseAddr()))
- {
- *result = false;
- }
- else if (Js::TaggedInt::Is(src1Var))
- {
- *result = (src1Var != reinterpret_cast<Js::Var>(Js::AtomTag_IntPtr));
- }
- #if FLOATVAR
- else if (Js::JavascriptNumber::Is_NoTaggedIntCheck(src1Var))
- {
- double value = Js::JavascriptNumber::GetValue(src1Var);
- *result = (!Js::JavascriptNumber::IsNan(value)) && (!Js::JavascriptNumber::IsZero(value));
- }
- #endif
- else
- {
- return false;
- }
- if (instr->m_opcode == Js::OpCode::BrFalse_A)
- {
- *result = !(*result);
- }
- break;
- }
- case Js::OpCode::BrFalse_I4:
- {
- constVal = 0;
- if (!src1Val->GetValueInfo()->TryGetIntConstantValue(&constVal))
- {
- return false;
- }
- *result = constVal == 0;
- break;
- }
- case Js::OpCode::BrOnObject_A:
- {
- ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
- if (!src1ValueInfo->IsDefinite())
- {
- return false;
- }
- if (src1ValueInfo->IsPrimitive())
- {
- *result = false;
- }
- else
- {
- if (src1ValueInfo->HasBeenPrimitive())
- {
- return false;
- }
- *result = true;
- }
- break;
- }
- default:
- return false;
- }
- return true;
- }
- bool
- GlobOpt::OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal)
- {
- if (!src1Val)
- {
- return false;
- }
- Js::Var src1Var = this->GetConstantVar(instr->GetSrc1(), src1Val);
- Js::Var src2Var = nullptr;
- if (instr->GetSrc2())
- {
- if (!src2Val)
- {
- return false;
- }
- src2Var = this->GetConstantVar(instr->GetSrc2(), src2Val);
- }
- bool result;
- if (!CanProveConditionalBranch(instr, src1Val, src2Val, src1Var, src2Var, &result))
- {
- return false;
- }
- this->OptConstFoldBr(!!result, instr);
- return true;
- }
// Constant-folds a unary operation whose source is the int constant
// 'intConstantValue'. On success rewrites *pInstr into a load of the folded
// constant (int or float form), updates dst value tracking via *pDstVal, and
// returns true; returns false when the op cannot or should not be folded
// (overflow, -0.0, disabled folding, non-register dst, unknown opcode).
// isUsingOriginalSrc1Value: when true for plain copies, dst value propagation
// is deferred to OptDst (doSetDstVal is cleared).
bool
GlobOpt::OptConstFoldUnary(
    IR::Instr * *pInstr,
    const int32 intConstantValue,
    const bool isUsingOriginalSrc1Value,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value = 0;
    IR::Opnd *constOpnd;
    bool isInt = true;          // result representable as int32 (may flip below)
    bool doSetDstVal = true;    // whether this function sets *pDstVal itself
    FloatConstType fValue = 0.0;

    if (!DoConstFold())
    {
        return false;
    }

    // Folding rewrites the instruction into a load into a register dst.
    if (instr->GetDst() && !instr->GetDst()->IsRegOpnd())
    {
        return false;
    }

    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_A:
        if (intConstantValue == 0)
        {
            // Could fold to -0.0
            return false;
        }

        if (Int32Math::Neg(intConstantValue, &value))
        {
            // Overflow (negating INT32_MIN): leave for runtime.
            return false;
        }
        break;

    case Js::OpCode::Not_A:
        Int32Math::Not(intConstantValue, &value);
        break;

    case Js::OpCode::Ld_A:
        if (instr->HasBailOutInfo())
        {
            //The profile data for switch expr can be string and in GlobOpt we realize it is an int.
            if(instr->GetBailOutKind() == IR::BailOutExpectingString)
            {
                throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
            }
            Assert(instr->GetBailOutKind() == IR::BailOutExpectingInteger);
            instr->ClearBailOutInfo();
        }
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Conv_Num:
    case Js::OpCode::LdC_A_I4:
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Incr_A:
        if (Int32Math::Inc(intConstantValue, &value))
        {
            return false;  // overflow
        }
        break;

    case Js::OpCode::Decr_A:
        if (Int32Math::Dec(intConstantValue, &value))
        {
            return false;  // overflow
        }
        break;

    // Math built-ins with a float result: compute now, then the int/float
    // narrowing check after the switch may still fold back to an int.
    case Js::OpCode::InlineMathAcos:
        fValue = Js::Math::Acos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAsin:
        fValue = Js::Math::Asin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAtan:
        fValue = Js::Math::Atan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathCos:
        fValue = Js::Math::Cos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathExp:
        fValue = Js::Math::Exp((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathLog:
        fValue = Js::Math::Log((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSin:
        fValue = Js::Math::Sin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSqrt:
        fValue = ::sqrt((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathTan:
        fValue = ::tan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathFround:
        // Fround: round-trip through float to get single-precision rounding.
        fValue = (double) (float) intConstantValue;
        isInt = false;
        break;

    case Js::OpCode::InlineMathAbs:
        if (intConstantValue == INT32_MIN)
        {
            if (instr->GetDst()->IsInt32())
            {
                // if dst is an int (e.g. in asm.js), we should coerce it, not convert to float
                value = static_cast<int32>(2147483648U);
            }
            else
            {
                // Rejit with AggressiveIntTypeSpecDisabled for Math.abs(INT32_MIN) because it causes dst
                // to be float type which could be different with previous type spec result in LoopPrePass
                throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
            }
        }
        else
        {
            value = ::abs(intConstantValue);
        }
        break;

    case Js::OpCode::InlineMathClz:
        // clz32: count leading zeros; _BitScanReverse finds the highest set bit.
        DWORD clz;
        if (_BitScanReverse(&clz, intConstantValue))
        {
            value = 31 - clz;
        }
        else
        {
            value = 32;  // input was 0
        }
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::Ctz:
        // Count trailing zeros (WASM only); _BitScanForward finds the lowest set bit.
        Assert(func->GetJITFunctionBody()->IsWasmFunction());
        Assert(!instr->HasBailOutInfo());
        DWORD ctz;
        if (_BitScanForward(&ctz, intConstantValue))
        {
            value = ctz;
        }
        else
        {
            value = 32;  // input was 0
        }
        break;

    // floor/ceil/round of an int constant are the identity.
    case Js::OpCode::InlineMathFloor:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathCeil:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathRound:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;
    case Js::OpCode::ToVar:
        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            // Cannot represent as a tagged int; leave for runtime boxing.
            return false;
        }
        else
        {
            value = intConstantValue;
            instr->ClearBailOutInfo();
            break;
        }
    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    Assert(!instr->HasBailOutInfo()); // If we are, in fact, successful in constant folding the instruction, there is no point in having the bailoutinfo around anymore.
                                      // Make sure that it is cleared if it was initially present.

    // If a float result is exactly representable as an int32, fold to int.
    if (!isInt)
    {
        value = (int32)fValue;
        if (fValue == (double)value)
        {
            isInt = true;
        }
    }
    if (isInt)
    {
        constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %d\n"), value);
    }
    else
    {
        constOpnd = IR::FloatConstOpnd::New(fValue, TyFloat64, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %f\n"), fValue);
    }
    instr->ReplaceSrc1(constOpnd);

    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (isInt)
    {
        if (dstSym->IsSingleDef())
        {
            dstSym->SetIsIntConst(value);
        }

        if (doSetDstVal)
        {
            *pDstVal = GetIntConstantValue(value, instr, dst);
        }

        if (IsTypeSpecPhaseOff(this->func))
        {
            // Type spec disabled: keep the dst as a var.
            instr->m_opcode = Js::OpCode::LdC_A_I4;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            // Type spec enabled: load the raw int32 into a type-specialized dst.
            instr->m_opcode = Js::OpCode::Ld_I4;
            this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);

            StackSym * currDstSym = instr->GetDst()->AsRegOpnd()->m_sym;
            if (currDstSym->IsSingleDef())
            {
                currDstSym->SetIsIntConst(value);
            }
        }
    }
    else
    {
        *pDstVal = NewFloatConstantValue(fValue, dst);

        if (IsTypeSpecPhaseOff(this->func))
        {
            instr->m_opcode = Js::OpCode::LdC_A_R8;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            instr->m_opcode = Js::OpCode::LdC_F8_R8;
            this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
        }
    }

    // The folded dst no longer follows any tracked induction pattern.
    InvalidateInductionVariables(instr);

    return true;
}
- //------------------------------------------------------------------------------------------------------
- // Type specialization
- //------------------------------------------------------------------------------------------------------
- bool
- GlobOpt::IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val)
- {
- Assert(src);
- Assert(val);
- ValueInfo *valueInfo = val->GetValueInfo();
- Assert(valueInfo->IsLikelyInt());
- // If it is not known that the operand is definitely an int, the operand is not already type-specialized, and it's not live
- // in the loop landing pad (if we're in a loop), it's probably not worth type-specializing this instruction. The common case
- // where type-specializing this would be bad is where the operations are entirely on properties or array elements, where the
- // ratio of FromVars and ToVars to the number of actual operations is high, and the conversions would dominate the time
- // spent. On the other hand, if we're using a function formal parameter more than once, it would probably be worth
- // type-specializing it, hence the IsDead check on the operands.
- return
- valueInfo->IsInt() ||
- valueInfo->HasIntConstantValue(true) ||
- !src->GetIsDead() ||
- !src->IsRegOpnd() ||
- CurrentBlockData()->IsInt32TypeSpecialized(src->AsRegOpnd()->m_sym) ||
- (this->currentBlock->loop && this->currentBlock->loop->landingPad->globOptData.IsLive(src->AsRegOpnd()->m_sym));
- }
- bool
- GlobOpt::IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst)
- {
- Assert(dst);
- const auto sym = dst->AsRegOpnd()->m_sym;
- return
- CurrentBlockData()->IsInt32TypeSpecialized(sym) ||
- (this->currentBlock->loop && this->currentBlock->loop->landingPad->globOptData.IsLive(sym));
- }
// Heuristic: is int32 type specialization of 'instr' likely profitable?
// Worth it when any source or the dst already suggests it, when operands
// alias each other (fewer conversions), or when the next real instruction
// (skipping copy/load noise) would itself consume the dst in an int-friendly
// op. The goal is to avoid cases where FromVar/ToVar conversions dominate.
bool
GlobOpt::IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val)
{
    Assert(instr);

    const auto src1 = instr->GetSrc1();
    const auto src2 = instr->GetSrc2();

    // In addition to checking each operand and the destination, if for any reason we only have to do a maximum of two
    // conversions instead of the worst-case 3 conversions, it's probably worth specializing.
    if (IsWorthSpecializingToInt32DueToSrc(src1, src1Val) ||
        (src2Val && IsWorthSpecializingToInt32DueToSrc(src2, src2Val)))
    {
        return true;
    }

    IR::Opnd *dst = instr->GetDst();
    if (!dst || IsWorthSpecializingToInt32DueToDst(dst))
    {
        return true;
    }

    // Operand aliasing saves at least one conversion.
    if (dst->IsEqual(src1) || (src2Val && (dst->IsEqual(src2) || src1->IsEqual(src2))))
    {
        return true;
    }

    IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();

    // Skip useless Ld_A's
    do
    {
        switch (instrNext->m_opcode)
        {
        case Js::OpCode::Ld_A:
            // Follow a copy of our dst; stop at any unrelated Ld_A.
            if (!dst->IsEqual(instrNext->GetSrc1()))
            {
                goto done;
            }
            dst = instrNext->GetDst();
            break;

        // Loads and bytecode bookkeeping don't consume the dst; skip them.
        case Js::OpCode::LdFld:
        case Js::OpCode::LdRootFld:
        case Js::OpCode::LdRootFldForTypeOf:
        case Js::OpCode::LdFldForTypeOf:
        case Js::OpCode::LdElemI_A:
        case Js::OpCode::ByteCodeUses:
            break;

        default:
            goto done;
        }

        instrNext = instrNext->GetNextRealInstrOrLabel();
    } while (true);

done:
    // If the next instr could also be type specialized, then it is probably worth it.
    if ((instrNext->GetSrc1() && dst->IsEqual(instrNext->GetSrc1())) || (instrNext->GetSrc2() && dst->IsEqual(instrNext->GetSrc2())))
    {
        switch (instrNext->m_opcode)
        {
        case Js::OpCode::Add_A:
        case Js::OpCode::Sub_A:
        case Js::OpCode::Mul_A:
        case Js::OpCode::Div_A:
        case Js::OpCode::Rem_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
        case Js::OpCode::Incr_A:
        case Js::OpCode::Decr_A:
        case Js::OpCode::Neg_A:
        case Js::OpCode::Not_A:
        case Js::OpCode::Conv_Num:
        case Js::OpCode::BrEq_I4:
        case Js::OpCode::BrTrue_I4:
        case Js::OpCode::BrFalse_I4:
        case Js::OpCode::BrGe_I4:
        case Js::OpCode::BrGt_I4:
        case Js::OpCode::BrLt_I4:
        case Js::OpCode::BrLe_I4:
        case Js::OpCode::BrNeq_I4:
            return true;
        }
    }

    return false;
}
- bool
- GlobOpt::TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal)
- {
- Assert(src1Val->GetValueInfo()->IsNumber());
- if (this->IsLoopPrePass())
- {
- return false;
- }
- switch (instr->m_opcode)
- {
- case Js::OpCode::Conv_Num:
- // Optimize Conv_Num away since we know this is a number
- instr->m_opcode = Js::OpCode::Ld_A;
- return false;
- }
- return false;
- }
// Top-level dispatch for type-specializing a unary instruction. Routes inline
// built-ins and LdLen to their dedicated handlers, otherwise chooses int,
// float, or number specialization based on src1's value info. Returns true
// when specialization was performed (or delegated); false to leave the
// instruction as-is. May replace *pSrc1Val/*pInstr along the way.
bool
GlobOpt::TypeSpecializeUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;
    Assert(src1Val);

    // We don't need to do typespec for asmjs
    if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
    {
        return false;
    }

    IR::Instr *&instr = *pInstr;
    int32 min, max;

    // Inline built-ins explicitly specify how srcs/dst must be specialized.
    if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        TypeSpecializeInlineBuiltInUnary(pInstr, &src1Val, pDstVal, src1OriginalVal, redoTypeSpecRef);
        return true;
    }

    // Consider: If type spec wasn't completely done, make sure that we don't type-spec the dst 2nd time.
    if(instr->m_opcode == Js::OpCode::LdLen_A && TypeSpecializeLdLen(&instr, &src1Val, pDstVal, forceInvariantHoistingRef))
    {
        return true;
    }

    // No known int range: fall back to the original value and try float, then
    // generic number, then the float helper.
    if (!src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, this->DoAggressiveIntTypeSpec()))
    {
        src1Val = src1OriginalVal;
        if (src1Val->GetValueInfo()->IsLikelyFloat())
        {
            // Try to type specialize to float
            return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
        }
        else if (src1Val->GetValueInfo()->IsNumber())
        {
            return TypeSpecializeNumberUnary(instr, src1Val, pDstVal);
        }
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    // An int range is known: specialize to int using it.
    return this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, min, max, src1OriginalVal, redoTypeSpecRef);
}
// Type-specializes the src and dst of a unary inline built-in according to the
// built-in's flags and the src's value info. (Note: despite the historical
// comment claiming a bool return, this function is void — the caller in
// TypeSpecializeUnary unconditionally treats the built-in as handled.)
void
GlobOpt::TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode);   // From actual instr, not profile based.
    Assert(builtInId != Js::BuiltinFunction::None);

    // Consider using different bailout for float/int FromVars, so that when the arg cannot be converted to number we don't disable
    // type spec for other parts of the big function but rather just don't inline that built-in instr.
    // E.g. could do that if the value is not likelyInt/likelyFloat.

    Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
    bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_Args) == Js::BuiltInFlags::BIF_TypeSpecUnaryToFloat;
    if (areAllArgsAlwaysFloat)
    {
        // InlineMathAcos, InlineMathAsin, InlineMathAtan, InlineMathCos, InlineMathExp, InlineMathLog, InlineMathSin, InlineMathSqrt, InlineMathTan.
        Assert(this->DoFloatTypeSpec());

        // Type-spec the src.
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline built-ins the args have to be type-specialized to float, but something failed during the process.");

        // Type-spec the dst.
        this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathAbs)
    {
        // Consider the case when the value is unknown - because of bailout in abs we may disable type spec for the whole function which is too much.
        // First, try int.
        int minVal, maxVal;
        bool shouldTypeSpecToInt = src1Val->GetValueInfo()->GetIntValMinMax(&minVal, &maxVal, /* doAggressiveIntTypeSpec = */ true);
        if (shouldTypeSpecToInt)
        {
            Assert(this->DoAggressiveIntTypeSpec());
            bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, minVal, maxVal, src1OriginalVal, redoTypeSpecRef, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (int), but something failed during the process.");

            if (!this->IsLoopPrePass())
            {
                // Create bailout for INT_MIN which does not have corresponding int value on the positive side.
                // Check int range: if we know the range is out of overflow, we do not need the bail out at all.
                if (minVal == INT32_MIN)
                {
                    GenerateBailAtOperation(&instr, IR::BailOnIntMin);
                }
            }

            // Account for ::abs(INT_MIN) == INT_MIN (which is less than 0).
            maxVal = ::max(
                ::abs(Int32Math::NearestInRangeTo(minVal, INT_MIN + 1, INT_MAX)),
                ::abs(Int32Math::NearestInRangeTo(maxVal, INT_MIN + 1, INT_MAX)));
            minVal = minVal >= 0 ? minVal : 0;
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, nullptr, IR::BailOutInvalid, minVal, maxVal, pDstVal);
        }
        else
        {
            // If we couldn't do int, do float.
            Assert(this->DoFloatTypeSpec());
            src1Val = src1OriginalVal;
            bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (float), but something failed during the process.");

            this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::InlineMathRound)
    {
        // Type specialize src to float
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline Math.floor and Math.ceil the src has to be type-specialized to float, but something failed during the process.");

        // Type specialize dst to int
        this->TypeSpecializeIntDst(
            instr,
            instr->m_opcode,
            nullptr,
            src1Val,
            nullptr,
            IR::BailOutInvalid,
            INT32_MIN,
            INT32_MAX,
            pDstVal);
    }
    else if(instr->m_opcode == Js::OpCode::InlineArrayPop)
    {
        IR::Opnd *const thisOpnd = instr->GetSrc1();

        Assert(thisOpnd);

        // Ensure src1 (Array) is a var
        this->ToVarUses(instr, thisOpnd, false, src1Val);

        if(!this->IsLoopPrePass() && thisOpnd->GetValueType().IsLikelyNativeArray())
        {
            // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during the run time.
            GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
        }

        if(!instr->GetDst())
        {
            return;
        }

        // Try Type Specializing the element (return item from Pop) based on the array's profile data.
        if(thisOpnd->GetValueType().IsLikelyNativeIntArray())
        {
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
        }
        else if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
        {
            this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
        }
        else
        {
            // We reached here so the Element is not yet type specialized. Ensure element is a var
            if(instr->GetDst()->IsRegOpnd())
            {
                this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
            }
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathClz)
    {
        Assert(this->DoAggressiveIntTypeSpec());
        Assert(this->DoLossyIntTypeSpec());
        //Type specialize to int
        bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, INT32_MIN, INT32_MAX, src1OriginalVal, redoTypeSpecRef);
        AssertMsg(retVal, "For clz32, the arg has to be type-specialized to int.");
    }
    else
    {
        AssertMsg(FALSE, "Unsupported built-in!");
    }
}
- void
- GlobOpt::TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal)
- {
- IR::Instr *&instr = *pInstr;
- Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
- switch(instr->m_opcode)
- {
- case Js::OpCode::InlineMathAtan2:
- {
- Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode); // From actual instr, not profile based.
- Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
- bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecAllToFloat) != 0;
- Assert(areAllArgsAlwaysFloat);
- Assert(this->DoFloatTypeSpec());
- // Type-spec the src1, src2 and dst.
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- AssertMsg(retVal, "For pow and atnan2 the args have to be type-specialized to float, but something failed during the process.");
- break;
- }
- case Js::OpCode::InlineMathPow:
- {
- #ifndef _M_ARM32_OR_ARM64
- if (src2Val->GetValueInfo()->IsLikelyInt())
- {
- bool lossy = false;
- this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, lossy);
- IR::Opnd* src1 = instr->GetSrc1();
- int32 valueMin, valueMax;
- if (src1Val->GetValueInfo()->IsLikelyInt() &&
- this->DoPowIntIntTypeSpec() &&
- src2Val->GetValueInfo()->GetIntValMinMax(&valueMin, &valueMax, this->DoAggressiveIntTypeSpec()) &&
- valueMin >= 0)
- {
- this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
- this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
- if(!this->IsLoopPrePass())
- {
- GenerateBailAtOperation(&instr, IR::BailOutOnPowIntIntOverflow);
- }
- }
- else
- {
- this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);
- TypeSpecializeFloatDst(instr, nullptr, src1Val, src2Val, pDstVal);
- }
- }
- else
- {
- #endif
- this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- #ifndef _M_ARM32_OR_ARM64
- }
- #endif
- break;
- }
- case Js::OpCode::InlineMathImul:
- {
- Assert(this->DoAggressiveIntTypeSpec());
- Assert(this->DoLossyIntTypeSpec());
- //Type specialize to int
- bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, false /* skipDst */);
- AssertMsg(retVal, "For imul, the args have to be type-specialized to int but something failed during the process.");
- break;
- }
- case Js::OpCode::InlineMathMin:
- case Js::OpCode::InlineMathMax:
- {
- if(src1Val->GetValueInfo()->IsLikelyInt() && src2Val->GetValueInfo()->IsLikelyInt())
- {
- // Compute resulting range info
- int32 min1 = INT32_MIN;
- int32 max1 = INT32_MAX;
- int32 min2 = INT32_MIN;
- int32 max2 = INT32_MAX;
- int32 newMin, newMax;
- Assert(this->DoAggressiveIntTypeSpec());
- src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
- src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
- if (instr->m_opcode == Js::OpCode::InlineMathMin)
- {
- newMin = min(min1, min2);
- newMax = min(max1, max2);
- }
- else
- {
- Assert(instr->m_opcode == Js::OpCode::InlineMathMax);
- newMin = max(min1, min2);
- newMax = max(max1, max2);
- }
- // Type specialize to int
- bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, newMin, newMax, false /* skipDst */);
- AssertMsg(retVal, "For min and max, the args have to be type-specialized to int if any one of the sources is an int, but something failed during the process.");
- }
- // Couldn't type specialize to int, type specialize to float
- else
- {
- Assert(this->DoFloatTypeSpec());
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- AssertMsg(retVal, "For min and max, the args have to be type-specialized to float if any one of the sources is a float, but something failed during the process.");
- }
- break;
- }
- case Js::OpCode::InlineArrayPush:
- {
- IR::Opnd *const thisOpnd = instr->GetSrc1();
- Assert(thisOpnd);
- if(instr->GetDst() && instr->GetDst()->IsRegOpnd())
- {
- // Set the dst as live here, as the built-ins return early from the TypeSpecialization functions - before the dst is marked as live.
- // Also, we are not specializing the dst separately and we are skipping the dst to be handled when we specialize the instruction above.
- this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
- }
- // Ensure src1 (Array) is a var
- this->ToVarUses(instr, thisOpnd, false, src1Val);
- if(!this->IsLoopPrePass())
- {
- if(thisOpnd->GetValueType().IsLikelyNativeArray())
- {
- // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during run time.
- GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
- }
- else
- {
- GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
- }
- }
- // Try Type Specializing the element based on the array's profile data.
- if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
- {
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- }
- if((thisOpnd->GetValueType().IsLikelyNativeIntArray() && this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, true))
- || (thisOpnd->GetValueType().IsLikelyNativeFloatArray() && this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal)))
- {
- break;
- }
- // The Element is not yet type specialized. Ensure element is a var
- this->ToVarUses(instr, instr->GetSrc2(), false, src2Val);
- break;
- }
- }
- }
- void
- GlobOpt::TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal)
- {
- IR::Instr *&instr = *pInstr;
- Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
- if (instr->m_opcode == Js::OpCode::InlineMathRandom)
- {
- Assert(this->DoFloatTypeSpec());
- // Type specialize dst to float
- this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
- }
- }
// Called after int-specialization of a unary instruction has been ruled out.
// Decides whether to attempt float-specialization instead, or to leave the
// instruction unspecialized. Returns true only if float specialization was
// performed (which can only happen on builds where INT32VAR is 0, i.e. where
// int32 values are not always taggable).
bool
GlobOpt::TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal)
{
    // It has been determined that this instruction cannot be int-specialized. We need to determine whether to attempt to
    // float-specialize the instruction, or leave it unspecialized.
#if !INT32VAR
    Value*& src1Val = *pSrc1Val;
    if(src1Val->GetValueInfo()->IsLikelyUntaggedInt())
    {
        // An input range is completely outside the range of an int31. Even if the operation may overflow, it is
        // unlikely to overflow on these operations, so we leave it unspecialized on 64-bit platforms. However, on
        // 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is significantly slower to
        // use in an unspecialized operation compared to a tagged int. So, try to float-specialize the instruction.
        src1Val = src1OriginalVal;
        return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
    }
#endif
    // Leave the instruction unspecialized.
    return false;
}
- bool
- GlobOpt::TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst /* = false */)
- {
- // Consider moving the code for int type spec-ing binary functions here.
- IR::Instr *&instr = *pInstr;
- bool lossy = false;
- if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
- {
- if(instr->m_opcode == Js::OpCode::InlineArrayPush)
- {
- int32 intConstantValue;
- bool isIntConstMissingItem = src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue);
- if(isIntConstMissingItem)
- {
- isIntConstMissingItem = Js::SparseArraySegment<int>::IsMissingItem(&intConstantValue);
- }
- // Don't specialize if the element is not likelyInt or an IntConst which is a missing item value.
- if(!(src2Val->GetValueInfo()->IsLikelyInt()) || isIntConstMissingItem)
- {
- return false;
- }
- // We don't want to specialize both the source operands, though it is a binary instr.
- IR::Opnd * elementOpnd = instr->GetSrc2();
- this->ToInt32(instr, elementOpnd, this->currentBlock, src2Val, nullptr, lossy);
- }
- else
- {
- IR::Opnd *src1 = instr->GetSrc1();
- this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
- IR::Opnd *src2 = instr->GetSrc2();
- this->ToInt32(instr, src2, this->currentBlock, src2Val, nullptr, lossy);
- }
- if(!skipDst)
- {
- IR::Opnd *dst = instr->GetDst();
- if (dst)
- {
- TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, min, max, pDstVal);
- }
- }
- return true;
- }
- else
- {
- AssertMsg(false, "Yet to move code for other binary functions here");
- return false;
- }
- }
// Attempts to type-specialize a unary (or unary-like) instruction to an int32
// operation. On success the opcode is rewritten to its _I4 form, the source is
// converted to int32, required bailout checks (overflow / negative zero) are
// attached, and the dst is int-specialized unless skipDst is set.
// pInstr          - in/out: the instruction; may be replaced when a bailout is attached.
// pSrc1Val        - in/out: value for src1; may be redirected to src1OriginalVal for lossy conversions.
// pDstVal         - out: receives the dst's value.
// min/max         - known int range of src1.
// src1OriginalVal - src1's value before any int-overflow-ignoring adjustments.
// redoTypeSpecRef - out: set to true if CSE succeeded and type spec should be redone.
// skipDst         - when true, the caller is responsible for specializing the dst.
// Returns true if the instruction was int-specialized (or const-folded).
bool
GlobOpt::TypeSpecializeIntUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    int32 min,
    int32 max,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    bool isTransfer = false;
    Js::OpCode opcode;
    int32 newMin, newMax;
    bool lossy = false;
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
    bool ignoredNegativeZero = false;
    bool checkTypeSpecWorth = false;

    // A source sym known not to be a number cannot be int-specialized; fall back
    // to the float-specialization helper.
    if(instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber)
    {
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    AddSubConstantInfo addSubConstantInfo;

    switch(instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (CurrentBlockData()->IsInt32TypeSpecialized(sym) == false)
            {
                // Type specializing an Ld_A isn't worth it, unless the src
                // is already type specialized.
                return false;
            }
        }
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::Conv_Num:
        // Conv_Num on an int is a pure transfer; the range is unchanged.
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::LdC_A_I4:
        // Loading an int constant: the range collapses to the constant itself.
        newMin = newMax = instr->GetSrc1()->AsIntConstOpnd()->AsInt32();
        opcode = Js::OpCode::Ld_I4;
        break;

    case Js::OpCode::Neg_A:
        // Negation may produce -0 (when the source can be 0) and may overflow
        // (when the source can be INT32_MIN); handle both hazards.
        if (min <= 0 && max >= 0)
        {
            if(instr->ShouldCheckForNegativeZero())
            {
                // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
                if(!DoAggressiveIntTypeSpec())
                {
                    // May result in -0
                    // Consider adding a dynamic check for src1 == 0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == 0 && max == 0)
                {
                    // Always results in -0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnNegativeZero;
            }
            else
            {
                ignoredNegativeZero = true;
            }
        }
        // Note: negating the source's min produces the result's max, and vice versa.
        if (Int32Math::Neg(min, &newMax))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == max)
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMax = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if (Int32Math::Neg(max, &newMin))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMin = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if(!instr->ShouldCheckForIntOverflow() && newMin > newMax)
        {
            // When ignoring overflow, the range needs to account for overflow. Since MIN_INT is the only int32 value that
            // overflows on Neg, and the value resulting from overflow is also MIN_INT, if calculating only the new min or new
            // max overflowed but not both, then the new min will be greater than the new max. In that case we need to consider
            // the full range of int32s as possible resulting values.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        opcode = Js::OpCode::Neg_I4;
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Not_A:
        if(!DoLossyIntTypeSpec())
        {
            return false;
        }
        this->PropagateIntRangeForNot(min, max, &newMin, &newMax);
        opcode = Js::OpCode::Not_I4;
        // Bitwise not is a lossy (ToInt32) conversion of its source.
        lossy = true;
        break;

    case Js::OpCode::Incr_A:
        do // while(false)
        {
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Inc(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
                    // we use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Inc(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMax = INT32_MAX;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Track (dst = src + 1) so relative int bounds can be derived for the dst.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, 1);
        }

        opcode = Js::OpCode::Add_I4;
        if (!this->IsLoopPrePass())
        {
            // Incr becomes Add with an explicit constant-1 second source.
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Decr_A:
        do // while(false)
        {
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Dec(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints, we
                    // use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Dec(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMin = INT32_MIN;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Track (dst = src - 1) so relative int bounds can be derived for the dst.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, -1);
        }

        opcode = Js::OpCode::Sub_I4;
        if (!this->IsLoopPrePass())
        {
            // Decr becomes Sub with an explicit constant-1 second source.
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        if(DoConstFold() && !IsLoopPrePass() && TryOptConstFoldBrFalse(instr, src1Val, min, max))
        {
            return true;
        }

        bool specialize = true;
        if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (CurrentBlockData()->IsInt32TypeSpecialized(sym) == false)
            {
                // Type specializing a BrTrue_A/BrFalse_A isn't worth it, unless the src
                // is already type specialized
                specialize = false;
            }
        }
        // Update int bounds in the branch targets even if we don't specialize.
        if(instr->m_opcode == Js::OpCode::BrTrue_A)
        {
            UpdateIntBoundsForNotEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrTrue_I4;
        }
        else
        {
            UpdateIntBoundsForEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrFalse_I4;
        }
        if(!specialize)
        {
            return false;
        }

        newMin = 2; newMax = 1;  // We'll assert if we make a range where min > max
        break;
    }

    case Js::OpCode::MultiBr:
        newMin = min;
        newMax = max;
        opcode = instr->m_opcode;
        break;

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::StElemC:
        // Element stores are handled by the StElem-specific specialization path.
        if(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyAnyArrayWithNativeFloatValues())
        {
            src1Val = src1OriginalVal;
        }
        return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

    // The following opcodes deliberately remain unspecialized.
    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
#if !FLOATVAR
    case Js::OpCode::StSlotBoxTemp:
#endif
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StFldStrict:
    case Js::OpCode::StRootFldStrict:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    // For this one we need to implement type specialization
    //case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::Ret:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
        return false;

    default:
        if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
        {
            newMin = min;
            newMax = max;
            opcode = instr->m_opcode;
            break; // Note: we must keep checkTypeSpecWorth = false to make sure we never return false from this function.
        }
        return false;
    }

    // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it (won't
    // leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value needs to be
    // guaranteed to be an int
    if(checkTypeSpecWorth &&
        !ignoredIntOverflow &&
        !ignoredNegativeZero &&
        instr->ShouldCheckForIntOverflow() &&
        !IsWorthSpecializingToInt32(instr, src1Val))
    {
        // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
        // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
        // the following cases.
        // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to be
        //   an int, but since we're not going to specialize this instruction, there won't be a bailout check.
        // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
        //   that case, especially if the dst sym is live on the back-edge.
        if(bailOutKind == IR::BailOutInvalid &&
            instr->GetDst() &&
            (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
        {
            *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, nullptr);
        }

        if(instr->GetSrc2())
        {
            // Undo the constant-1 src2 added for Incr_A/Decr_A above.
            instr->FreeSrc2();
        }
        return false;
    }

    this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
    this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;

    {
        // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
        Value *src1IndirIndexVal = nullptr;
        Value *src2Val = nullptr;
        if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
        {
            *redoTypeSpecRef = true;
            return false;
        }
    }

    const Js::OpCode originalOpCode = instr->m_opcode;
    if (!this->IsLoopPrePass())
    {
        // No re-write on prepass
        instr->m_opcode = opcode;
    }

    Value *src1ValueToSpecialize = src1Val;
    if(lossy)
    {
        // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
        // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
        // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
        // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
        // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
        // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
        src1ValueToSpecialize = src1OriginalVal;
    }

    // Make sure the srcs are specialized
    IR::Opnd *src1 = instr->GetSrc1();
    this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, lossy);

    if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, bailOutKind);
    }

    if (!skipDst)
    {
        IR::Opnd *dst = instr->GetDst();
        if (dst)
        {
            AssertMsg(!(isTransfer && !this->IsLoopPrePass()) || min == newMin && max == newMax, "If this is just a copy, old/new min/max should be the same");

            TypeSpecializeIntDst(
                instr,
                originalOpCode,
                isTransfer ? src1Val : nullptr,
                src1Val,
                nullptr,
                bailOutKind,
                newMin,
                newMax,
                pDstVal,
                addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
        }
    }

    if(bailOutKind == IR::BailOutInvalid)
    {
        GOPT_TRACE(_u("Type specialized to INT\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
        {
            Output::Print(_u("Type specialized to INT: "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
    }
    else
    {
        GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
        if(bailOutKind & IR::BailOutOnOverflow)
        {
            GOPT_TRACE(_u("    Overflow\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
        if(bailOutKind & IR::BailOutOnNegativeZero)
        {
            GOPT_TRACE(_u("    Zero\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
    }

    return true;
}
- void
- GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
- {
- this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, ValueType::GetInt(IntConstantBounds(newMin, newMax).IsLikelyTaggable()), newMin, newMax, pDstVal, addSubConstantInfo);
- }
- void
- GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
- {
- this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, valueType, 0, 0, pDstVal, addSubConstantInfo);
- }
// Core overload: creates (or transfers) the dst's value after int
// specialization and marks the dst reg opnd as int32-typed in the current block.
// valueType          - int type for the dst; when not definitely int, the (0, 0)
//                      sentinel range is required (see the first Assert).
// valToTransfer      - when non-null (must equal src1Value), the dst is a pure
//                      copy and the value number is transferred instead of creating a new value.
// addSubConstantInfo - when provided, the dst is (src +/- constant) and relative
//                      int bounds are derived from the source's value.
void
GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
{
    Assert(valueType.IsInt() || (valueType.IsNumber() && valueType.IsLikelyInt() && newMin == 0 && newMax == 0));
    Assert(!valToTransfer || valToTransfer == src1Value);
    Assert(!addSubConstantInfo || addSubConstantInfo->HasInfo());

    IR::Opnd *dst = instr->GetDst();
    Assert(dst);

    bool isValueInfoPrecise;
    if(IsLoopPrePass())
    {
        // In the prepass, source values may change on later iterations, so the
        // dst's value type may need to be weakened.
        isValueInfoPrecise = IsPrepassSrcValueInfoPrecise(instr, src1Value, src2Value);
        valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, isValueInfoPrecise);
    }
    else
    {
        isValueInfoPrecise = true;
    }

    // If dst has a circular reference in a loop, it probably won't get specialized. Don't mark the dst as type-specialized on
    // the pre-pass. With aggressive int spec though, it will take care of bailing out if necessary so there's no need to assume
    // that the dst will be a var even if it's live on the back-edge. Also if the op always produces an int32, then there's no
    // ambiguity in the dst's value type even in the prepass.
    if (!DoAggressiveIntTypeSpec() && this->IsLoopPrePass() && !valueType.IsInt())
    {
        if (dst->IsRegOpnd())
        {
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        return;
    }

    const IntBounds *dstBounds = nullptr;
    if(addSubConstantInfo && !addSubConstantInfo->SrcValueIsLikelyConstant() && DoTrackRelativeIntBounds())
    {
        Assert(!ignoredIntOverflowForCurrentInstr);

        // Track bounds for add or sub with a constant. For instance, consider (b = a + 2). The value of 'b' should track that
        // it is equal to (the value of 'a') + 2. Additionally, the value of 'b' should inherit the bounds of 'a', offset by
        // the constant value.
        if(!valueType.IsInt() || !isValueInfoPrecise)
        {
            // Without a precise int type, only the full int32 range can be assumed.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        dstBounds =
            IntBounds::Add(
                addSubConstantInfo->SrcValue(),
                addSubConstantInfo->Offset(),
                isValueInfoPrecise,
                IntConstantBounds(newMin, newMax),
                alloc);
    }

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // pre-pass.
    if (valToTransfer)
    {
        // If this is just a copy, no need for creating a new value.
        Assert(!addSubConstantInfo);
        *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
        CurrentBlockData()->InsertNewValue(*pDstVal, dst);
    }
    else if (valueType.IsInt() && isValueInfoPrecise)
    {
        bool wasNegativeZeroPreventedByBailout = false;
        if(newMin <= 0 && newMax >= 0)
        {
            // Determine whether -0 was already ruled out, either by the sources'
            // own tracking or by the bailout check attached to this instruction.
            switch(originalOpCode)
            {
                case Js::OpCode::Add_A:
                    // -0 + -0 == -0
                    Assert(src1Value);
                    Assert(src2Value);
                    wasNegativeZeroPreventedByBailout =
                        src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout() &&
                        src2Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Sub_A:
                    // -0 - 0 == -0
                    Assert(src1Value);
                    wasNegativeZeroPreventedByBailout = src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Neg_A:
                case Js::OpCode::Mul_A:
                case Js::OpCode::Div_A:
                case Js::OpCode::Rem_A:
                    wasNegativeZeroPreventedByBailout = !!(bailOutKind & IR::BailOutOnNegativeZero);
                    break;
            }
        }

        *pDstVal =
            dstBounds
                ? NewIntBoundedValue(valueType, dstBounds, wasNegativeZeroPreventedByBailout, nullptr)
                : NewIntRangeValue(newMin, newMax, wasNegativeZeroPreventedByBailout, nullptr);
    }
    else
    {
        *pDstVal = dstBounds ? NewIntBoundedValue(valueType, dstBounds, false, nullptr) : NewGenericValue(valueType);
    }

    if(addSubConstantInfo || updateInductionVariableValueNumber)
    {
        TrackIntSpecializedAddSubConstant(instr, addSubConstantInfo, *pDstVal, !!dstBounds);
    }

    CurrentBlockData()->SetValue(*pDstVal, dst);

    AssertMsg(dst->IsRegOpnd(), "What else?");
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
}
- bool
- GlobOpt::TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef)
- {
- IR::Instr *&instr = *pInstr;
- int32 min1 = INT32_MIN, max1 = INT32_MAX, min2 = INT32_MIN, max2 = INT32_MAX, newMin, newMax, tmp;
- Js::OpCode opcode;
- Value *&src1Val = *pSrc1Val;
- Value *&src2Val = *pSrc2Val;
- // We don't need to do typespec for asmjs
- if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
- {
- return false;
- }
- if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
- {
- this->TypeSpecializeInlineBuiltInBinary(pInstr, src1Val, src2Val, pDstVal, src1OriginalVal, src2OriginalVal);
- return true;
- }
- if (src1Val)
- {
- src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
- }
- if (src2Val)
- {
- src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
- }
- // Type specialize binary operators to int32
- bool src1Lossy = true;
- bool src2Lossy = true;
- IR::BailOutKind bailOutKind = IR::BailOutInvalid;
- bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
- bool ignoredNegativeZero = false;
- bool skipSrc2 = false;
- bool skipDst = false;
- bool needsBoolConv = false;
- AddSubConstantInfo addSubConstantInfo;
- switch (instr->m_opcode)
- {
- case Js::OpCode::Or_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
- opcode = Js::OpCode::Or_I4;
- break;
- case Js::OpCode::And_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
- opcode = Js::OpCode::And_I4;
- break;
- case Js::OpCode::Xor_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
- opcode = Js::OpCode::Xor_I4;
- break;
- case Js::OpCode::Shl_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
- opcode = Js::OpCode::Shl_I4;
- break;
- case Js::OpCode::Shr_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
- opcode = Js::OpCode::Shr_I4;
- break;
- case Js::OpCode::ShrU_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- if (min1 < 0 && IntConstantBounds(min2, max2).And_0x1f().Contains(0))
- {
- // Src1 may be too large to represent as a signed int32, and src2 may be zero. Unless the resulting value is only
- // used as a signed int32 (hence allowing us to ignore the result's sign), don't specialize the instruction.
- if (!instr->ignoreIntOverflow)
- return false;
- ignoredIntOverflow = true;
- }
- this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
- opcode = Js::OpCode::ShrU_I4;
- break;
- case Js::OpCode::BrUnLe_A:
- // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
- // int already, so require that both sources are likely int for folding.
- if (DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrUnsignedGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- if (min1 >= 0 && min2 >= 0)
- {
- // Only handle positive values since this is unsigned...
- // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
- // (INT32_MIN, INT32_MAX), so we're good.
- Assert(src1Val);
- Assert(src1Val->GetValueInfo()->IsLikelyInt());
- Assert(src2Val);
- Assert(src2Val->GetValueInfo()->IsLikelyInt());
- UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
- }
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = newMax = 0;
- opcode = Js::OpCode::BrUnLe_I4;
- break;
- case Js::OpCode::BrUnLt_A:
- // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
- // int already, so require that both sources are likely int for folding.
- if (DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrUnsignedLessThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- if (min1 >= 0 && min2 >= 0)
- {
- // Only handle positive values since this is unsigned...
- // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
- // (INT32_MIN, INT32_MAX), so we're good.
- Assert(src1Val);
- Assert(src1Val->GetValueInfo()->IsLikelyInt());
- Assert(src2Val);
- Assert(src2Val->GetValueInfo()->IsLikelyInt());
- UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
- }
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = newMax = 0;
- opcode = Js::OpCode::BrUnLt_I4;
- break;
- case Js::OpCode::BrUnGe_A:
- // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
- // int already, so require that both sources are likely int for folding.
- if (DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrUnsignedLessThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- if (min1 >= 0 && min2 >= 0)
- {
- // Only handle positive values since this is unsigned...
- // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
- // (INT32_MIN, INT32_MAX), so we're good.
- Assert(src1Val);
- Assert(src1Val->GetValueInfo()->IsLikelyInt());
- Assert(src2Val);
- Assert(src2Val->GetValueInfo()->IsLikelyInt());
- UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
- }
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = newMax = 0;
- opcode = Js::OpCode::BrUnGe_I4;
- break;
- case Js::OpCode::BrUnGt_A:
- // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
- // int already, so require that both sources are likely int for folding.
- if (DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrUnsignedGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- if (min1 >= 0 && min2 >= 0)
- {
- // Only handle positive values since this is unsigned...
- // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
- // (INT32_MIN, INT32_MAX), so we're good.
- Assert(src1Val);
- Assert(src1Val->GetValueInfo()->IsLikelyInt());
- Assert(src2Val);
- Assert(src2Val->GetValueInfo()->IsLikelyInt());
- UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
- }
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = newMax = 0;
- opcode = Js::OpCode::BrUnGt_I4;
- break;
- case Js::OpCode::CmUnLe_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmUnLe_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmUnLt_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmUnLt_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmUnGe_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmUnGe_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmUnGt_A:
- if (!DoLossyIntTypeSpec())
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmUnGt_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::Expo_A:
- {
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- }
- case Js::OpCode::Div_A:
- {
- ValueType specializedValueType = GetDivValueType(instr, src1Val, src2Val, true);
- if (specializedValueType.IsFloat())
- {
- // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
- // Note: We should really constant fold cst1%cst2...
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- }
- #ifdef _M_ARM
- if (!AutoSystemInfo::Data.ArmDivAvailable())
- {
- return false;
- }
- #endif
- if (specializedValueType.IsInt())
- {
- if (max2 == 0x80000000 || (min2 == 0 && max2 == 00))
- {
- return false;
- }
- if (min1 == 0x80000000 && min2 <= -1 && max2 >= -1)
- {
- // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
- // Or we know we are dividing by zero (which is weird to have because the profile data
- // says we got an int)
- bailOutKind = IR::BailOutOnDivOfMinInt;
- }
- src1Lossy = false; // Detect -0 on the sources
- src2Lossy = false;
- opcode = Js::OpCode::Div_I4;
- Assert(!instr->GetSrc1()->IsUnsigned());
- bailOutKind |= IR::BailOnDivResultNotInt;
- if (max2 >= 0 && min2 <= 0)
- {
- // Need to check for divide by zero if the denominator range includes 0
- bailOutKind |= IR::BailOutOnDivByZero;
- }
- if (max1 >= 0 && min1 <= 0)
- {
- // Numerator contains 0 so the result contains 0
- newMin = 0;
- newMax = 0;
- if (min2 < 0)
- {
- // Denominator may be negative, so the result could be negative 0
- if (instr->ShouldCheckForNegativeZero())
- {
- bailOutKind |= IR::BailOutOnNegativeZero;
- }
- else
- {
- ignoredNegativeZero = true;
- }
- }
- }
- else
- {
- // Initialize to invalid value, one of the conditions below will update it correctly
- newMin = INT_MAX;
- newMax = INT_MIN;
- }
- // Deal with the positive and negative range separately for both the numerator and the denominator,
- // and integrate to the overall min and max.
- // If the result is positive (positive/positive or negative/negative):
- // The min should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
- // divided by ---------------------------------------------------------------
- // largest magnitude denominator (positive_Max2 | negative_Min2)
- //
- // The max should be the largest magnitude numerator (positive_Max1 | negative_Max1)
- // divided by ---------------------------------------------------------------
- // smallest magnitude denominator (positive_Min2 | negative_Max2)
- // If the result is negative (positive/negative or negative/positive):
- // The min should be the largest magnitude numerator (positive_Max1 | negative_Min1)
- // divided by ---------------------------------------------------------------
- // smallest magnitude denominator (negative_Max2 | positive_Min2)
- //
- // The max should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
- // divided by ---------------------------------------------------------------
- // largest magnitude denominator (negative_Min2 | positive_Max2)
- // Consider: The range can be slightly more precise if we take care of the rounding
- if (max1 > 0)
- {
- // Take only the positive numerator range
- int32 positive_Min1 = max(1, min1);
- int32 positive_Max1 = max1;
- if (max2 > 0)
- {
- // Take only the positive denominator range
- int32 positive_Min2 = max(1, min2);
- int32 positive_Max2 = max2;
- // Positive / Positive
- int32 quadrant1_Min = positive_Min1 <= positive_Max2? 1 : positive_Min1 / positive_Max2;
- int32 quadrant1_Max = positive_Max1 <= positive_Min2? 1 : positive_Max1 / positive_Min2;
- Assert(1 <= quadrant1_Min && quadrant1_Min <= quadrant1_Max);
- // The result should be positive
- newMin = min(newMin, quadrant1_Min);
- newMax = max(newMax, quadrant1_Max);
- }
- if (min2 < 0)
- {
- // Take only the negative denominator range
- int32 negative_Min2 = min2;
- int32 negative_Max2 = min(-1, max2);
- // Positive / Negative
- int32 quadrant2_Min = -positive_Max1 >= negative_Max2? -1 : positive_Max1 / negative_Max2;
- int32 quadrant2_Max = -positive_Min1 >= negative_Min2? -1 : positive_Min1 / negative_Min2;
- // The result should be negative
- Assert(quadrant2_Min <= quadrant2_Max && quadrant2_Max <= -1);
- newMin = min(newMin, quadrant2_Min);
- newMax = max(newMax, quadrant2_Max);
- }
- }
- if (min1 < 0)
- {
- // Take only the negative numerator range
- int32 negative_Min1 = min1;
- int32 negative_Max1 = min(-1, max1);
- if (max2 > 0)
- {
- // Take only the positive denominator range
- int32 positive_Min2 = max(1, min2);
- int32 positive_Max2 = max2;
- // Negative / Positive
- int32 quadrant4_Min = negative_Min1 >= -positive_Min2? -1 : negative_Min1 / positive_Min2;
- int32 quadrant4_Max = negative_Max1 >= -positive_Max2? -1 : negative_Max1 / positive_Max2;
- // The result should be negative
- Assert(quadrant4_Min <= quadrant4_Max && quadrant4_Max <= -1);
- newMin = min(newMin, quadrant4_Min);
- newMax = max(newMax, quadrant4_Max);
- }
- if (min2 < 0)
- {
- // Take only the negative denominator range
- int32 negative_Min2 = min2;
- int32 negative_Max2 = min(-1, max2);
- int32 quadrant3_Min;
- int32 quadrant3_Max;
- // Negative / Negative
- if (negative_Max1 == 0x80000000 && negative_Min2 == -1)
- {
- quadrant3_Min = negative_Max1 >= negative_Min2? 1 : (negative_Max1+1) / negative_Min2;
- }
- else
- {
- quadrant3_Min = negative_Max1 >= negative_Min2? 1 : negative_Max1 / negative_Min2;
- }
- if (negative_Min1 == 0x80000000 && negative_Max2 == -1)
- {
- quadrant3_Max = negative_Min1 >= negative_Max2? 1 : (negative_Min1+1) / negative_Max2;
- }
- else
- {
- quadrant3_Max = negative_Min1 >= negative_Max2? 1 : negative_Min1 / negative_Max2;
- }
- // The result should be positive
- Assert(1 <= quadrant3_Min && quadrant3_Min <= quadrant3_Max);
- newMin = min(newMin, quadrant3_Min);
- newMax = max(newMax, quadrant3_Max);
- }
- }
- Assert(newMin <= newMax);
- // Continue to int type spec
- break;
- }
- }
- // fall-through
- default:
- {
- const bool involesLargeInt32 =
- (src1Val && src1Val->GetValueInfo()->IsLikelyUntaggedInt()) ||
- (src2Val && src2Val->GetValueInfo()->IsLikelyUntaggedInt());
- const auto trySpecializeToFloat =
- [&](const bool mayOverflow) -> bool
- {
- // It has been determined that this instruction cannot be int-specialized. Need to determine whether to attempt
- // to float-specialize the instruction, or leave it unspecialized.
- if((involesLargeInt32
- #if INT32VAR
- && mayOverflow
- #endif
- ) || (instr->m_opcode == Js::OpCode::Mul_A && !this->DoAggressiveMulIntTypeSpec())
- )
- {
- // An input range is completely outside the range of an int31 and the operation is likely to overflow.
- // Additionally, on 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is
- // significantly slower to use in an unspecialized operation compared to a tagged int. So, try to
- // float-specialize the instruction.
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- return TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- }
- return false;
- };
- if (instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
- {
- if ((src1Val && src1Val->GetValueInfo()->IsLikelyFloat()) || (src2Val && src2Val->GetValueInfo()->IsLikelyFloat()))
- {
- // Try to type specialize to float
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- }
- if (src1Val == nullptr ||
- src2Val == nullptr ||
- !src1Val->GetValueInfo()->IsLikelyInt() ||
- !src2Val->GetValueInfo()->IsLikelyInt() ||
- (
- !DoAggressiveIntTypeSpec() &&
- (
- !(src1Val->GetValueInfo()->IsInt() || CurrentBlockData()->IsSwitchInt32TypeSpecialized(instr)) ||
- !src2Val->GetValueInfo()->IsInt()
- )
- ) ||
- (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber) ||
- (instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->m_isNotNumber))
- {
- return trySpecializeToFloat(true);
- }
- }
- // Try to type specialize to int32
- // If one of the values is a float constant with a value that fits in a uint32 but not an int32,
- // and the instruction can ignore int overflow, the source value for the purposes of int specialization
- // would have been changed to an int constant value by ignoring overflow. But, the conversion is still lossy.
- if (!(src1OriginalVal && src1OriginalVal->GetValueInfo()->IsFloatConstant() && src1Val && src1Val->GetValueInfo()->HasIntConstantValue()))
- {
- src1Lossy = false;
- }
- if (!(src2OriginalVal && src2OriginalVal->GetValueInfo()->IsFloatConstant() && src2Val && src2Val->GetValueInfo()->HasIntConstantValue()))
- {
- src2Lossy = false;
- }
- switch(instr->m_opcode)
- {
- case Js::OpCode::ArgOut_A_InlineBuiltIn:
- // If the src is already type-specialized, if we don't type-specialize ArgOut_A_InlineBuiltIn instr, we'll get additional ToVar.
- // So, to avoid that, type-specialize the ArgOut_A_InlineBuiltIn instr.
- // Else we don't need to type-specialize the instr, we are fine with src being Var.
- if (instr->GetSrc1()->IsRegOpnd())
- {
- StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
- if (CurrentBlockData()->IsInt32TypeSpecialized(sym))
- {
- opcode = instr->m_opcode;
- skipDst = true; // We should keep dst as is, otherwise the link opnd for next ArgOut/InlineBuiltInStart would be broken.
- skipSrc2 = true; // src2 is linkOpnd. We don't need to type-specialize it.
- newMin = min1; newMax = max1; // Values don't matter, these are unused.
- goto LOutsideSwitch; // Continue to int-type-specialize.
- }
- else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
- {
- src1Val = src1OriginalVal;
- src2Val = src2OriginalVal;
- return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
- }
- }
- return false;
- case Js::OpCode::Add_A:
- do // while(false)
- {
- const auto CannotOverflowBasedOnRelativeBounds = [&](int32 *const constantValueRef)
- {
- Assert(constantValueRef);
- if(min2 == max2 &&
- src1Val->GetValueInfo()->IsIntBounded() &&
- src1Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min2))
- {
- *constantValueRef = min2;
- return true;
- }
- else if(
- min1 == max1 &&
- src2Val->GetValueInfo()->IsIntBounded() &&
- src2Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min1))
- {
- *constantValueRef = min1;
- return true;
- }
- return false;
- };
- if (Int32Math::Add(min1, min2, &newMin))
- {
- int32 constantSrcValue;
- if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
- {
- newMin = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
- }
- else if(instr->ShouldCheckForIntOverflow())
- {
- if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnOverflow;
- newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
- }
- else
- {
- // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since
- // overflow causes the value to wrap around, and we don't have a way to specify a lower and upper
- // range of ints, we use the full range of int32s.
- ignoredIntOverflow = true;
- newMin = INT32_MIN;
- newMax = INT32_MAX;
- break;
- }
- }
- if (Int32Math::Add(max1, max2, &newMax))
- {
- int32 constantSrcValue;
- if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
- {
- newMax = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
- }
- else if(instr->ShouldCheckForIntOverflow())
- {
- if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnOverflow;
- newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
- }
- else
- {
- // See comment about ignoring overflow above
- ignoredIntOverflow = true;
- newMin = INT32_MIN;
- newMax = INT32_MAX;
- break;
- }
- }
- if(bailOutKind & IR::BailOutOnOverflow)
- {
- Assert(bailOutKind == IR::BailOutOnOverflow);
- Assert(instr->ShouldCheckForIntOverflow());
- int32 temp;
- if(Int32Math::Add(
- Int32Math::NearestInRangeTo(0, min1, max1),
- Int32Math::NearestInRangeTo(0, min2, max2),
- &temp))
- {
- // Always overflows
- return trySpecializeToFloat(true);
- }
- }
- } while(false);
- if (!this->IsLoopPrePass() && newMin == newMax && bailOutKind == IR::BailOutInvalid)
- {
- // Take care of Add with zero here, since we know we're dealing with 2 numbers.
- this->CaptureByteCodeSymUses(instr);
- IR::Opnd *src;
- bool isAddZero = true;
- int32 intConstantValue;
- if (src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
- {
- src = instr->UnlinkSrc2();
- instr->FreeSrc1();
- }
- else if (src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
- {
- src = instr->UnlinkSrc1();
- instr->FreeSrc2();
- }
- else
- {
- // This should have been handled by const folding, unless:
- // - A source's value was substituted with a different value here, which is after const folding happened
- // - A value is not definitely int, but once converted to definite int, it would be zero due to a
- // condition in the source code such as if(a === 0). Ideally, we would specialize the sources and
- // remove the add, but doesn't seem too important for now.
- Assert(
- !DoConstFold() ||
- src1Val != src1OriginalVal ||
- src2Val != src2OriginalVal ||
- !src1Val->GetValueInfo()->IsInt() ||
- !src2Val->GetValueInfo()->IsInt());
- isAddZero = false;
- src = nullptr;
- }
- if (isAddZero)
- {
- IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Ld_A, instr->UnlinkDst(), src, instr->m_func);
- newInstr->SetByteCodeOffset(instr);
- instr->m_opcode = Js::OpCode::Nop;
- this->currentBlock->InsertInstrAfter(newInstr, instr);
- return true;
- }
- }
- if(!ignoredIntOverflow)
- {
- if(min2 == max2 &&
- (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
- instr->GetSrc1()->IsRegOpnd())
- {
- addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, min2);
- }
- else if(
- min1 == max1 &&
- (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Val)) &&
- instr->GetSrc2()->IsRegOpnd())
- {
- addSubConstantInfo.Set(instr->GetSrc2()->AsRegOpnd()->m_sym, src2Val, min2 == max2, min1);
- }
- }
- opcode = Js::OpCode::Add_I4;
- break;
- case Js::OpCode::Sub_A:
- do // while(false)
- {
- const auto CannotOverflowBasedOnRelativeBounds = [&]()
- {
- return
- min2 == max2 &&
- src1Val->GetValueInfo()->IsIntBounded() &&
- src1Val->GetValueInfo()->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(min2);
- };
- if (Int32Math::Sub(min1, max2, &newMin))
- {
- if(CannotOverflowBasedOnRelativeBounds())
- {
- Assert(min2 == max2);
- newMin = min2 >= 0 ? INT32_MIN : INT32_MAX;
- }
- else if(instr->ShouldCheckForIntOverflow())
- {
- if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnOverflow;
- newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
- }
- else
- {
- // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
- // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
- // we use the full range of int32s.
- ignoredIntOverflow = true;
- newMin = INT32_MIN;
- newMax = INT32_MAX;
- break;
- }
- }
- if (Int32Math::Sub(max1, min2, &newMax))
- {
- if(CannotOverflowBasedOnRelativeBounds())
- {
- Assert(min2 == max2);
- newMax = min2 >= 0 ? INT32_MIN: INT32_MAX;
- }
- else if(instr->ShouldCheckForIntOverflow())
- {
- if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnOverflow;
- newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
- }
- else
- {
- // See comment about ignoring overflow above
- ignoredIntOverflow = true;
- newMin = INT32_MIN;
- newMax = INT32_MAX;
- break;
- }
- }
- if(bailOutKind & IR::BailOutOnOverflow)
- {
- Assert(bailOutKind == IR::BailOutOnOverflow);
- Assert(instr->ShouldCheckForIntOverflow());
- int32 temp;
- if(Int32Math::Sub(
- Int32Math::NearestInRangeTo(-1, min1, max1),
- Int32Math::NearestInRangeTo(0, min2, max2),
- &temp))
- {
- // Always overflows
- return trySpecializeToFloat(true);
- }
- }
- } while(false);
- if(!ignoredIntOverflow &&
- min2 == max2 &&
- min2 != INT32_MIN &&
- (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
- instr->GetSrc1()->IsRegOpnd())
- {
- addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, -min2);
- }
- opcode = Js::OpCode::Sub_I4;
- break;
- case Js::OpCode::Mul_A:
- {
- bool isConservativeMulInt = !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec();
- // Be conservative about predicting Mul overflow in prepass.
- // Operands that are live on back edge may be denied lossless-conversion to int32 and
- // trigger rejit with AggressiveIntTypeSpec off.
- // Besides multiplying a variable in a loop can overflow in just a few iterations even in simple cases like v *= 2
- // So, make sure we definitely know the source max/min values, otherwise assume the full range.
- if (isConservativeMulInt && IsLoopPrePass())
- {
- if (!IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Val))
- {
- max1 = INT32_MAX;
- min1 = INT32_MIN;
- }
- if (!IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val))
- {
- max2 = INT32_MAX;
- min2 = INT32_MIN;
- }
- }
- if (Int32Math::Mul(min1, min2, &newMin))
- {
- if (involesLargeInt32 || isConservativeMulInt)
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnMulOverflow;
- newMin = (min1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
- }
- newMax = newMin;
- if (Int32Math::Mul(max1, max2, &tmp))
- {
- if (involesLargeInt32 || isConservativeMulInt)
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnMulOverflow;
- tmp = (max1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
- }
- newMin = min(newMin, tmp);
- newMax = max(newMax, tmp);
- if (Int32Math::Mul(min1, max2, &tmp))
- {
- if (involesLargeInt32 || isConservativeMulInt)
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnMulOverflow;
- tmp = (min1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
- }
- newMin = min(newMin, tmp);
- newMax = max(newMax, tmp);
- if (Int32Math::Mul(max1, min2, &tmp))
- {
- if (involesLargeInt32 || isConservativeMulInt)
- {
- // May overflow
- return trySpecializeToFloat(true);
- }
- bailOutKind |= IR::BailOutOnMulOverflow;
- tmp = (max1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
- }
- newMin = min(newMin, tmp);
- newMax = max(newMax, tmp);
- if (bailOutKind & IR::BailOutOnMulOverflow)
- {
- // CSE only if two MULs have the same overflow check behavior.
- // Currently this is set to be ignore int32 overflow, but not 53-bit, or int32 overflow matters.
- if (!instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
- {
- // If we allow int to overflow then there can be anything in the resulting int
- newMin = IntConstMin;
- newMax = IntConstMax;
- ignoredIntOverflow = true;
- }
- int32 temp, overflowValue;
- if (Int32Math::Mul(
- Int32Math::NearestInRangeTo(0, min1, max1),
- Int32Math::NearestInRangeTo(0, min2, max2),
- &temp,
- &overflowValue))
- {
- Assert(instr->ignoreOverflowBitCount >= 32);
- int overflowMatters = 64 - instr->ignoreOverflowBitCount;
- if (!ignoredIntOverflow ||
- // Use shift to check high bits in case its negative
- ((overflowValue << overflowMatters) >> overflowMatters) != overflowValue
- )
- {
- // Always overflows
- return trySpecializeToFloat(true);
- }
- }
- }
- if (newMin <= 0 && newMax >= 0 && // New range crosses zero
- (min1 < 0 || min2 < 0) && // An operand's range contains a negative integer
- !(min1 > 0 || min2 > 0) && // Neither operand's range contains only positive integers
- !instr->GetSrc1()->IsEqual(instr->GetSrc2())) // The operands don't have the same value
- {
- if (instr->ShouldCheckForNegativeZero())
- {
- // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
- if (!DoAggressiveIntTypeSpec())
- {
- // May result in -0
- return trySpecializeToFloat(false);
- }
- if (((min1 == 0 && max1 == 0) || (min2 == 0 && max2 == 0)) && (max1 < 0 || max2 < 0))
- {
- // Always results in -0
- return trySpecializeToFloat(false);
- }
- bailOutKind |= IR::BailOutOnNegativeZero;
- }
- else
- {
- ignoredNegativeZero = true;
- }
- }
- opcode = Js::OpCode::Mul_I4;
- break;
- }
- case Js::OpCode::Rem_A:
- {
- IR::Opnd* src2 = instr->GetSrc2();
- if (!this->IsLoopPrePass() && min2 == max2 && min1 >= 0)
- {
- int32 value = min2;
- if (value == (1 << Math::Log2(value)) && src2->IsAddrOpnd())
- {
- Assert(src2->AsAddrOpnd()->IsVar());
- instr->m_opcode = Js::OpCode::And_A;
- src2->AsAddrOpnd()->SetAddress(Js::TaggedInt::ToVarUnchecked(value - 1),
- IR::AddrOpndKindConstantVar);
- *pSrc2Val = GetIntConstantValue(value - 1, instr);
- src2Val = *pSrc2Val;
- return this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2Val, redoTypeSpecRef);
- }
- }
- #ifdef _M_ARM
- if (!AutoSystemInfo::Data.ArmDivAvailable())
- {
- return false;
- }
- #endif
- if (min1 < 0)
- {
- // The most negative it can be is min1, unless limited by min2/max2
- int32 negMaxAbs2;
- if (min2 == INT32_MIN)
- {
- negMaxAbs2 = INT32_MIN;
- }
- else
- {
- negMaxAbs2 = -max(abs(min2), abs(max2)) + 1;
- }
- newMin = max(min1, negMaxAbs2);
- }
- else
- {
- newMin = 0;
- }
- bool isModByPowerOf2 = (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo() &&
- instr->m_func->GetReadOnlyProfileInfo()->IsModulusOpByPowerOf2(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId)));
- if(isModByPowerOf2)
- {
- Assert(bailOutKind == IR::BailOutInvalid);
- bailOutKind = IR::BailOnModByPowerOf2;
- newMin = 0;
- }
- else
- {
- if (min2 <= 0 && max2 >= 0)
- {
- // Consider: We could handle the zero case with a check and bailout...
- return false;
- }
- if (min1 == 0x80000000 && (min2 <= -1 && max2 >= -1))
- {
- // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
- return false;
- }
- if (min1 < 0)
- {
- if(instr->ShouldCheckForNegativeZero())
- {
- if (!DoAggressiveIntTypeSpec())
- {
- return false;
- }
- bailOutKind |= IR::BailOutOnNegativeZero;
- }
- else
- {
- ignoredNegativeZero = true;
- }
- }
- }
- {
- int32 absMax2;
- if (min2 == INT32_MIN)
- {
- // abs(INT32_MIN) == INT32_MAX because of overflow
- absMax2 = INT32_MAX;
- }
- else
- {
- absMax2 = max(abs(min2), abs(max2)) - 1;
- }
- newMax = min(absMax2, max(max1, 0));
- newMax = max(newMin, newMax);
- }
- opcode = Js::OpCode::Rem_I4;
- Assert(!instr->GetSrc1()->IsUnsigned());
- break;
- }
- case Js::OpCode::CmEq_A:
- case Js::OpCode::CmSrEq_A:
- if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmEq_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmNeq_A:
- case Js::OpCode::CmSrNeq_A:
- if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmNeq_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmLe_A:
- if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmLe_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmLt_A:
- if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmLt_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmGe_A:
- if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmGe_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::CmGt_A:
- if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
- {
- return false;
- }
- newMin = 0;
- newMax = 1;
- opcode = Js::OpCode::CmGt_I4;
- needsBoolConv = true;
- break;
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrNotNeq_A:
- case Js::OpCode::BrSrNotNeq_A:
- {
- if(DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
- UpdateIntBoundsForEqualBranch(src1Val, src2Val);
- if(!specialize)
- {
- return false;
- }
- opcode = Js::OpCode::BrEq_I4;
- // We'll get a warning if we don't assign a value to these...
- // We'll assert if we use them and make a range where min > max
- newMin = 2; newMax = 1;
- break;
- }
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- case Js::OpCode::BrNotEq_A:
- {
- if(DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
- UpdateIntBoundsForNotEqualBranch(src1Val, src2Val);
- if(!specialize)
- {
- return false;
- }
- opcode = Js::OpCode::BrNeq_I4;
- // We'll get a warning if we don't assign a value to these...
- // We'll assert if we use them and make a range where min > max
- newMin = 2; newMax = 1;
- break;
- }
- case Js::OpCode::BrGt_A:
- case Js::OpCode::BrNotLe_A:
- {
- if(DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
- UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
- if(!specialize)
- {
- return false;
- }
- opcode = Js::OpCode::BrGt_I4;
- // We'll get a warning if we don't assign a value to these...
- // We'll assert if we use them and make a range where min > max
- newMin = 2; newMax = 1;
- break;
- }
- case Js::OpCode::BrGe_A:
- case Js::OpCode::BrNotLt_A:
- {
- if(DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrGreaterThanOrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
- UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
- if(!specialize)
- {
- return false;
- }
- opcode = Js::OpCode::BrGe_I4;
- // We'll get a warning if we don't assign a value to these...
- // We'll assert if we use them and make a range where min > max
- newMin = 2; newMax = 1;
- break;
- }
- case Js::OpCode::BrLt_A:
- case Js::OpCode::BrNotGe_A:
- {
- if(DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrGreaterThanOrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
- UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
- if(!specialize)
- {
- return false;
- }
- opcode = Js::OpCode::BrLt_I4;
- // We'll get a warning if we don't assign a value to these...
- // We'll assert if we use them and make a range where min > max
- newMin = 2; newMax = 1;
- break;
- }
- case Js::OpCode::BrLe_A:
- case Js::OpCode::BrNotGt_A:
- {
- if(DoConstFold() &&
- !IsLoopPrePass() &&
- TryOptConstFoldBrGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
- {
- return true;
- }
- const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
- UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
- if(!specialize)
- {
- return false;
- }
- opcode = Js::OpCode::BrLe_I4;
- // We'll get a warning if we don't assign a value to these...
- // We'll assert if we use them and make a range where min > max
- newMin = 2; newMax = 1;
- break;
- }
- default:
- return false;
- }
- // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it
- // (won't leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value
- // needs to be guaranteed to be an int
- if(!ignoredIntOverflow &&
- !ignoredNegativeZero &&
- !needsBoolConv &&
- instr->ShouldCheckForIntOverflow() &&
- !IsWorthSpecializingToInt32(instr, src1Val, src2Val))
- {
- // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
- // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
- // the following cases.
- // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to
- // be an int, but since we're not going to specialize this instruction, there won't be a bailout check.
- // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
- // that case, especially if the dst sym is live on the back-edge.
- if(bailOutKind == IR::BailOutInvalid &&
- instr->GetDst() &&
- src1Val->GetValueInfo()->IsInt() &&
- src2Val->GetValueInfo()->IsInt() &&
- (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
- {
- *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
- }
- return false;
- }
- } // case default
- } // switch
- LOutsideSwitch:
- this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
- this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;
- {
- // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
- Value *src1IndirIndexVal = nullptr;
- if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
- {
- *redoTypeSpecRef = true;
- return false;
- }
- }
- const Js::OpCode originalOpCode = instr->m_opcode;
- if (!this->IsLoopPrePass())
- {
- // No re-write on prepass
- instr->m_opcode = opcode;
- }
- Value *src1ValueToSpecialize = src1Val, *src2ValueToSpecialize = src2Val;
- // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
- // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
- // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
- // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
- // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
- // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
- if(src1Lossy)
- {
- src1ValueToSpecialize = src1OriginalVal;
- }
- if (src2Lossy)
- {
- src2ValueToSpecialize = src2OriginalVal;
- }
- // Make sure the srcs are specialized
- IR::Opnd* src1 = instr->GetSrc1();
- this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, src1Lossy);
- if (!skipSrc2)
- {
- IR::Opnd* src2 = instr->GetSrc2();
- this->ToInt32(instr, src2, this->currentBlock, src2ValueToSpecialize, nullptr, src2Lossy);
- }
- if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
- {
- GenerateBailAtOperation(&instr, bailOutKind);
- }
- if (!skipDst && instr->GetDst())
- {
- if (needsBoolConv)
- {
- IR::RegOpnd *varDst;
- if (this->IsLoopPrePass())
- {
- varDst = instr->GetDst()->AsRegOpnd();
- this->ToVarRegOpnd(varDst, this->currentBlock);
- }
- else
- {
- // Generate:
- // t1.i = CmCC t2.i, t3.i
- // t1.v = Conv_bool t1.i
- //
- // If the only uses of t1 are ints, the conv_bool will get dead-stored
- TypeSpecializeIntDst(instr, originalOpCode, nullptr, src1Val, src2Val, bailOutKind, newMin, newMax, pDstVal);
- IR::RegOpnd *intDst = instr->GetDst()->AsRegOpnd();
- intDst->SetIsJITOptimizedReg(true);
- varDst = IR::RegOpnd::New(intDst->m_sym->GetVarEquivSym(this->func), TyVar, this->func);
- IR::Instr *convBoolInstr = IR::Instr::New(Js::OpCode::Conv_Bool, varDst, intDst, this->func);
- // In some cases (e.g. unsigned compare peep code), a comparison will use variables
- // other than the ones initially intended for it, if we can determine that we would
- // arrive at the same result. This means that we get a ByteCodeUses operation after
- // the actual comparison. Since Inserting the Conv_bool just after the compare, and
- // just before the ByteCodeUses, would cause issues later on with register lifetime
- // calculation, we want to insert the Conv_bool after the whole compare instruction
- // block.
- IR::Instr *putAfter = instr;
- while (putAfter->m_next && putAfter->m_next->IsByteCodeUsesInstrFor(instr))
- {
- putAfter = putAfter->m_next;
- }
- putAfter->InsertAfter(convBoolInstr);
- convBoolInstr->SetByteCodeOffset(instr);
- this->ToVarRegOpnd(varDst, this->currentBlock);
- CurrentBlockData()->liveInt32Syms->Set(varDst->m_sym->m_id);
- CurrentBlockData()->liveLossyInt32Syms->Set(varDst->m_sym->m_id);
- }
- *pDstVal = this->NewGenericValue(ValueType::Boolean, varDst);
- }
- else
- {
- TypeSpecializeIntDst(
- instr,
- originalOpCode,
- nullptr,
- src1Val,
- src2Val,
- bailOutKind,
- newMin,
- newMax,
- pDstVal,
- addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
- }
- }
- if(bailOutKind == IR::BailOutInvalid)
- {
- GOPT_TRACE(_u("Type specialized to INT\n"));
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
- {
- Output::Print(_u("Type specialized to INT: "));
- Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
- }
- #endif
- }
- else
- {
- GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
- if(bailOutKind & (IR::BailOutOnOverflow | IR::BailOutOnMulOverflow) )
- {
- GOPT_TRACE(_u(" Overflow\n"));
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
- {
- Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
- Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
- }
- #endif
- }
- if(bailOutKind & IR::BailOutOnNegativeZero)
- {
- GOPT_TRACE(_u(" Zero\n"));
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
- {
- Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
- Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
- }
- #endif
- }
- }
- return true;
- }
- bool
- GlobOpt::IsWorthSpecializingToInt32Branch(IR::Instr const * instr, Value const * src1Val, Value const * src2Val) const
- {
- if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
- {
- StackSym const *sym1 = instr->GetSrc1()->AsRegOpnd()->m_sym;
- if (CurrentBlockData()->IsInt32TypeSpecialized(sym1) == false)
- {
- if (!src2Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc2()->IsRegOpnd())
- {
- StackSym const *sym2 = instr->GetSrc2()->AsRegOpnd()->m_sym;
- if (CurrentBlockData()->IsInt32TypeSpecialized(sym2) == false)
- {
- // Type specializing a Br itself isn't worth it, unless one src
- // is already type specialized
- return false;
- }
- }
- }
- }
- return true;
- }
- bool
- GlobOpt::TryOptConstFoldBrFalse(
- IR::Instr *const instr,
- Value *const srcValue,
- const int32 min,
- const int32 max)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
- Assert(srcValue);
- if(!(DoAggressiveIntTypeSpec() ? srcValue->GetValueInfo()->IsLikelyInt() : srcValue->GetValueInfo()->IsInt()))
- {
- return false;
- }
- if(ValueInfo::IsEqualTo(srcValue, min, max, nullptr, 0, 0))
- {
- OptConstFoldBr(instr->m_opcode == Js::OpCode::BrFalse_A, instr, srcValue);
- return true;
- }
- if(ValueInfo::IsNotEqualTo(srcValue, min, max, nullptr, 0, 0))
- {
- OptConstFoldBr(instr->m_opcode == Js::OpCode::BrTrue_A, instr, srcValue);
- return true;
- }
- return false;
- }
- bool
- GlobOpt::TryOptConstFoldBrEqual(
- IR::Instr *const instr,
- const bool branchOnEqual,
- Value *const src1Value,
- const int32 min1,
- const int32 max1,
- Value *const src2Value,
- const int32 min2,
- const int32 max2)
- {
- Assert(instr);
- Assert(src1Value);
- Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
- Assert(src2Value);
- Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
- if(ValueInfo::IsEqualTo(src1Value, min1, max1, src2Value, min2, max2))
- {
- OptConstFoldBr(branchOnEqual, instr, src1Value, src2Value);
- return true;
- }
- if(ValueInfo::IsNotEqualTo(src1Value, min1, max1, src2Value, min2, max2))
- {
- OptConstFoldBr(!branchOnEqual, instr, src1Value, src2Value);
- return true;
- }
- return false;
- }
- bool
- GlobOpt::TryOptConstFoldBrGreaterThan(
- IR::Instr *const instr,
- const bool branchOnGreaterThan,
- Value *const src1Value,
- const int32 min1,
- const int32 max1,
- Value *const src2Value,
- const int32 min2,
- const int32 max2)
- {
- Assert(instr);
- Assert(src1Value);
- Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
- Assert(src2Value);
- Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
- if(ValueInfo::IsGreaterThan(src1Value, min1, max1, src2Value, min2, max2))
- {
- OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
- return true;
- }
- if(ValueInfo::IsLessThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
- {
- OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
- return true;
- }
- return false;
- }
- bool
- GlobOpt::TryOptConstFoldBrGreaterThanOrEqual(
- IR::Instr *const instr,
- const bool branchOnGreaterThanOrEqual,
- Value *const src1Value,
- const int32 min1,
- const int32 max1,
- Value *const src2Value,
- const int32 min2,
- const int32 max2)
- {
- Assert(instr);
- Assert(src1Value);
- Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
- Assert(src2Value);
- Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
- if(ValueInfo::IsGreaterThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
- {
- OptConstFoldBr(branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
- return true;
- }
- if(ValueInfo::IsLessThan(src1Value, min1, max1, src2Value, min2, max2))
- {
- OptConstFoldBr(!branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
- return true;
- }
- return false;
- }
- bool
- GlobOpt::TryOptConstFoldBrUnsignedLessThan(
- IR::Instr *const instr,
- const bool branchOnLessThan,
- Value *const src1Value,
- const int32 min1,
- const int32 max1,
- Value *const src2Value,
- const int32 min2,
- const int32 max2)
- {
- Assert(DoConstFold());
- Assert(!IsLoopPrePass());
- if(!src1Value ||
- !src2Value ||
- !(
- DoAggressiveIntTypeSpec()
- ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
- : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
- ))
- {
- return false;
- }
- uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
- uint uMax1 = max((uint)min1, (uint)max1);
- uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
- uint uMax2 = max((uint)min2, (uint)max2);
- if (uMax1 < uMin2)
- {
- // Range 1 is always lesser than Range 2
- OptConstFoldBr(branchOnLessThan, instr, src1Value, src2Value);
- return true;
- }
- if (uMin1 >= uMax2)
- {
- // Range 2 is always lesser than Range 1
- OptConstFoldBr(!branchOnLessThan, instr, src1Value, src2Value);
- return true;
- }
- return false;
- }
- bool
- GlobOpt::TryOptConstFoldBrUnsignedGreaterThan(
- IR::Instr *const instr,
- const bool branchOnGreaterThan,
- Value *const src1Value,
- const int32 min1,
- const int32 max1,
- Value *const src2Value,
- const int32 min2,
- const int32 max2)
- {
- Assert(DoConstFold());
- Assert(!IsLoopPrePass());
- if(!src1Value ||
- !src2Value ||
- !(
- DoAggressiveIntTypeSpec()
- ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
- : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
- ))
- {
- return false;
- }
- uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
- uint uMax1 = max((uint)min1, (uint)max1);
- uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
- uint uMax2 = max((uint)min2, (uint)max2);
- if (uMin1 > uMax2)
- {
- // Range 1 is always greater than Range 2
- OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
- return true;
- }
- if (uMax1 <= uMin2)
- {
- // Range 2 is always greater than Range 1
- OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
- return true;
- }
- return false;
- }
- void
- GlobOpt::SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info)
- {
- Assert(this->currentBlock->GetSuccList()->Count() == 2);
- IR::Instr * fallthrough = this->currentBlock->GetNext()->GetFirstInstr();
- FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, this->currentBlock->GetSuccList())
- {
- if (conditionToBranch == (edge->GetSucc()->GetFirstInstr() != fallthrough))
- {
- edge->SetPathDependentInfo(info, alloc);
- return;
- }
- }
- NEXT_SLISTBASECOUNTED_ENTRY;
- // In case flowgraph peeps is disabled, we could have conditional branch to next instr
- Assert(this->func->HasTry() || PHASE_OFF(Js::FGPeepsPhase, this->func));
- }
// Narrows the int bounds of the left/right values involved in a path-dependent relationship
// (recorded on a branch edge, e.g. "left < right") for the path about to be processed.
// Returns the value infos that were replaced so RestorePathDependentInfo can undo the
// narrowing afterwards; a null side in the returned info means that side was not changed.
PathDependentInfoToRestore
GlobOpt::UpdatePathDependentInfo(PathDependentInfo *const info)
{
    Assert(info);
    if(!info->HasInfo())
    {
        return PathDependentInfoToRestore();
    }

    // Select the bounds-update member function for each side. The right side gets the
    // mirrored relation (left < right implies right > left).
    decltype(&GlobOpt::UpdateIntBoundsForEqual) UpdateIntBoundsForLeftValue, UpdateIntBoundsForRightValue;
    switch(info->Relationship())
    {
        case PathDependentRelationship::Equal:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForEqual;
            break;
        case PathDependentRelationship::NotEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            break;
        case PathDependentRelationship::GreaterThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            break;
        case PathDependentRelationship::GreaterThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThan;
            break;
        case PathDependentRelationship::LessThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            break;
        case PathDependentRelationship::LessThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            break;
        default:
            Assert(false);
            __assume(false);
    }

    ValueInfo *leftValueInfo = info->LeftValue()->GetValueInfo();
    IntConstantBounds leftConstantBounds;
    AssertVerify(leftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));

    // The right side may be a value or a plain constant; for a constant, synthesize a
    // degenerate [c, c] bounds range and leave its value info null.
    ValueInfo *rightValueInfo;
    IntConstantBounds rightConstantBounds;
    if(info->RightValue())
    {
        rightValueInfo = info->RightValue()->GetValueInfo();
        AssertVerify(rightValueInfo->TryGetIntConstantBounds(&rightConstantBounds, true));
    }
    else
    {
        rightValueInfo = nullptr;
        rightConstantBounds = IntConstantBounds(info->RightConstantValue(), info->RightConstantValue());
    }

    // Narrow the left value first.
    ValueInfo *const newLeftValueInfo =
        (this->*UpdateIntBoundsForLeftValue)(
            info->LeftValue(),
            leftConstantBounds,
            info->RightValue(),
            rightConstantBounds,
            true);
    if(newLeftValueInfo)
    {
        ChangeValueInfo(nullptr, info->LeftValue(), newLeftValueInfo);
        // Refresh the left bounds so the right side is narrowed against the updated range.
        AssertVerify(newLeftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));
    }
    else
    {
        // Left side unchanged; null it so RestorePathDependentInfo leaves it alone.
        leftValueInfo = nullptr;
    }

    // Now narrow the right value against the (possibly updated) left bounds.
    ValueInfo *const newRightValueInfo =
        (this->*UpdateIntBoundsForRightValue)(
            info->RightValue(),
            rightConstantBounds,
            info->LeftValue(),
            leftConstantBounds,
            true);
    if(newRightValueInfo)
    {
        ChangeValueInfo(nullptr, info->RightValue(), newRightValueInfo);
    }
    else
    {
        // Right side unchanged (or a constant); nothing to restore for it.
        rightValueInfo = nullptr;
    }

    return PathDependentInfoToRestore(leftValueInfo, rightValueInfo);
}
- void
- GlobOpt::RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore)
- {
- Assert(info);
- if(infoToRestore.LeftValueInfo())
- {
- Assert(info->LeftValue());
- ChangeValueInfo(nullptr, info->LeftValue(), infoToRestore.LeftValueInfo());
- }
- if(infoToRestore.RightValueInfo())
- {
- Assert(info->RightValue());
- ChangeValueInfo(nullptr, info->RightValue(), infoToRestore.RightValueInfo());
- }
- }
// Tries to type specialize a unary (or transfer) operation to float64.
// pInstr   - in/out: the instruction (StElem cases are handed off to TypeSpecializeStElem)
// src1Val  - the value of src1
// pDstVal  - out: the value assigned to the dst, if any
// skipDst  - true to leave the dst unspecialized (e.g. arg outs to inline built-ins)
// Returns true if the instruction was float-specialized.
bool
GlobOpt::TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *src1;
    IR::Opnd *dst;
    Js::OpCode opcode = instr->m_opcode;
    Value *valueToTransfer = nullptr;

    Assert(src1Val && src1Val->GetValueInfo()->IsLikelyNumber() || OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    if (!this->DoFloatTypeSpec())
    {
        return false;
    }

    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (opcode)
        {
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // The built-in consumes the arg; only the src needs specialization.
            skipDst = true;
            // fall-through
        case Js::OpCode::Ld_A:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if (CurrentBlockData()->IsFloat64TypeSpecialized(sym) == false)
                {
                    // Type specializing an Ld_A isn't worth it, unless the src
                    // is already type specialized
                    return false;
                }
            }
            if (instr->m_opcode == Js::OpCode::Ld_A)
            {
                // Pure transfer: the dst takes the src's value number.
                valueToTransfer = src1Val;
            }
            break;

        case Js::OpCode::Neg_A:
            break;

        case Js::OpCode::Conv_Num:
            // Conv_Num on a number becomes a plain transfer (Ld_A) after specialization.
            Assert(src1Val);
            opcode = Js::OpCode::Ld_A;
            valueToTransfer = src1Val;
            if (!src1Val->GetValueInfo()->IsNumber())
            {
                // Src is not proven to be a number: give the dst a fresh float value instead
                // of transferring the src's.
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                valueToTransfer = NewGenericValue(ValueType::Float, instr->GetDst()->GetStackSym());
                if (CurrentBlockData()->IsFloat64TypeSpecialized(sym) == false)
                {
                    // Set the dst as a nonDeadStore. We want to keep the Ld_A to prevent the FromVar from
                    // being dead-stored, as it could cause implicit calls.
                    dst = instr->GetDst();
                    dst->AsRegOpnd()->m_dontDeadStore = true;
                }
            }
            break;

        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        case Js::OpCode::StElemC:
            // Array stores have their own specialization path.
            return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

        default:
            return false;
        }
    }

    // Make sure the srcs are specialized
    src1 = instr->GetSrc1();

    // Use original val when calling toFloat64 as this is what we'll use to try hoisting the fromVar if we're in a loop.
    this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);

    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            this->TypeSpecializeFloatDst(instr, valueToTransfer, src1Val, nullptr, pDstVal);
            if (!this->IsLoopPrePass())
            {
                // Commit the (possibly rewritten) opcode; no IR rewrite on prepass.
                instr->m_opcode = opcode;
            }
        }
    }

    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif
    return true;
}
- // Unconditionally type-spec dst to float.
- void
- GlobOpt::TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal)
- {
- IR::Opnd* dst = instr->GetDst();
- Assert(dst);
- AssertMsg(dst->IsRegOpnd(), "What else?");
- this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
- if(valToTransfer)
- {
- *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
- CurrentBlockData()->InsertNewValue(*pDstVal, dst);
- }
- else
- {
- *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Value, src2Value);
- }
- }
// Tries to type specialize LdLen_A (array/typed-array length load) to an int32 result.
// instrRef                  - in/out: the instruction (may be rewritten to Ld_I4)
// src1ValueRef              - in/out: value of src1; replaced by the hoisted length value when one exists
// dstValueRef               - out: the int value produced for the dst
// forceInvariantHoistingRef - out: set when the resulting Ld_I4 should be invariant-hoisted
// Returns true if the instruction was int-specialized.
bool
GlobOpt::TypeSpecializeLdLen(
    IR::Instr * *const instrRef,
    Value * *const src1ValueRef,
    Value * *const dstValueRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(instrRef);
    IR::Instr *&instr = *instrRef;
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(src1ValueRef);
    Value *&src1Value = *src1ValueRef;
    Assert(dstValueRef);
    Value *&dstValue = *dstValueRef;
    Assert(forceInvariantHoistingRef);
    bool &forceInvariantHoisting = *forceInvariantHoistingRef;

    if(!DoLdLenIntSpec(instr, instr->GetSrc1()->GetValueType()))
    {
        return false;
    }

    // Lengths fit in int32 except for irregular arrays; bail out on those.
    IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
    if(!IsLoopPrePass())
    {
        IR::RegOpnd *const baseOpnd = instr->GetSrc1()->AsRegOpnd();
        if(baseOpnd->IsArrayRegOpnd())
        {
            // If the array's length was already loaded into a sym (by array checks), reuse it:
            // rewrite the LdLen into a Ld_I4 of that sym, with no bailout needed.
            StackSym *const lengthSym = baseOpnd->AsArrayRegOpnd()->LengthSym();
            if(lengthSym)
            {
                CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Ld_I4;
                instr->ReplaceSrc1(IR::RegOpnd::New(lengthSym, lengthSym->GetType(), func));
                instr->ClearBailOutInfo();

                // Find the hoisted length value
                Value *const lengthValue = CurrentBlockData()->FindValue(lengthSym);
                Assert(lengthValue);
                src1Value = lengthValue;
                ValueInfo *const lengthValueInfo = lengthValue->GetValueInfo();
                IntConstantBounds lengthConstantBounds;
                AssertVerify(lengthValueInfo->TryGetIntConstantBounds(&lengthConstantBounds));
                Assert(lengthConstantBounds.LowerBound() >= 0);

                if (lengthValueInfo->GetSymStore() == lengthSym)
                {
                    // When type specializing the dst below, we will end up inserting lengthSym.u32 as symstore for a var
                    // Clear the symstore here, so that we dont end up with problems with copyprop later on
                    lengthValueInfo->SetSymStore(nullptr);
                }

                // Int-specialize, and transfer the value to the dst
                TypeSpecializeIntDst(
                    instr,
                    Js::OpCode::LdLen_A,
                    src1Value,
                    src1Value,
                    nullptr,
                    bailOutKind,
                    lengthConstantBounds.LowerBound(),
                    lengthConstantBounds.UpperBound(),
                    &dstValue);

                // Try to force hoisting the Ld_I4 so that the length will have an invariant sym store that can be
                // copy-propped. Invariant hoisting does not automatically hoist Ld_I4.
                forceInvariantHoisting = true;
                return true;
            }
        }

        // No hoisted length sym: attach the irregular-length bailout, merging with an existing
        // mark-temp-object bailout if one is already on the instruction.
        if (instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutKind() == IR::BailOutMarkTempObject);
            bailOutKind = IR::BailOutOnIrregularLength | IR::BailOutMarkTempObject;
            instr->SetBailOutKind(bailOutKind);
        }
        else
        {
            Assert(bailOutKind == IR::BailOutOnIrregularLength);
            GenerateBailAtOperation(&instr, bailOutKind);
        }
    }

    // The bailout guarantees the result is a non-negative int32.
    TypeSpecializeIntDst(
        instr,
        Js::OpCode::LdLen_A,
        nullptr,
        nullptr,
        nullptr,
        bailOutKind,
        0,
        INT32_MAX,
        &dstValue);
    return true;
}
// Tries to type specialize a binary operation to float64.
// instr    - the instruction
// src1Val  - value of src1
// src2Val  - value of src2
// pDstVal  - out: the value assigned to the dst, if any
// Returns true if the instruction was float-specialized.
// NOTE(review): most cases dereference src1Val/src2Val without a null check (only Add_A
// checks) — presumably callers guarantee non-null values for these opcodes; confirm.
bool
GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal)
{
    IR::Opnd *src1;
    IR::Opnd *src2;
    IR::Opnd *dst;
    // Per-opcode controls for how the srcs/dst are converted:
    // allowUndefinedOrNullSrcN - whether the ToFloat64 bailout tolerates undefined/null
    // skipSrcN / skipDst       - leave that operand unspecialized
    // convertDstToBool         - dst is a boolean (compare ops), not a float
    bool allowUndefinedOrNullSrc1 = true;
    bool allowUndefinedOrNullSrc2 = true;
    bool skipSrc1 = false;
    bool skipSrc2 = false;
    bool skipDst = false;
    bool convertDstToBool = false;

    if (!this->DoFloatTypeSpec())
    {
        return false;
    }

    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::Sub_A:
        case Js::OpCode::Mul_A:
        case Js::OpCode::Div_A:
        case Js::OpCode::Expo_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            break;

        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotNeq_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            // Undef == Undef, but +Undef != +Undef
            // 0.0 != null, but 0.0 == +null
            //
            // So Bailout on anything but numbers for both src1 and src2
            allowUndefinedOrNullSrc1 = false;
            allowUndefinedOrNullSrc2 = false;
            break;

        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            break;

        case Js::OpCode::Add_A:
            // For Add, we need both sources to be Numbers, otherwise it could be a string concat
            if (!src1Val || !src2Val || !(src1Val->GetValueInfo()->IsLikelyNumber() && src2Val->GetValueInfo()->IsLikelyNumber()))
            {
                return false;
            }
            break;

        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Only the first src (the arg) needs specialization.
            skipSrc2 = true;
            skipDst = true;
            break;

        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrNeq_A:
        {
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            // Same undefined/null hazard as the equality branches above: bail out on
            // non-number srcs, and produce a boolean dst.
            allowUndefinedOrNullSrc1 = false;
            allowUndefinedOrNullSrc2 = false;
            convertDstToBool = true;
            break;
        }

        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        {
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            convertDstToBool = true;
            break;
        }

        default:
            return false;
        }
    }
    else
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::InlineArrayPush:
            // Don't specialize a push of a float constant that encodes the missing-item sentinel.
            bool isFloatConstMissingItem = src2Val->GetValueInfo()->IsFloatConstant();
            if(isFloatConstMissingItem)
            {
                FloatConstType floatValue = src2Val->GetValueInfo()->AsFloatConstant()->FloatValue();
                isFloatConstMissingItem = Js::SparseArraySegment<double>::IsMissingItem(&floatValue);
            }
            // Don't specialize if the element is not likelyNumber - we will surely bailout
            if(!(src2Val->GetValueInfo()->IsLikelyNumber()) || isFloatConstMissingItem)
            {
                return false;
            }
            // Only specialize the Second source - element
            skipSrc1 = true;
            skipDst = true;
            allowUndefinedOrNullSrc2 = false;
            break;
        }
    }

    // Make sure the srcs are specialized
    if(!skipSrc1)
    {
        src1 = instr->GetSrc1();
        this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, (allowUndefinedOrNullSrc1 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
    }

    if (!skipSrc2)
    {
        src2 = instr->GetSrc2();
        this->ToFloat64(instr, src2, this->currentBlock, src2Val, nullptr, (allowUndefinedOrNullSrc2 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
    }

    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            if (convertDstToBool)
            {
                // Compare ops produce a boolean var dst, not a float64-specialized one.
                *pDstVal = CreateDstUntransferredValue(ValueType::Boolean, instr, src1Val, src2Val);
                ToVarRegOpnd(dst->AsRegOpnd(), currentBlock);
            }
            else
            {
                *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Val, src2Val);
                AssertMsg(dst->IsRegOpnd(), "What else?");
                this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
            }
        }
    }

    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif
    return true;
}
- bool
- GlobOpt::TypeSpecializeStElem(IR::Instr ** pInstr, Value *src1Val, Value **pDstVal)
- {
- IR::Instr *&instr = *pInstr;
- IR::RegOpnd *baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
- ValueType baseValueType(baseOpnd->GetValueType());
- if (instr->DoStackArgsOpt() ||
- (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
- (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
- !(baseValueType.IsLikelyOptimizedTypedArray() || baseValueType.IsLikelyNativeArray()))
- {
- GOPT_TRACE_INSTR(instr, _u("Didn't type specialize array access, because typed array type specialization is disabled, or base is not an optimized typed array.\n"));
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because %s.\n"),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
- baseValueTypeStr,
- instr->DoStackArgsOpt() ?
- _u("instruction uses the arguments object") :
- _u("typed array type specialization is disabled, or base is not an optimized typed array"));
- Output::Flush();
- }
- return false;
- }
- Assert(instr->GetSrc1()->IsRegOpnd() || (src1Val && src1Val->GetValueInfo()->HasIntConstantValue()));
- StackSym *sym = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd()->m_sym : nullptr;
- // Only type specialize the source of store element if the source symbol is already type specialized to int or float.
- if (sym)
- {
- if (baseValueType.IsLikelyNativeArray())
- {
- // Gently coerce these src's into native if it seems likely to work.
- // Otherwise we can't use the fast path to store.
- // But don't try to put a float-specialized number into an int array this way.
- if (!(
- CurrentBlockData()->IsInt32TypeSpecialized(sym) ||
- (
- src1Val &&
- (
- DoAggressiveIntTypeSpec()
- ? src1Val->GetValueInfo()->IsLikelyInt()
- : src1Val->GetValueInfo()->IsInt()
- )
- )
- ))
- {
- if (!(
- CurrentBlockData()->IsFloat64TypeSpecialized(sym) ||
- (src1Val && src1Val->GetValueInfo()->IsLikelyNumber())
- ) ||
- baseValueType.HasIntElements())
- {
- return false;
- }
- }
- }
- else if (!CurrentBlockData()->IsInt32TypeSpecialized(sym) && !CurrentBlockData()->IsFloat64TypeSpecialized(sym))
- {
- GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because src is not type specialized.\n"));
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because src is not specialized.\n"),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
- baseValueTypeStr);
- Output::Flush();
- }
- return false;
- }
- }
- int32 src1IntConstantValue;
- if(baseValueType.IsLikelyNativeIntArray() && src1Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue))
- {
- if(Js::SparseArraySegment<int32>::IsMissingItem(&src1IntConstantValue))
- {
- return false;
- }
- }
- // Note: doing ToVarUses to make sure we do get the int32 version of the index before trying to access its value in
- // ShouldExpectConventionalArrayIndexValue. Not sure why that never gave us a problem before.
- Assert(instr->GetDst()->IsIndirOpnd());
- IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
- // Make sure we use the int32 version of the index operand symbol, if available. Otherwise, ensure the var symbol is live (by
- // potentially inserting a ToVar).
- this->ToVarUses(instr, dst, /* isDst = */ true, nullptr);
- if (!ShouldExpectConventionalArrayIndexValue(dst))
- {
- GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because index is negative or likely not int.\n"));
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because index is negative or likely not int.\n"),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
- baseValueTypeStr);
- Output::Flush();
- }
- return false;
- }
- IRType toType = TyVar;
- bool isLossyAllowed = true;
- IR::BailOutKind arrayBailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;
- switch(baseValueType.GetObjectType())
- {
- case ObjectType::Int8Array:
- case ObjectType::Uint8Array:
- case ObjectType::Int16Array:
- case ObjectType::Uint16Array:
- case ObjectType::Int32Array:
- case ObjectType::Int8VirtualArray:
- case ObjectType::Uint8VirtualArray:
- case ObjectType::Int16VirtualArray:
- case ObjectType::Uint16VirtualArray:
- case ObjectType::Int32VirtualArray:
- case ObjectType::Int8MixedArray:
- case ObjectType::Uint8MixedArray:
- case ObjectType::Int16MixedArray:
- case ObjectType::Uint16MixedArray:
- case ObjectType::Int32MixedArray:
- Int32Array:
- if (this->DoAggressiveIntTypeSpec() || this->DoFloatTypeSpec())
- {
- toType = TyInt32;
- }
- break;
- case ObjectType::Uint32Array:
- case ObjectType::Uint32VirtualArray:
- case ObjectType::Uint32MixedArray:
- // Uint32Arrays may store values that overflow int32. If the value being stored comes from a symbol that's
- // already losslessly type specialized to int32, we'll use it. Otherwise, if we only have a float64 specialized
- // value, we don't want to force bailout if it doesn't fit in int32. Instead, we'll emit conversion in the
- // lowerer, and handle overflow, if necessary.
- if (!sym || CurrentBlockData()->IsInt32TypeSpecialized(sym))
- {
- toType = TyInt32;
- }
- else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
- {
- toType = TyFloat64;
- }
- break;
- case ObjectType::Float32Array:
- case ObjectType::Float64Array:
- case ObjectType::Float32VirtualArray:
- case ObjectType::Float32MixedArray:
- case ObjectType::Float64VirtualArray:
- case ObjectType::Float64MixedArray:
- Float64Array:
- if (this->DoFloatTypeSpec())
- {
- toType = TyFloat64;
- }
- break;
- case ObjectType::Uint8ClampedArray:
- case ObjectType::Uint8ClampedVirtualArray:
- case ObjectType::Uint8ClampedMixedArray:
- // Uint8ClampedArray requires rounding (as opposed to truncation) of floating point values. If source symbol is
- // float type specialized, type specialize this instruction to float as well, and handle rounding in the
- // lowerer.
- if (!sym || CurrentBlockData()->IsInt32TypeSpecialized(sym))
- {
- toType = TyInt32;
- isLossyAllowed = false;
- }
- else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
- {
- toType = TyFloat64;
- }
- break;
- default:
- Assert(baseValueType.IsLikelyNativeArray());
- isLossyAllowed = false;
- arrayBailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
- if(baseValueType.HasIntElements())
- {
- goto Int32Array;
- }
- Assert(baseValueType.HasFloatElements());
- goto Float64Array;
- }
- if (toType != TyVar)
- {
- GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s.\n"),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
- baseValueTypeStr,
- toType == TyInt32 ? _u("int32") : _u("float64"));
- Output::Flush();
- }
- IR::BailOutKind bailOutKind = ((toType == TyInt32) ? IR::BailOutIntOnly : IR::BailOutNumberOnly);
- this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, toType, bailOutKind, /* lossy = */ isLossyAllowed);
- if (!this->IsLoopPrePass())
- {
- bool bConvertToBailoutInstr = true;
- // Definite StElemC doesn't need bailout, because it can't fail or cause conversion.
- if (instr->m_opcode == Js::OpCode::StElemC && baseValueType.IsObject())
- {
- if (baseValueType.HasIntElements())
- {
- //Native int array requires a missing element check & bailout
- int32 min = INT32_MIN;
- int32 max = INT32_MAX;
- if (src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, false))
- {
- bConvertToBailoutInstr = ((min <= Js::JavascriptNativeIntArray::MissingItem) && (max >= Js::JavascriptNativeIntArray::MissingItem));
- }
- }
- else
- {
- bConvertToBailoutInstr = false;
- }
- }
- if (bConvertToBailoutInstr)
- {
- if(instr->HasBailOutInfo())
- {
- const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
- Assert(
- (
- !(oldBailOutKind & ~IR::BailOutKindBits) ||
- (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
- ) &&
- !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
- if(arrayBailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
- {
- // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
- // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
- // bails out for the right reason.
- instr->SetBailOutKind(
- arrayBailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
- }
- else
- {
- // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
- // calls to occur, so it must be merged in to eliminate generating the helper call.
- Assert(arrayBailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
- instr->SetBailOutKind(oldBailOutKind | arrayBailOutKind);
- }
- }
- else
- {
- GenerateBailAtOperation(&instr, arrayBailOutKind);
- }
- }
- }
- }
- else
- {
- GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because the source was not already specialized.\n"));
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because of array type.\n"),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
- baseValueTypeStr);
- Output::Flush();
- }
- }
- return toType != TyVar;
- }
- IR::Instr *
- GlobOpt::ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val)
- {
- Sym *sym;
- switch (opnd->GetKind())
- {
- case IR::OpndKindReg:
- if (!isDst && !CurrentBlockData()->liveVarSyms->Test(opnd->AsRegOpnd()->m_sym->m_id))
- {
- instr = this->ToVar(instr, opnd->AsRegOpnd(), this->currentBlock, val, true);
- }
- break;
- case IR::OpndKindSym:
- sym = opnd->AsSymOpnd()->m_sym;
- if (sym->IsPropertySym() && !CurrentBlockData()->liveVarSyms->Test(sym->AsPropertySym()->m_stackSym->m_id)
- && sym->AsPropertySym()->m_stackSym->IsVar())
- {
- StackSym *propertyBase = sym->AsPropertySym()->m_stackSym;
- IR::RegOpnd *newOpnd = IR::RegOpnd::New(propertyBase, TyVar, instr->m_func);
- instr = this->ToVar(instr, newOpnd, this->currentBlock, CurrentBlockData()->FindValue(propertyBase), true);
- }
- break;
- case IR::OpndKindIndir:
- IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
- if (!CurrentBlockData()->liveVarSyms->Test(baseOpnd->m_sym->m_id))
- {
- instr = this->ToVar(instr, baseOpnd, this->currentBlock, CurrentBlockData()->FindValue(baseOpnd->m_sym), true);
- }
- IR::RegOpnd *indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
- if (indexOpnd && !indexOpnd->m_sym->IsTypeSpec())
- {
- instr = ToTypeSpecIndex(instr, indexOpnd, opnd->AsIndirOpnd());
- }
- break;
- }
- return instr;
- }
IR::Instr *
GlobOpt::ToTypeSpecIndex(IR::Instr * instr, IR::RegOpnd * indexOpnd, IR::IndirOpnd * indirOpnd)
{
    // Type-specialize an element-access index: if the index is (likely) an int and int
    // type spec is enabled, convert the use to the int32-specialized sym via ToInt32;
    // otherwise fall back to making the var version of the index live. When 'indirOpnd'
    // is null, the index being specialized is instr's src1; otherwise it is the index
    // operand of 'indirOpnd'. Returns 'instr' (possibly replaced by ToVar).
    Assert(indirOpnd != nullptr || indexOpnd == instr->GetSrc1());

    // Getter/setter initialization opcodes are excluded from index type spec.
    bool isGetterOrSetter = instr->m_opcode == Js::OpCode::InitGetElemI ||
        instr->m_opcode == Js::OpCode::InitSetElemI ||
        instr->m_opcode == Js::OpCode::InitClassMemberGetComputedName ||
        instr->m_opcode == Js::OpCode::InitClassMemberSetComputedName;

    // A definitely-int index only needs type spec to be on at all; a merely likely-int
    // index additionally requires aggressive int type spec (the conversion may bail out).
    if (!isGetterOrSetter // typespec is disabled for getters, setters
        && (indexOpnd->GetValueType().IsInt()
            ? !IsTypeSpecPhaseOff(func)
            : indexOpnd->GetValueType().IsLikelyInt() && DoAggressiveIntTypeSpec())
        && !GetIsAsmJSFunc()) // typespec is disabled for asmjs
    {
        StackSym *const indexVarSym = indexOpnd->m_sym;
        Value *const indexValue = CurrentBlockData()->FindValue(indexVarSym);
        Assert(indexValue);
        Assert(indexValue->GetValueInfo()->IsLikelyInt());
        // Lossless conversion; replaces the index operand inside 'indirOpnd' (or src1)
        // with the int32-specialized sym.
        ToInt32(instr, indexOpnd, currentBlock, indexValue, indirOpnd, false);
        Assert(indexValue->GetValueInfo()->IsInt() || IsLoopPrePass());
        if (!IsLoopPrePass())
        {
            // Re-fetch the (now replaced) index operand. It can be null if ToInt32
            // const-propped the index into the indir's offset.
            IR::Opnd * intOpnd = indirOpnd ? indirOpnd->GetIndexOpnd() : instr->GetSrc1();
            if (intOpnd != nullptr)
            {
                Assert(!intOpnd->IsRegOpnd() || intOpnd->AsRegOpnd()->m_sym->IsTypeSpec());
                IntConstantBounds indexConstantBounds;
                AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
                // If the index is proven >= 0, mark the operand as uint32 so later
                // phases can treat it as a non-negative index.
                if (ValueInfo::IsGreaterThanOrEqualTo(
                        indexValue,
                        indexConstantBounds.LowerBound(),
                        indexConstantBounds.UpperBound(),
                        nullptr,
                        0,
                        0))
                {
                    intOpnd->SetType(TyUint32);
                }
            }
        }
    }
    else if (!CurrentBlockData()->liveVarSyms->Test(indexOpnd->m_sym->m_id))
    {
        // Not specializing the index: just make sure its var version is live.
        instr = this->ToVar(instr, indexOpnd, this->currentBlock, CurrentBlockData()->FindValue(indexOpnd->m_sym), true);
    }
    return instr;
}
IR::Instr *
GlobOpt::ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *value, bool needsUpdate)
{
    // Make the var-typed version of regOpnd's sym live in 'block' by inserting a
    // conversion before 'instr' from the currently live type-specialized copy:
    // LdC_A_I4 when the value is a known int constant, otherwise ToVar from the int32
    // or float64 equivalent sym. 'value' may be null (it is then looked up in the
    // block's data). When 'needsUpdate' is true, the kill effects of the inserted
    // instruction are processed and a property-sym src of 'instr' is re-optimized.
    // Returns 'instr', possibly replaced by SetTypeCheckBailOut.
    IR::Instr *newInstr;
    StackSym *varSym = regOpnd->m_sym;

    if (IsTypeSpecPhaseOff(this->func))
    {
        return instr;
    }

    if (this->IsLoopPrePass())
    {
        // In the prepass only track liveness; no instructions are inserted.
        block->globOptData.liveVarSyms->Set(varSym->m_id);
        return instr;
    }

    if (block->globOptData.liveVarSyms->Test(varSym->m_id))
    {
        // Already live, nothing to do
        return instr;
    }

    if (!varSym->IsVar())
    {
        Assert(!varSym->IsTypeSpec());
        // Leave non-vars alone.
        return instr;
    }

    // Not live as a var, so some type-specialized copy must be live.
    Assert(block->globOptData.IsTypeSpecialized(varSym));

    if (!value)
    {
        value = block->globOptData.FindValue(varSym);
    }

    ValueInfo *valueInfo = value ? value->GetValueInfo() : nullptr;
    if(valueInfo && valueInfo->IsInt())
    {
        // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
        // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
        // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
        // lossy state.
        block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
    }

    // Determine which type-specialized copy to convert from.
    IRType fromType = TyIllegal;
    StackSym *typeSpecSym = nullptr;

    if (block->globOptData.liveInt32Syms->Test(varSym->m_id) && !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id))
    {
        fromType = TyInt32;
        typeSpecSym = varSym->GetInt32EquivSym(this->func);
        Assert(valueInfo);
        Assert(valueInfo->IsInt());
    }
    else if (block->globOptData.liveFloat64Syms->Test(varSym->m_id))
    {
        fromType = TyFloat64;
        typeSpecSym = varSym->GetFloat64EquivSym(this->func);

        // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any non-number
        // value, even ones that have already been generated before. Float-specialized non-number values cannot be converted
        // back to Var since they will not go back to the original non-number value. The dead-store pass will update the bailout
        // kind on already-generated FromVars based on this bit.
        typeSpecSym->m_requiresBailOnNotNumber = true;

        // A previous float conversion may have used BailOutPrimitiveButString, which does not change the value type to say
        // definitely float, since it can also be a non-string primitive. The convert back to Var though, will cause that
        // bailout kind to be changed to BailOutNumberOnly in the dead-store phase, so from the point of the initial conversion
        // to float, that the value is definitely number. Since we don't know where the FromVar is, change the value type here.
        if(valueInfo)
        {
            if(!valueInfo->IsNumber())
            {
                valueInfo = valueInfo->SpecializeToFloat64(alloc);
                ChangeValueInfo(block, value, valueInfo);
                regOpnd->SetValueType(valueInfo->Type());
            }
        }
        else
        {
            value = NewGenericValue(ValueType::Float);
            valueInfo = value->GetValueInfo();
            block->globOptData.SetValue(value, varSym);
            regOpnd->SetValueType(valueInfo->Type());
        }
    }
    else
    {
        Assert(UNREACHED);
    }
    AssertOrFailFast(valueInfo);

    int32 intConstantValue;
    if (valueInfo->TryGetIntConstantValue(&intConstantValue))
    {
        // Lower will tag or create a number directly
        newInstr = IR::Instr::New(Js::OpCode::LdC_A_I4, regOpnd,
            IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func), instr->m_func);
    }
    else
    {
        IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, fromType, instr->m_func);
        Js::OpCode opcode = Js::OpCode::ToVar;
        regNew->SetIsJITOptimizedReg(true);
        newInstr = IR::Instr::New(opcode, regOpnd, regNew, instr->m_func);
    }
    newInstr->SetByteCodeOffset(instr);
    newInstr->GetDst()->AsRegOpnd()->SetIsJITOptimizedReg(true);

    ValueType valueType = valueInfo->Type();
    if(fromType == TyInt32)
    {
    #if !INT32VAR // All 32-bit ints are taggable on 64-bit architectures
        IntConstantBounds constantBounds;
        AssertVerify(valueInfo->TryGetIntConstantBounds(&constantBounds));
        if(constantBounds.IsTaggable())
    #endif
        {
            // The value is within the taggable range, so set the opnd value types to TaggedInt to avoid the overflow check
            valueType = ValueType::GetTaggedInt();
        }
    }
    newInstr->GetDst()->SetValueType(valueType);
    newInstr->GetSrc1()->SetValueType(valueType);

    // Insert the conversion before 'instr', but above any ByteCodeUses instrs that
    // immediately precede a block-ending branch/bail target.
    IR::Instr *insertAfterInstr = instr->m_prev;
    if (instr == block->GetLastInstr() &&
        (instr->IsBranchInstr() || instr->m_opcode == Js::OpCode::BailTarget))
    {
        // Don't insert code between the branch and the preceding ByteCodeUses instrs...
        while(insertAfterInstr->m_opcode == Js::OpCode::ByteCodeUses)
        {
            insertAfterInstr = insertAfterInstr->m_prev;
        }
    }
    block->InsertInstrAfter(newInstr, insertAfterInstr);

    block->globOptData.liveVarSyms->Set(varSym->m_id);

    GOPT_TRACE_OPND(regOpnd, _u("Converting to var\n"));

    if (block->loop)
    {
        Assert(!this->IsLoopPrePass());
        this->TryHoistInvariant(newInstr, block, value, value, nullptr, false);
    }

    if (needsUpdate)
    {
        // Make sure that the kill effect of the ToVar instruction is tracked and that the kill of a property
        // type is reflected in the current instruction.
        this->ProcessKills(newInstr);
        this->ValueNumberObjectType(newInstr->GetDst(), newInstr);

        if (instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
        {
            // Reprocess the load source. We need to reset the PropertySymOpnd fields first.
            IR::PropertySymOpnd *propertySymOpnd = instr->GetSrc1()->AsPropertySymOpnd();
            if (propertySymOpnd->IsTypeCheckSeqCandidate())
            {
                propertySymOpnd->SetTypeChecked(false);
                propertySymOpnd->SetTypeAvailable(false);
                propertySymOpnd->SetWriteGuardChecked(false);
            }

            this->FinishOptPropOp(instr, propertySymOpnd);
            instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
        }
    }

    return instr;
}
- IR::Instr *
- GlobOpt::ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy)
- {
- return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyInt32, IR::BailOutIntOnly, lossy);
- }
- IR::Instr *
- GlobOpt::ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind)
- {
- return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyFloat64, bailOutKind);
- }
- IR::Instr *
- GlobOpt::ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
- {
- Assert(bailOutKind != IR::BailOutInvalid);
- IR::Instr *newInstr;
- if (!val && opnd->IsRegOpnd())
- {
- val = block->globOptData.FindValue(opnd->AsRegOpnd()->m_sym);
- }
- ValueInfo *valueInfo = val ? val->GetValueInfo() : nullptr;
- bool needReplaceSrc = false;
- bool updateBlockLastInstr = false;
- if (instr)
- {
- needReplaceSrc = true;
- if (!insertBeforeInstr)
- {
- insertBeforeInstr = instr;
- }
- }
- else if (!insertBeforeInstr)
- {
- // Insert it at the end of the block
- insertBeforeInstr = block->GetLastInstr();
- if (insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
- {
- // Don't insert code between the branch and the preceding ByteCodeUses instrs...
- while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
- {
- insertBeforeInstr = insertBeforeInstr->m_prev;
- }
- }
- else
- {
- insertBeforeInstr = insertBeforeInstr->m_next;
- updateBlockLastInstr = true;
- }
- }
- // Int constant values will be propagated into the instruction. For ArgOut_A_InlineBuiltIn, there's no benefit from
- // const-propping, so those are excluded.
- if (opnd->IsRegOpnd() &&
- !(
- valueInfo &&
- (valueInfo->HasIntConstantValue() || valueInfo->IsFloatConstant()) &&
- (!instr || instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
- ))
- {
- IR::RegOpnd *regSrc = opnd->AsRegOpnd();
- StackSym *varSym = regSrc->m_sym;
- Js::OpCode opcode = Js::OpCode::FromVar;
- if (varSym->IsTypeSpec() || !block->globOptData.liveVarSyms->Test(varSym->m_id))
- {
- // Conversion between int32 and float64
- if (varSym->IsTypeSpec())
- {
- varSym = varSym->GetVarEquivSym(this->func);
- }
- opcode = Js::OpCode::Conv_Prim;
- }
- Assert(block->globOptData.liveVarSyms->Test(varSym->m_id) || block->globOptData.IsTypeSpecialized(varSym));
- StackSym *typeSpecSym = nullptr;
- BOOL isLive = FALSE;
- BVSparse<JitArenaAllocator> *livenessBv = nullptr;
- if(valueInfo && valueInfo->IsInt())
- {
- // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
- // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
- // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
- // lossy state.
- block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
- }
- if (toType == TyInt32)
- {
- // Need to determine whether the conversion is actually lossy or lossless. If the value is an int, then it's a
- // lossless conversion despite the type of conversion requested. The liveness of the converted int32 sym needs to be
- // set to reflect the actual type of conversion done. Also, a lossless conversion needs the value to determine
- // whether the conversion may need to bail out.
- Assert(valueInfo);
- if(valueInfo->IsInt())
- {
- lossy = false;
- }
- else
- {
- Assert(IsLoopPrePass() || !block->globOptData.IsInt32TypeSpecialized(varSym));
- }
- livenessBv = block->globOptData.liveInt32Syms;
- isLive = livenessBv->Test(varSym->m_id) && (lossy || !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id));
- if (this->IsLoopPrePass())
- {
- if (!isLive)
- {
- livenessBv->Set(varSym->m_id);
- if (lossy)
- {
- block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
- }
- else
- {
- block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
- }
- }
- return instr;
- }
- typeSpecSym = varSym->GetInt32EquivSym(this->func);
- if (!isLive)
- {
- if (!opnd->IsVar() ||
- !block->globOptData.liveVarSyms->Test(varSym->m_id) ||
- (block->globOptData.liveFloat64Syms->Test(varSym->m_id) && valueInfo && valueInfo->IsLikelyFloat()))
- {
- Assert(block->globOptData.liveFloat64Syms->Test(varSym->m_id));
- if(!lossy && !valueInfo->IsInt())
- {
- // Shouldn't try to do a lossless conversion from float64 to int32 when the value is not known to be an
- // int. There are cases where we need more than two passes over loops to flush out all dependencies.
- // It's possible for the loop prepass to think that a sym s1 remains an int because it acquires the
- // value of another sym s2 that is an int in the prepass at that time. However, s2 can become a float
- // later in the loop body, in which case s1 would become a float on the second iteration of the loop. By
- // that time, we would have already committed to having s1 live as a lossless int on entry into the
- // loop, and we end up having to compensate by doing a lossless conversion from float to int, which will
- // need a bailout and will most likely bail out.
- //
- // If s2 becomes a var instead of a float, then the compensation is legal although not ideal. After
- // enough bailouts, rejit would be triggered with aggressive int type spec turned off. For the
- // float-to-int conversion though, there's no point in emitting a bailout because we already know that
- // the value is a float and has high probability of bailing out (whereas a var has a chance to be a
- // tagged int), and so currently lossless conversion from float to int with bailout is not supported.
- //
- // So, treating this case as a compile-time bailout. The exception will trigger the jit work item to be
- // restarted with aggressive int type specialization disabled.
- if(bailOutKind == IR::BailOutExpectingInteger)
- {
- Assert(IsSwitchOptEnabledForIntTypeSpec());
- throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
- }
- else
- {
- Assert(DoAggressiveIntTypeSpec());
- if(PHASE_TRACE(Js::BailOutPhase, this->func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(
- _u("BailOut (compile-time): function: %s (%s) varSym: "),
- this->func->GetJITFunctionBody()->GetDisplayName(),
- this->func->GetDebugNumberSet(debugStringBuffer),
- varSym->m_id);
- #if DBG_DUMP
- varSym->Dump();
- #else
- Output::Print(_u("s%u"), varSym->m_id);
- #endif
- if(varSym->HasByteCodeRegSlot())
- {
- Output::Print(_u(" byteCodeReg: R%u"), varSym->GetByteCodeRegSlot());
- }
- Output::Print(_u(" (lossless conversion from float64 to int32)\n"));
- Output::Flush();
- }
- if(!DoAggressiveIntTypeSpec())
- {
- // Aggressive int type specialization is already off for some reason. Prevent trying to rejit again
- // because it won't help and the same thing will happen again. Just abort jitting this function.
- if(PHASE_TRACE(Js::BailOutPhase, this->func))
- {
- Output::Print(_u(" Aborting JIT because AggressiveIntTypeSpec is already off\n"));
- Output::Flush();
- }
- throw Js::OperationAbortedException();
- }
- throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
- }
- }
- if(opnd->IsVar())
- {
- regSrc->SetType(TyFloat64);
- regSrc->m_sym = varSym->GetFloat64EquivSym(this->func);
- opcode = Js::OpCode::Conv_Prim;
- }
- else
- {
- Assert(regSrc->IsFloat64());
- Assert(regSrc->m_sym->IsFloat64());
- Assert(opcode == Js::OpCode::Conv_Prim);
- }
- }
- }
- GOPT_TRACE_OPND(regSrc, _u("Converting to int32\n"));
- }
- else if (toType == TyFloat64)
- {
- // float64
- typeSpecSym = varSym->GetFloat64EquivSym(this->func);
- if(!IsLoopPrePass() && typeSpecSym->m_requiresBailOnNotNumber && block->globOptData.IsFloat64TypeSpecialized(varSym))
- {
- // This conversion is already protected by a BailOutNumberOnly bailout (or at least it will be after the
- // dead-store phase). Since 'requiresBailOnNotNumber' is not flow-based, change the value to definitely float.
- if(valueInfo)
- {
- if(!valueInfo->IsNumber())
- {
- valueInfo = valueInfo->SpecializeToFloat64(alloc);
- ChangeValueInfo(block, val, valueInfo);
- opnd->SetValueType(valueInfo->Type());
- }
- }
- else
- {
- val = NewGenericValue(ValueType::Float);
- valueInfo = val->GetValueInfo();
- block->globOptData.SetValue(val, varSym);
- opnd->SetValueType(valueInfo->Type());
- }
- }
- if(bailOutKind == IR::BailOutNumberOnly)
- {
- if(!IsLoopPrePass())
- {
- // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any
- // non-number value, even ones that have already been generated before. The dead-store pass will update the
- // bailout kind on already-generated FromVars based on this bit.
- typeSpecSym->m_requiresBailOnNotNumber = true;
- }
- }
- else if(typeSpecSym->m_requiresBailOnNotNumber)
- {
- Assert(bailOutKind == IR::BailOutPrimitiveButString);
- bailOutKind = IR::BailOutNumberOnly;
- }
- livenessBv = block->globOptData.liveFloat64Syms;
- isLive = livenessBv->Test(varSym->m_id);
- if (this->IsLoopPrePass())
- {
- if(!isLive)
- {
- livenessBv->Set(varSym->m_id);
- }
- if (this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
- {
- this->prePassLoop->forceFloat64SymsOnEntry->Set(varSym->m_id);
- }
- else
- {
- Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
- if (symStore && symStore != varSym
- && this->OptIsInvariant(symStore, block, this->prePassLoop, block->globOptData.FindValue(symStore), false, true))
- {
- // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
- // outside the loop.
- this->prePassLoop->forceFloat64SymsOnEntry->Set(symStore->m_id);
- }
- }
- return instr;
- }
- if (!isLive && regSrc->IsVar())
- {
- if (!block->globOptData.liveVarSyms->Test(varSym->m_id) ||
- (
- block->globOptData.liveInt32Syms->Test(varSym->m_id) &&
- !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
- valueInfo &&
- valueInfo->IsLikelyInt()
- ))
- {
- Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
- Assert(!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)); // Shouldn't try to convert a lossy int32 to anything
- regSrc->SetType(TyInt32);
- regSrc->m_sym = varSym->GetInt32EquivSym(this->func);
- opcode = Js::OpCode::Conv_Prim;
- }
- }
- GOPT_TRACE_OPND(regSrc, _u("Converting to float64\n"));
- }
- bool needLoad = false;
- if (needReplaceSrc)
- {
- bool wasDead = regSrc->GetIsDead();
- // needReplaceSrc means we are type specializing a use, and need to replace the src on the instr
- if (!isLive)
- {
- needLoad = true;
- // ReplaceSrc will delete it.
- regSrc = regSrc->Copy(instr->m_func)->AsRegOpnd();
- }
- IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
- if(valueInfo)
- {
- regNew->SetValueType(valueInfo->Type());
- regNew->m_wasNegativeZeroPreventedByBailout = valueInfo->WasNegativeZeroPreventedByBailout();
- }
- regNew->SetIsDead(wasDead);
- regNew->SetIsJITOptimizedReg(true);
- this->CaptureByteCodeSymUses(instr);
- if (indir == nullptr)
- {
- instr->ReplaceSrc(opnd, regNew);
- }
- else
- {
- indir->ReplaceIndexOpnd(regNew);
- }
- opnd = regNew;
- if (!needLoad)
- {
- Assert(isLive);
- return instr;
- }
- }
- else
- {
- // We just need to insert a load of a type spec sym
- if(isLive)
- {
- return instr;
- }
- // Insert it before the specified instruction
- instr = insertBeforeInstr;
- }
- IR::RegOpnd *regDst = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
- bool isBailout = false;
- bool isHoisted = false;
- bool isInLandingPad = (block->next && !block->next->isDeleted && block->next->isLoopHeader);
- if (isInLandingPad)
- {
- Loop *loop = block->next->loop;
- Assert(loop && loop->landingPad == block);
- Assert(loop->bailOutInfo);
- }
- if (opcode == Js::OpCode::FromVar)
- {
- if (toType == TyInt32)
- {
- Assert(valueInfo);
- if (lossy)
- {
- if (!valueInfo->IsPrimitive() && !block->globOptData.IsTypeSpecialized(varSym))
- {
- // Lossy conversions to int32 on non-primitive values may have implicit calls to toString or valueOf, which
- // may be overridden to have a side effect. The side effect needs to happen every time the conversion is
- // supposed to happen, so the resulting lossy int32 value cannot be reused. Bail out on implicit calls.
- Assert(DoLossyIntTypeSpec());
- bailOutKind = IR::BailOutOnNotPrimitive;
- isBailout = true;
- }
- }
- else if (!valueInfo->IsInt())
- {
- // The operand is likely an int (hence the request to convert to int), so bail out if it's not an int. Only
- // bail out if a lossless conversion to int is requested. Lossy conversions to int such as in (a | 0) don't
- // need to bail out.
- if (bailOutKind == IR::BailOutExpectingInteger)
- {
- Assert(IsSwitchOptEnabledForIntTypeSpec());
- }
- else
- {
- Assert(DoAggressiveIntTypeSpec());
- }
- isBailout = true;
- }
- }
- else if (toType == TyFloat64 &&
- (!valueInfo || !valueInfo->IsNumber()))
- {
- // Bailout if converting vars to float if we can't prove they are floats:
- // x = str + float; -> need to bailout if str is a string
- //
- // x = obj * 0.1;
- // y = obj * 0.2; -> if obj has valueof, we'll only call valueof once on the FromVar conversion...
- Assert(bailOutKind != IR::BailOutInvalid);
- isBailout = true;
- }
- }
- if (isBailout)
- {
- if (isInLandingPad)
- {
- Loop *loop = block->next->loop;
- this->EnsureBailTarget(loop);
- instr = loop->bailOutInfo->bailOutInstr;
- updateBlockLastInstr = false;
- newInstr = IR::BailOutInstr::New(opcode, bailOutKind, loop->bailOutInfo, instr->m_func);
- newInstr->SetDst(regDst);
- newInstr->SetSrc1(regSrc);
- }
- else
- {
- newInstr = IR::BailOutInstr::New(opcode, regDst, regSrc, bailOutKind, instr, instr->m_func);
- }
- }
- else
- {
- newInstr = IR::Instr::New(opcode, regDst, regSrc, instr->m_func);
- }
- newInstr->SetByteCodeOffset(instr);
- instr->InsertBefore(newInstr);
- if (updateBlockLastInstr)
- {
- block->SetLastInstr(newInstr);
- }
- regDst->SetIsJITOptimizedReg(true);
- newInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
- ValueInfo *const oldValueInfo = valueInfo;
- if(valueInfo)
- {
- newInstr->GetSrc1()->SetValueType(valueInfo->Type());
- }
- if(isBailout)
- {
- Assert(opcode == Js::OpCode::FromVar);
- if(toType == TyInt32)
- {
- Assert(valueInfo);
- if(!lossy)
- {
- Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
- valueInfo = valueInfo->SpecializeToInt32(alloc, isPerformingLoopBackEdgeCompensation);
- ChangeValueInfo(nullptr, val, valueInfo);
- int32 intConstantValue;
- if(indir && needReplaceSrc && valueInfo->TryGetIntConstantValue(&intConstantValue))
- {
- // A likely-int value can have constant bounds due to conditional branches narrowing its range. Now that
- // the sym has been proven to be an int, the likely-int value, after specialization, will be constant.
- // Replace the index opnd in the indir with an offset.
- Assert(opnd == indir->GetIndexOpnd());
- Assert(indir->GetScale() == 0);
- indir->UnlinkIndexOpnd()->Free(instr->m_func);
- opnd = nullptr;
- indir->SetOffset(intConstantValue);
- }
- }
- }
- else if (toType == TyFloat64)
- {
- if(bailOutKind == IR::BailOutNumberOnly)
- {
- if(valueInfo)
- {
- valueInfo = valueInfo->SpecializeToFloat64(alloc);
- ChangeValueInfo(block, val, valueInfo);
- }
- else
- {
- val = NewGenericValue(ValueType::Float);
- valueInfo = val->GetValueInfo();
- block->globOptData.SetValue(val, varSym);
- }
- }
- }
- else
- {
- Assert(UNREACHED);
- }
- }
- if(valueInfo)
- {
- newInstr->GetDst()->SetValueType(valueInfo->Type());
- if(needReplaceSrc && opnd)
- {
- opnd->SetValueType(valueInfo->Type());
- }
- }
- if (block->loop)
- {
- Assert(!this->IsLoopPrePass());
- isHoisted = this->TryHoistInvariant(newInstr, block, val, val, nullptr, false, lossy, false, bailOutKind);
- }
- if (isBailout)
- {
- if (!isHoisted && !isInLandingPad)
- {
- if(valueInfo)
- {
- // Since this is a pre-op bailout, the old value info should be used for the purposes of bailout. For
- // instance, the value info could be LikelyInt but with a constant range. Once specialized to int, the value
- // info would be an int constant. However, the int constant is only guaranteed if the value is actually an
- // int, which this conversion is verifying, so bailout cannot assume the constant value.
- if(oldValueInfo)
- {
- val->SetValueInfo(oldValueInfo);
- }
- else
- {
- block->globOptData.ClearSymValue(varSym);
- }
- }
- // Fill in bail out info if the FromVar is a bailout instr, and it wasn't hoisted as invariant.
- // If it was hoisted, the invariant code will fill out the bailout info with the loop landing pad bailout info.
- this->FillBailOutInfo(block, newInstr);
- if(valueInfo)
- {
- // Restore the new value info after filling the bailout info
- if(oldValueInfo)
- {
- val->SetValueInfo(valueInfo);
- }
- else
- {
- block->globOptData.SetValue(val, varSym);
- }
- }
- }
- }
- // Now that we've captured the liveness in the bailout info, we can mark this as live.
- // This type specialized sym isn't live if the FromVar bails out.
- livenessBv->Set(varSym->m_id);
- if(toType == TyInt32)
- {
- if(lossy)
- {
- block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
- }
- else
- {
- block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
- }
- }
- }
- else
- {
- Assert(valueInfo);
- if(opnd->IsRegOpnd() && valueInfo->IsInt())
- {
- // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
- // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
- // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
- // lossy state.
- block->globOptData.liveLossyInt32Syms->Clear(opnd->AsRegOpnd()->m_sym->m_id);
- if(toType == TyInt32)
- {
- lossy = false;
- }
- }
- if (this->IsLoopPrePass())
- {
- if(opnd->IsRegOpnd())
- {
- StackSym *const sym = opnd->AsRegOpnd()->m_sym;
- if(toType == TyInt32)
- {
- Assert(!sym->IsTypeSpec());
- block->globOptData.liveInt32Syms->Set(sym->m_id);
- if(lossy)
- {
- block->globOptData.liveLossyInt32Syms->Set(sym->m_id);
- }
- else
- {
- block->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
- }
- }
- else
- {
- Assert(toType == TyFloat64);
- AnalysisAssert(instr);
- StackSym *const varSym = sym->IsTypeSpec() ? sym->GetVarEquivSym(instr->m_func) : sym;
- block->globOptData.liveFloat64Syms->Set(varSym->m_id);
- }
- }
- return instr;
- }
- if (!needReplaceSrc)
- {
- instr = insertBeforeInstr;
- }
- IR::Opnd *constOpnd;
- int32 intConstantValue;
- if(valueInfo->TryGetIntConstantValue(&intConstantValue))
- {
- if(toType == TyInt32)
- {
- constOpnd = IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func);
- }
- else
- {
- Assert(toType == TyFloat64);
- constOpnd = IR::FloatConstOpnd::New(static_cast<FloatConstType>(intConstantValue), TyFloat64, instr->m_func);
- }
- }
- else if(valueInfo->IsFloatConstant())
- {
- const FloatConstType floatValue = valueInfo->AsFloatConstant()->FloatValue();
- if(toType == TyInt32)
- {
- // In some loop scenarios, a sym can be specialized to int32 on loop entry
- // during the prepass and then subsequentely specialized to float within
- // the loop, leading to an attempted lossy conversion from float64 to int32
- // on the backedge. For these cases, disable aggressive int type specialization
- // and try again.
- if (!lossy)
- {
- AssertOrFailFast(DoAggressiveIntTypeSpec());
- throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
- }
- constOpnd =
- IR::IntConstOpnd::New(
- Js::JavascriptMath::ToInt32(floatValue),
- TyInt32,
- instr->m_func);
- }
- else
- {
- Assert(toType == TyFloat64);
- constOpnd = IR::FloatConstOpnd::New(floatValue, TyFloat64, instr->m_func);
- }
- }
- else
- {
- Assert(opnd->IsVar());
- Assert(opnd->IsAddrOpnd());
- AssertMsg(opnd->AsAddrOpnd()->IsVar(), "We only expect to see addr that are var before lower.");
- // Don't need to capture uses, we are only replacing an addr opnd
- if(toType == TyInt32)
- {
- constOpnd = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opnd->AsAddrOpnd()->m_address), TyInt32, instr->m_func);
- }
- else
- {
- Assert(toType == TyFloat64);
- constOpnd = IR::FloatConstOpnd::New(Js::TaggedInt::ToDouble(opnd->AsAddrOpnd()->m_address), TyFloat64, instr->m_func);
- }
- }
- if (toType == TyInt32)
- {
- if (needReplaceSrc)
- {
- CaptureByteCodeSymUses(instr);
- if(indir)
- {
- Assert(opnd == indir->GetIndexOpnd());
- Assert(indir->GetScale() == 0);
- indir->UnlinkIndexOpnd()->Free(instr->m_func);
- indir->SetOffset(constOpnd->AsIntConstOpnd()->AsInt32());
- }
- else
- {
- instr->ReplaceSrc(opnd, constOpnd);
- }
- }
- else
- {
- StackSym *varSym = opnd->AsRegOpnd()->m_sym;
- if(varSym->IsTypeSpec())
- {
- varSym = varSym->GetVarEquivSym(nullptr);
- Assert(varSym);
- }
- if(block->globOptData.liveInt32Syms->TestAndSet(varSym->m_id))
- {
- Assert(!!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) == lossy);
- }
- else
- {
- if(lossy)
- {
- block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
- }
- StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
- IR::RegOpnd *int32Reg = IR::RegOpnd::New(int32Sym, TyInt32, instr->m_func);
- int32Reg->SetIsJITOptimizedReg(true);
- newInstr = IR::Instr::New(Js::OpCode::Ld_I4, int32Reg, constOpnd, instr->m_func);
- newInstr->SetByteCodeOffset(instr);
- instr->InsertBefore(newInstr);
- if (updateBlockLastInstr)
- {
- block->SetLastInstr(newInstr);
- }
- }
- }
- }
- else
- {
- StackSym *floatSym;
- bool newFloatSym = false;
- StackSym* varSym;
- if (opnd->IsRegOpnd())
- {
- varSym = opnd->AsRegOpnd()->m_sym;
- if (varSym->IsTypeSpec())
- {
- varSym = varSym->GetVarEquivSym(nullptr);
- Assert(varSym);
- }
- floatSym = varSym->GetFloat64EquivSym(instr->m_func);
- }
- else
- {
- varSym = block->globOptData.GetCopyPropSym(nullptr, val);
- if(!varSym)
- {
- // Clear the symstore to ensure it's set below to this new symbol
- this->SetSymStoreDirect(val->GetValueInfo(), nullptr);
- varSym = StackSym::New(TyVar, instr->m_func);
- newFloatSym = true;
- }
- floatSym = varSym->GetFloat64EquivSym(instr->m_func);
- }
- IR::RegOpnd *floatReg = IR::RegOpnd::New(floatSym, TyFloat64, instr->m_func);
- floatReg->SetIsJITOptimizedReg(true);
- // If the value is not live - let's load it.
- if(!block->globOptData.liveFloat64Syms->TestAndSet(varSym->m_id))
- {
- newInstr = IR::Instr::New(Js::OpCode::LdC_F8_R8, floatReg, constOpnd, instr->m_func);
- newInstr->SetByteCodeOffset(instr);
- instr->InsertBefore(newInstr);
- if (updateBlockLastInstr)
- {
- block->SetLastInstr(newInstr);
- }
- if(newFloatSym)
- {
- block->globOptData.SetValue(val, varSym);
- }
- // Src is always invariant, but check if the dst is, and then hoist.
- if (block->loop &&
- (
- (newFloatSym && block->loop->CanHoistInvariants()) ||
- this->OptIsInvariant(floatReg, block, block->loop, val, false, false)
- ))
- {
- Assert(!this->IsLoopPrePass());
- this->OptHoistInvariant(newInstr, block, block->loop, val, val, nullptr, false);
- }
- }
- if (needReplaceSrc)
- {
- CaptureByteCodeSymUses(instr);
- instr->ReplaceSrc(opnd, floatReg);
- }
- }
- return instr;
- }
- return newInstr;
- }
- void
- GlobOpt::ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block)
- {
- ToVarStackSym(dst->m_sym, block);
- }
- void
- GlobOpt::ToVarStackSym(StackSym *varSym, BasicBlock *block)
- {
- //added another check for sym , in case of asmjs there is mostly no var syms and hence added a new check to see if it is the primary sym
- Assert(!varSym->IsTypeSpec());
- block->globOptData.liveVarSyms->Set(varSym->m_id);
- block->globOptData.liveInt32Syms->Clear(varSym->m_id);
- block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
- block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
- }
- void
- GlobOpt::ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
- {
- StackSym *varSym = dst->m_sym;
- Assert(!varSym->IsTypeSpec());
- if (!this->IsLoopPrePass() && varSym->IsVar())
- {
- StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
- // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
- // since we'll just be hammering the symbol.
- dst = instr->UnlinkDst()->AsRegOpnd();
- dst->m_sym = int32Sym;
- dst->SetType(TyInt32);
- instr->SetDst(dst);
- }
- block->globOptData.liveInt32Syms->Set(varSym->m_id);
- block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
- block->globOptData.liveVarSyms->Clear(varSym->m_id);
- block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
- }
- void
- GlobOpt::ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
- {
- // We should be calling only for asmjs function
- Assert(GetIsAsmJSFunc());
- StackSym *varSym = dst->m_sym;
- Assert(!varSym->IsTypeSpec());
- block->globOptData.liveInt32Syms->Set(varSym->m_id);
- block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
- block->globOptData.liveVarSyms->Clear(varSym->m_id);
- block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
- }
- void
- GlobOpt::ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
- {
- StackSym *varSym = dst->m_sym;
- Assert(!varSym->IsTypeSpec());
- if (!this->IsLoopPrePass() && varSym->IsVar())
- {
- StackSym *float64Sym = varSym->GetFloat64EquivSym(this->func);
- // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
- // since we'll just be hammering the symbol.
- dst = instr->UnlinkDst()->AsRegOpnd();
- dst->m_sym = float64Sym;
- dst->SetType(TyFloat64);
- instr->SetDst(dst);
- }
- block->globOptData.liveFloat64Syms->Set(varSym->m_id);
- block->globOptData.liveVarSyms->Clear(varSym->m_id);
- block->globOptData.liveInt32Syms->Clear(varSym->m_id);
- block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
- }
// Overload for int64 constants: StackSym::SetIsInt64Const takes no payload here,
// so 'value' is accepted only for signature symmetry with the int overload
// (both are selected by ReplaceWConst<T>).
static void SetIsConstFlag(StackSym* dstSym, int64 value)
{
    Assert(dstSym);
    dstSym->SetIsInt64Const();
}
// Overload for int32 constants: records the constant value on the sym.
static void SetIsConstFlag(StackSym* dstSym, int value)
{
    Assert(dstSym);
    dstSym->SetIsIntConst(value);
}
- static IR::Opnd* CreateIntConstOpnd(IR::Instr* instr, int64 value)
- {
- return (IR::Opnd*)IR::Int64ConstOpnd::New(value, instr->GetDst()->GetType(), instr->m_func);
- }
- static IR::Opnd* CreateIntConstOpnd(IR::Instr* instr, int value)
- {
- IntConstType constVal;
- if (instr->GetDst()->IsUnsigned())
- {
- // we should zero extend in case of uint
- constVal = (uint32)value;
- }
- else
- {
- constVal = value;
- }
- return (IR::Opnd*)IR::IntConstOpnd::New(constVal, instr->GetDst()->GetType(), instr->m_func);
- }
// Rewrites an already-folded binary instruction into a load of the constant
// 'value': src1 is replaced with a constant operand, src2 is freed, and the
// constant is recorded on the dst sym (if single-def) and in *pDstVal.
// T is int or int64; it selects the CreateIntConstOpnd / SetIsConstFlag overloads.
// The instruction is passed by pointer-to-pointer and rebound through 'instr'
// because OptSrc receives &instr and may update it.
// Returns the (possibly retyped) destination operand.
template <typename T>
IR::Opnd* GlobOpt::ReplaceWConst(IR::Instr **pInstr, T value, Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    IR::Opnd * constOpnd = CreateIntConstOpnd(instr, value);

    instr->ReplaceSrc1(constOpnd);
    instr->FreeSrc2();

    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    StackSym *dstSym = dst->AsRegOpnd()->m_sym;
    if (dstSym->IsSingleDef())
    {
        // Only safe to tag the sym itself as a constant when this is its single definition.
        SetIsConstFlag(dstSym, value);
    }

    GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);
    *pDstVal = GetIntConstantValue(value, instr, dst);
    return dst;
}
// Attempts to constant-fold a Wasm binary operation whose two sources are both
// known integer constants of type T (int32 or int64).
// On success: rewrites the instruction into Ld_I4 of the folded constant,
// updates *pDstVal, retargets the dst via ToInt32Dst, and returns true.
// Returns false (instruction untouched) when const folding is disabled, either
// source is not a known integer constant, or BinaryCalculatorT rejects the
// operation.
template <typename T>
bool GlobOpt::OptConstFoldBinaryWasm(
    IR::Instr** pInstr,
    const Value* src1,
    const Value* src2,
    Value **pDstVal)
{
    IR::Instr* &instr = *pInstr;

    if (!DoConstFold())
    {
        return false;
    }

    T src1IntConstantValue, src2IntConstantValue;
    if (!src1 || !src1->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue, false) || //a bit sketchy: false for int32 means likelyInt = false
        !src2 || !src2->GetValueInfo()->TryGetIntConstantValue(&src2IntConstantValue, false)    //and unsigned = false for int64
        )
    {
        return false;
    }

    // The calculator is told whether this is a Wasm function so it can apply
    // Wasm semantics (e.g. for division edge cases).
    int64 tmpValueOut;
    if (!instr->BinaryCalculatorT<T>(src1IntConstantValue, src2IntConstantValue, &tmpValueOut, func->GetJITFunctionBody()->IsWasmFunction()))
    {
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    IR::Opnd *dst = (instr->GetDst()->IsInt64()) ? //dst can be int32 for int64 comparison operators
        ReplaceWConst(pInstr, tmpValueOut, pDstVal) :
        ReplaceWConst(pInstr, (int)tmpValueOut, pDstVal);

    instr->m_opcode = Js::OpCode::Ld_I4;
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    return true;
}
// Attempts to constant-fold a (non-Wasm) binary operation given the int-constant
// bounds of its two sources. For non-branch instructions both sources must be
// exact constants. On success the instruction becomes a load of the folded
// int32 constant (LdC_A_I4 when type specialization is off, Ld_I4 otherwise),
// *pDstVal is updated, and true is returned; otherwise false and no change.
bool
GlobOpt::OptConstFoldBinary(
    IR::Instr * *pInstr,
    const IntConstantBounds &src1IntConstantBounds,
    const IntConstantBounds &src2IntConstantBounds,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value;
    IR::IntConstOpnd *constOpnd;

    if (!DoConstFold())
    {
        return false;
    }

    int32 src1IntConstantValue = -1;
    int32 src2IntConstantValue = -1;

    int32 src1MaxIntConstantValue = -1;
    int32 src2MaxIntConstantValue = -1;
    int32 src1MinIntConstantValue = -1;
    int32 src2MinIntConstantValue = -1;

    if (instr->IsBranchInstr())
    {
        // NOTE(review): for branches only the min/max bounds are captured, and
        // they are not read again below — BinaryCalculator is invoked with the
        // -1 sentinels in that case. Confirm whether branch folding is expected
        // to rely on these bounds elsewhere or this is vestigial.
        src1MinIntConstantValue = src1IntConstantBounds.LowerBound();
        src1MaxIntConstantValue = src1IntConstantBounds.UpperBound();
        src2MinIntConstantValue = src2IntConstantBounds.LowerBound();
        src2MaxIntConstantValue = src2IntConstantBounds.UpperBound();
    }
    else if (src1IntConstantBounds.IsConstant() && src2IntConstantBounds.IsConstant())
    {
        src1IntConstantValue = src1IntConstantBounds.LowerBound();
        src2IntConstantValue = src2IntConstantBounds.LowerBound();
    }
    else
    {
        return false;
    }

    // The folded result must fit in 32 bits for this int32 fold.
    IntConstType tmpValueOut;
    if (!instr->BinaryCalculator(src1IntConstantValue, src2IntConstantValue, &tmpValueOut, TyInt32)
        || !Math::FitsInDWord(tmpValueOut))
    {
        return false;
    }

    value = (int32)tmpValueOut;

    this->CaptureByteCodeSymUses(instr);
    constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
    instr->ReplaceSrc1(constOpnd);
    instr->FreeSrc2();

    // OptSrc receives &instr and may update it.
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (dstSym->IsSingleDef())
    {
        // Only safe to tag the sym itself as a constant when this is its single definition.
        dstSym->SetIsIntConst(value);
    }

    GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);

    *pDstVal = GetIntConstantValue(value, instr, dst);

    if (IsTypeSpecPhaseOff(this->func))
    {
        // No type specialization: load the constant as a tagged var.
        instr->m_opcode = Js::OpCode::LdC_A_I4;
        this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
    }
    else
    {
        // Type specialization on: load as a raw int32 and retarget the dst.
        instr->m_opcode = Js::OpCode::Ld_I4;
        this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    }

    InvalidateInductionVariables(instr);

    return true;
}
// Constant-folds a conditional branch whose outcome 'test' is known:
// - test == true:  the branch becomes an unconditional Br; the fall-through
//   block becomes dead.
// - test == false: the branch becomes a Nop; the branch target becomes dead.
// The sources are int-specialized first (when values are provided) so their
// liveness is recorded, then the dead successor edge is removed from the flow
// graph. Multi-branches are left alone in the not-taken case.
void
GlobOpt::OptConstFoldBr(bool test, IR::Instr *instr, Value * src1Val, Value * src2Val)
{
    GOPT_TRACE_INSTR(instr, _u("Constant folding to branch: "));
    BasicBlock *deadBlock;

    if (src1Val)
    {
        this->ToInt32(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, false);
    }

    if (src2Val)
    {
        this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, false);
    }

    this->CaptureByteCodeSymUses(instr);

    if (test)
    {
        // Branch always taken: the instruction right after the branch (the
        // fall-through label's block) is the dead successor.
        instr->m_opcode = Js::OpCode::Br;

        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        deadBlock = instr->m_next->AsLabelInstr()->GetBasicBlock();
    }
    else
    {
        AssertMsg(instr->m_next->IsLabelInstr(), "Next instr of branch should be a label...");
        if(instr->AsBranchInstr()->IsMultiBranch())
        {
            return;
        }
        // Branch never taken: the branch target's block is the dead successor.
        deadBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        instr->m_opcode = Js::OpCode::Nop;
    }

    // Loop back edge: we would have already decremented data use count for the tail block when we processed the loop header.
    if (!(this->currentBlock->loop && this->currentBlock->loop->GetHeadBlock() == deadBlock))
    {
        this->currentBlock->DecrementDataUseCount();
    }

    this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);

    if (deadBlock->GetPredList()->Count() == 0)
    {
        deadBlock->SetDataUseCount(0);
    }
}
// Changes only the ValueType of 'value' to newValueType, copying (never mutating)
// the existing value info since it may be shared, then installs the copy via
// ChangeValueInfo (which also keeps call-kill tracking in sync).
// - preserveSubclassInfo: keep subclass value-info data (e.g. ArrayValueInfo's
//   segment syms); otherwise the copy is downgraded to a generic structure kind.
// - allowIncompatibleType: forwarded to ChangeValueInfo to permit type changes
//   that would normally assert (e.g. conflicting profile data after rejit).
void
GlobOpt::ChangeValueType(
    BasicBlock *const block,
    Value *const value,
    const ValueType newValueType,
    const bool preserveSubclassInfo,
    const bool allowIncompatibleType) const
{
    Assert(value);
    // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
    Assert(!value->GetValueInfo()->IsJsType());

    ValueInfo *const valueInfo = value->GetValueInfo();
    const ValueType valueType(valueInfo->Type());
    // Nothing to do when the type already matches, unless the caller asked for a
    // generic copy and the current info still carries subclass data.
    if(valueType == newValueType && (preserveSubclassInfo || valueInfo->IsGeneric()))
    {
        return;
    }

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !preserveSubclassInfo ||
        !valueInfo->IsArrayValueInfo() ||
        newValueType.IsObject() && newValueType.GetObjectType() == valueInfo->GetObjectType());
    Assert(!valueInfo->GetSymStore() || !valueInfo->GetSymStore()->IsStackSym() || !valueInfo->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable());

    ValueInfo *const newValueInfo =
        preserveSubclassInfo
            ? valueInfo->Copy(alloc)
            : valueInfo->CopyWithGenericStructureKind(alloc);
    newValueInfo->Type() = newValueType;
    ChangeValueInfo(block, value, newValueInfo, allowIncompatibleType);
}
// Replaces the value's ValueInfo with newValueInfo, keeping the block's
// call-kill tracking set in sync when a block is provided (callers that pass
// nullptr maintain valuesToKillOnCalls themselves). 'compensated' is passed
// through to the debug-only array value-info verification.
void
GlobOpt::ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType, const bool compensated) const
{
    Assert(value);
    Assert(newValueInfo);

    // The value type must be changed to something more specific or something more generic. For instance, it would be changed to
    // something more specific if the current value type is LikelyArray and checks have been done to ensure that it's an array,
    // and it would be changed to something more generic if a call kills the Array value type and it must be treated as
    // LikelyArray going forward.

    // There are cases where we change the type because of different profile information, and because of rejit, this profile
    // information may conflict. Need to allow an incompatible type in those cases. However, the old type should be indefinite.
    Assert((allowIncompatibleType && !value->GetValueInfo()->IsDefinite()) ||
        AreValueInfosCompatible(newValueInfo, value->GetValueInfo()));

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !value->GetValueInfo()->IsArrayValueInfo() ||
        !newValueInfo->IsArrayValueInfo() ||
        newValueInfo->GetObjectType() == value->GetValueInfo()->GetObjectType());

    if(block)
    {
        TrackValueInfoChangeForKills(block, value, newValueInfo, compensated);
    }
    value->SetValueInfo(newValueInfo);
}
// Returns true when the two value infos could plausibly describe the same value,
// i.e. one's type is a subset of the other's after allowing for the liberties
// taken by int/float type specialization and mixed typed-array pairs.
// Used (in asserts) to validate that ChangeValueInfo moves a type only toward
// something more specific or more generic.
bool
GlobOpt::AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const
{
    Assert(v0);
    Assert(v1);

    // An uninitialized value info is compatible with anything.
    if(v0->IsUninitialized() || v1->IsUninitialized())
    {
        return true;
    }

    const bool doAggressiveIntTypeSpec = DoAggressiveIntTypeSpec();
    if(doAggressiveIntTypeSpec && (v0->IsInt() || v1->IsInt()))
    {
        // Int specialization in some uncommon loop cases involving dependencies, needs to allow specializing values of
        // arbitrary types, even values that are definitely not int, to compensate for aggressive assumptions made by a loop
        // prepass
        return true;
    }
    if ((v0->Type()).IsMixedTypedArrayPair(v1->Type()) || (v1->Type()).IsMixedTypedArrayPair(v0->Type()))
    {
        return true;
    }
    const bool doFloatTypeSpec = DoFloatTypeSpec();
    if(doFloatTypeSpec && (v0->IsFloat() || v1->IsFloat()))
    {
        // Float specialization allows specializing values of arbitrary types, even values that are definitely not float
        return true;
    }

    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    // Compatibility means one definite type is a subset of the other, in either direction.
    const auto AreValueTypesCompatible = [=](const ValueType t0, const ValueType t1)
    {
        return
            t0.IsSubsetOf(t1, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec) ||
            t1.IsSubsetOf(t0, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec);
    };

    const ValueType t0(v0->Type().ToDefinite()), t1(v1->Type().ToDefinite());
    if(t0.IsLikelyObject() && t1.IsLikelyObject())
    {
        // Check compatibility for the primitive portions and the object portions of the value types separately
        if(AreValueTypesCompatible(t0.ToDefiniteObject(), t1.ToDefiniteObject()) &&
            (
                !t0.HasBeenPrimitive() ||
                !t1.HasBeenPrimitive() ||
                AreValueTypesCompatible(t0.ToDefinitePrimitiveSubset(), t1.ToDefinitePrimitiveSubset())
            ))
        {
            return true;
        }
    }
    else if(AreValueTypesCompatible(t0, t1))
    {
        return true;
    }

    // Last chance: a float constant paired with a likely-int value.
    const FloatConstantValueInfo *floatConstantValueInfo;
    const ValueInfo *likelyIntValueinfo;
    if(v0->IsFloatConstant() && v1->IsLikelyInt())
    {
        floatConstantValueInfo = v0->AsFloatConstant();
        likelyIntValueinfo = v1;
    }
    else if(v0->IsLikelyInt() && v1->IsFloatConstant())
    {
        floatConstantValueInfo = v1->AsFloatConstant();
        likelyIntValueinfo = v0;
    }
    else
    {
        return false;
    }

    // A float constant value with a value that is actually an int is a subset of a likely-int value.
    // Ideally, we should create an int constant value for this up front, such that IsInt() also returns true. There
    // were other issues with that, should see if that can be done.
    int32 int32Value;
    return
        Js::JavascriptNumber::TryGetInt32Value(floatConstantValueInfo->FloatValue(), &int32Value) &&
        (!likelyIntValueinfo->IsLikelyTaggedInt() || !Js::TaggedInt::IsOverflow(int32Value));
}
#if DBG
// Debug-only consistency check: verifies that an optimized-array value info only
// carries information (definite array type, no-missing-values flag, segment syms,
// length sym) that the corresponding hoisting optimizations are enabled to
// produce. 'ignoreKnownImplicitCalls' relaxes the checks for paths (e.g. merges)
// where implicit-call info is known to be imprecise.
void
GlobOpt::VerifyArrayValueInfoForTracking(
    const ValueInfo *const valueInfo,
    const bool isJsArray,
    const BasicBlock *const block,
    const bool ignoreKnownImplicitCalls) const
{
    Assert(valueInfo);
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(isJsArray == valueInfo->IsArrayOrObjectWithArray());
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
    Assert(block);

    Loop *implicitCallsLoop;
    if(block->next && !block->next->isDeleted && block->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = block->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == block);
    }
    else
    {
        implicitCallsLoop = block->loop;
    }

    // A JS-array value info requires array-check hoisting to be enabled (unless
    // known-imprecise implicit-call info is being ignored).
    Assert(
        !isJsArray ||
        DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop) ||
        (
            ignoreKnownImplicitCalls &&
            !(implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
        ));

    // The no-missing-values flag requires its hoisting optimization.
    Assert(!(isJsArray && valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist()));

    // Head segment / head segment length syms require segment hoisting.
    Assert(
        !(
            valueInfo->IsArrayValueInfo() &&
            (
                valueInfo->AsArrayValueInfo()->HeadSegmentSym() ||
                valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
            ) &&
            !DoArraySegmentHoist(valueInfo->Type())
        ));

#if 0
    // We can't assert here that there is only a head segment length sym if hoisting is allowed in the current block,
    // because we may have propagated the sym forward out of a loop, and hoisting may be allowed inside but not
    // outside the loop.
    Assert(
        isJsArray ||
        !valueInfo->IsArrayValueInfo() ||
        !valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym() ||
        DoTypedArraySegmentLengthHoist(implicitCallsLoop) ||
        ignoreKnownImplicitCalls ||
        (implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
    );
#endif

    // A tracked length sym requires array-length hoisting.
    Assert(
        !(
            isJsArray &&
            valueInfo->IsArrayValueInfo() &&
            valueInfo->AsArrayValueInfo()->LengthSym() &&
            !DoArrayLengthHoist()
        ));
}
#endif
- void
- GlobOpt::TrackNewValueForKills(Value *const value)
- {
- Assert(value);
- if(!value->GetValueInfo()->IsAnyOptimizedArray())
- {
- return;
- }
- DoTrackNewValueForKills(value);
- }
// Sets up call-kill tracking for a freshly created optimized-array value.
// Because the value info is brand new (no sym store yet), it may be downgraded
// in place when the relevant hoisting optimizations are disabled; otherwise
// JS arrays and virtual typed arrays are added to valuesToKillOnCalls.
void
GlobOpt::DoTrackNewValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(!valueInfo->IsArrayValueInfo());

    // The value and value info here are new, so it's okay to modify the value info in-place
    Assert(!valueInfo->GetSymStore());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();

    Loop *implicitCallsLoop;
    if(currentBlock->next && !currentBlock->next->isDeleted && currentBlock->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = currentBlock->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == currentBlock);
    }
    else
    {
        implicitCallsLoop = currentBlock->loop;
    }

    if(isJsArray || isVirtualTypedArray)
    {
        if(!DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop))
        {
            // Array opts are disabled for this value type, so treat it as an indefinite value type going forward
            valueInfo->Type() = valueInfo->Type().ToLikely();
            return;
        }

        if(isJsArray && valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
        {
            // Missing-value check hoisting is off: drop the no-missing-values claim.
            valueInfo->Type() = valueInfo->Type().SetHasNoMissingValues(false);
        }
    }

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    // Typed arrays (non-virtual) are not killed by calls at this point — only
    // their tracked head segment length would be, and a new value has none.
    if(!isJsArray && !isVirtualTypedArray)
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    CurrentBlockData()->valuesToKillOnCalls->Add(value);
}
- void
- GlobOpt::TrackCopiedValueForKills(Value *const value)
- {
- Assert(value);
- if(!value->GetValueInfo()->IsAnyOptimizedArray())
- {
- return;
- }
- DoTrackCopiedValueForKills(value);
- }
// Re-establishes call-kill tracking for an optimized-array value that was copied
// into the current block (e.g. via merge/compensation). Unlike the "new value"
// path, the value info is not modified; the value is simply (re-)added to
// valuesToKillOnCalls when it is killable by calls.
void
GlobOpt::DoTrackCopiedValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    // Typed arrays (non-virtual) only need tracking when a head segment length
    // sym is being carried (a call may detach the buffer).
    if(!isJsArray && !isVirtualTypedArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    CurrentBlockData()->valuesToKillOnCalls->Add(value);
}
- void
- GlobOpt::TrackMergedValueForKills(
- Value *const value,
- GlobOptBlockData *const blockData,
- BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
- {
- Assert(value);
- if(!value->GetValueInfo()->IsAnyOptimizedArray())
- {
- return;
- }
- DoTrackMergedValueForKills(value, blockData, mergedValueTypesTrackedForKills);
- }
// Re-establishes call-kill tracking for an optimized-array value produced by a
// block merge. When a dedup bit-vector is supplied, each value number is added
// to blockData->valuesToKillOnCalls at most once across the merge.
void
GlobOpt::DoTrackMergedValueForKills(
    Value *const value,
    GlobOptBlockData *const blockData,
    BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
{
    Assert(value);
    Assert(blockData);

    ValueInfo *valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();

#if DBG
    // Merged values may carry imprecise implicit-call info; relax verification.
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock, true);
#endif

    // Typed arrays (non-virtual) only need tracking when a head segment length
    // sym is being carried (a call may detach the buffer).
    if(!isJsArray && !isVirtualTypedArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    if(!mergedValueTypesTrackedForKills || !mergedValueTypesTrackedForKills->TestAndSet(value->GetValueNumber()))
    {
        blockData->valuesToKillOnCalls->Add(value);
    }
}
- void
- GlobOpt::TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const
- {
- Assert(block);
- Assert(value);
- Assert(newValueInfo);
- ValueInfo *const oldValueInfo = value->GetValueInfo();
- #if DBG
- if(oldValueInfo->IsAnyOptimizedArray())
- {
- VerifyArrayValueInfoForTracking(oldValueInfo, oldValueInfo->IsArrayOrObjectWithArray(), block, compensated);
- }
- #endif
- const bool trackOldValueInfo =
- oldValueInfo->IsArrayOrObjectWithArray() ||
- oldValueInfo->IsOptimizedVirtualTypedArray() ||
- (
- oldValueInfo->IsOptimizedTypedArray() &&
- oldValueInfo->IsArrayValueInfo() &&
- oldValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
- );
- Assert(trackOldValueInfo == block->globOptData.valuesToKillOnCalls->ContainsKey(value));
- #if DBG
- if(newValueInfo->IsAnyOptimizedArray())
- {
- VerifyArrayValueInfoForTracking(newValueInfo, newValueInfo->IsArrayOrObjectWithArray(), block, compensated);
- }
- #endif
- const bool trackNewValueInfo =
- newValueInfo->IsArrayOrObjectWithArray() ||
- newValueInfo->IsOptimizedVirtualTypedArray() ||
- (
- newValueInfo->IsOptimizedTypedArray() &&
- newValueInfo->IsArrayValueInfo() &&
- newValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
- );
- if(trackOldValueInfo == trackNewValueInfo)
- {
- return;
- }
- if(trackNewValueInfo)
- {
- block->globOptData.valuesToKillOnCalls->Add(value);
- }
- else
- {
- block->globOptData.valuesToKillOnCalls->Remove(value);
- }
- }
- void
- GlobOpt::ProcessValueKills(IR::Instr *const instr)
- {
- Assert(instr);
- ValueSet *const valuesToKillOnCalls = CurrentBlockData()->valuesToKillOnCalls;
- if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
- {
- return;
- }
- const JsArrayKills kills = CheckJsArrayKills(instr);
- Assert(!kills.KillsArrayHeadSegments() || kills.KillsArrayHeadSegmentLengths());
- if(IsLoopPrePass())
- {
- rootLoopPrePass->jsArrayKills = rootLoopPrePass->jsArrayKills.Merge(kills);
- Assert(
- !rootLoopPrePass->parent ||
- rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));
- if(kills.KillsAllArrays())
- {
- rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = false;
- }
- if(valuesToKillOnCalls->Count() == 0)
- {
- return;
- }
- }
- if(kills.KillsAllArrays())
- {
- Assert(kills.KillsTypedArrayHeadSegmentLengths());
- // - Calls need to kill the value types of values in the following list. For instance, calls can transform a JS array
- // into an ES5 array, so any definitely-array value types need to be killed. Also, VirtualTypeArrays do not have
- // bounds checks; this can be problematic if the array is detached, so check to ensure that it is a virtual array.
- // Update the value types to likley to ensure a bailout that asserts Array type is generated.
- // - Calls also need to kill typed array head segment lengths. A typed array's array buffer may be transferred to a web
- // worker, in which case the typed array's length is set to zero.
- for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
- {
- Value *const value = it.CurrentValue();
- ValueInfo *const valueInfo = value->GetValueInfo();
- Assert(
- valueInfo->IsArrayOrObjectWithArray() ||
- valueInfo->IsOptimizedVirtualTypedArray() ||
- valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
- if (valueInfo->IsArrayOrObjectWithArray() || valueInfo->IsOptimizedVirtualTypedArray())
- {
- ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
- continue;
- }
- ChangeValueInfo(
- nullptr,
- value,
- valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
- }
- valuesToKillOnCalls->Clear();
- return;
- }
- if(kills.KillsArraysWithNoMissingValues())
- {
- // Some operations may kill arrays with no missing values in unlikely circumstances. Convert their value types to likely
- // versions so that the checks have to be redone.
- for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
- {
- Value *const value = it.CurrentValue();
- ValueInfo *const valueInfo = value->GetValueInfo();
- Assert(
- valueInfo->IsArrayOrObjectWithArray() ||
- valueInfo->IsOptimizedVirtualTypedArray() ||
- valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
- if(!valueInfo->IsArrayOrObjectWithArray() || !valueInfo->HasNoMissingValues())
- {
- continue;
- }
- ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
- it.RemoveCurrent();
- }
- }
- else if(kills.KillsObjectArraysWithNoMissingValues())
- {
- // Some operations may kill objects with arrays-with-no-missing-values in unlikely circumstances. Convert their value types to likely
- // versions so that the checks have to be redone.
- for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
- {
- Value *const value = it.CurrentValue();
- ValueInfo *const valueInfo = value->GetValueInfo();
- Assert(
- valueInfo->IsArrayOrObjectWithArray() ||
- valueInfo->IsOptimizedVirtualTypedArray() ||
- valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
- if(!valueInfo->IsArrayOrObjectWithArray() || valueInfo->IsArray() || !valueInfo->HasNoMissingValues())
- {
- continue;
- }
- ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
- it.RemoveCurrent();
- }
- }
- if(kills.KillsNativeArrays())
- {
- // Some operations may kill native arrays in (what should be) unlikely circumstances. Convert their value types to
- // likely versions so that the checks have to be redone.
- for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
- {
- Value *const value = it.CurrentValue();
- ValueInfo *const valueInfo = value->GetValueInfo();
- Assert(
- valueInfo->IsArrayOrObjectWithArray() ||
- valueInfo->IsOptimizedVirtualTypedArray() ||
- valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
- if(!valueInfo->IsArrayOrObjectWithArray() || valueInfo->HasVarElements())
- {
- continue;
- }
- ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
- it.RemoveCurrent();
- }
- }
- const bool likelyKillsJsArraysWithNoMissingValues = IsOperationThatLikelyKillsJsArraysWithNoMissingValues(instr);
- if(!kills.KillsArrayHeadSegmentLengths())
- {
- Assert(!kills.KillsArrayHeadSegments());
- if(!likelyKillsJsArraysWithNoMissingValues && !kills.KillsArrayLengths())
- {
- return;
- }
- }
- for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
- {
- Value *const value = it.CurrentValue();
- ValueInfo *valueInfo = value->GetValueInfo();
- Assert(
- valueInfo->IsArrayOrObjectWithArray() ||
- valueInfo->IsOptimizedVirtualTypedArray() ||
- valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
- if(!valueInfo->IsArrayOrObjectWithArray())
- {
- continue;
- }
- if(likelyKillsJsArraysWithNoMissingValues && valueInfo->HasNoMissingValues())
- {
- ChangeValueType(nullptr, value, valueInfo->Type().SetHasNoMissingValues(false), true);
- valueInfo = value->GetValueInfo();
- }
- if(!valueInfo->IsArrayValueInfo())
- {
- continue;
- }
- ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
- const bool removeHeadSegment = kills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
- const bool removeHeadSegmentLength = kills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
- const bool removeLength = kills.KillsArrayLengths() && arrayValueInfo->LengthSym();
- if(removeHeadSegment || removeHeadSegmentLength || removeLength)
- {
- ChangeValueInfo(
- nullptr,
- value,
- arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
- valueInfo = value->GetValueInfo();
- }
- }
- }
// Kills tracked array value info for a block (or its loop) that may make implicit calls.
// Definitely-array value types are demoted to their "likely" versions, and typed arrays lose
// their head segment length sym, so the related checks must be redone rather than relying on
// stale definite info (which would force implicit calls to be disabled and cause bailouts).
void
GlobOpt::ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    // In the loop prepass we may still need to record loop kills below even when the set is
    // empty at this point, so only bail out early on the real pass.
    if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    // If the current block or loop has implicit calls, kill all definitely-array value types, as using that info will cause
    // implicit calls to be disabled, resulting in unnecessary bailouts
    const bool killValuesOnImplicitCalls =
        (block->loop ? !this->ImplicitCallFlagsAllowOpts(block->loop) : !this->ImplicitCallFlagsAllowOpts(func));
    if (!killValuesOnImplicitCalls)
    {
        return;
    }

    if(IsLoopPrePass() && block->loop == rootLoopPrePass)
    {
        AnalysisAssert(rootLoopPrePass);

        // Record that this loop (and every enclosing loop) kills all arrays, so later passes
        // know array info cannot be carried across the loop.
        for (Loop * loop = rootLoopPrePass; loop != nullptr; loop = loop->parent)
        {
            loop->jsArrayKills.SetKillsAllArrays();
        }
        Assert(!rootLoopPrePass->parent || rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));

        if(valuesToKillOnCalls->Count() == 0)
        {
            return;
        }
    }

    for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *const valueInfo = value->GetValueInfo();
        // Every tracked value is either a JS array / object-with-array, a virtual typed array,
        // or an optimized typed array carrying a head segment length sym.
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedVirtualTypedArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        if(valueInfo->IsArrayOrObjectWithArray() || valueInfo->IsOptimizedVirtualTypedArray())
        {
            // Demote the definite array type to its likely version so the array checks are redone.
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            continue;
        }
        // Typed array: keep the value type but drop the head segment length sym, since an
        // implicit call may detach/transfer the underlying buffer (length becomes zero).
        ChangeValueInfo(
            nullptr,
            value,
            valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
    }
    valuesToKillOnCalls->Clear();
}
// Applies a loop's aggregated array kills to the tracked values at the loop header after
// back-edge merge. Values whose type the loop kills are demoted to "likely" (or lose their
// head segment length sym for typed arrays) and removed from the tracked set, since
// compensation code is not added on back-edges and the merged value cannot stay definite.
void
GlobOpt::ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(block->isLoopHeader);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    if(valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    // Kills accumulated for the whole loop body during the prepass.
    const JsArrayKills loopKills(block->loop->jsArrayKills);
    for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *valueInfo = value->GetValueInfo();
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedVirtualTypedArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
        Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

        const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();

        // JS/virtual arrays are killed by value type; typed arrays only by head-segment-length kills.
        if((isJsArray || isVirtualTypedArray) ? loopKills.KillsValueType(valueInfo->Type()) : loopKills.KillsTypedArrayHeadSegmentLengths())
        {
            // Hoisting array checks and other related things for this type is disabled for the loop due to the kill, as
            // compensation code is currently not added on back-edges. When merging values from a back-edge, the array value
            // type cannot be definite, as that may require adding compensation code on the back-edge if the optimization pass
            // chooses to not optimize the array.
            if(isJsArray || isVirtualTypedArray)
            {
                ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            }
            else
            {
                // Typed array: drop only the head segment length sym from the value info.
                ChangeValueInfo(
                    nullptr,
                    value,
                    valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
            }
            it.RemoveCurrent();
            continue;
        }

        if(!isJsArray || !valueInfo->IsArrayValueInfo())
        {
            continue;
        }

        // Similarly, if the loop contains an operation that kills JS array segments, don't make the segment or other related
        // syms available initially inside the loop
        ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
        const bool removeHeadSegment = loopKills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
        const bool removeHeadSegmentLength = loopKills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
        const bool removeLength = loopKills.KillsArrayLengths() && arrayValueInfo->LengthSym();
        if(removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            ChangeValueInfo(
                nullptr,
                value,
                arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
            valueInfo = value->GetValueInfo();
        }
    }
}
- bool
- GlobOpt::NeedBailOnImplicitCallForLiveValues(BasicBlock const * const block, const bool isForwardPass) const
- {
- if(isForwardPass)
- {
- return block->globOptData.valuesToKillOnCalls->Count() != 0;
- }
- if(block->noImplicitCallUses->IsEmpty())
- {
- Assert(block->noImplicitCallNoMissingValuesUses->IsEmpty());
- Assert(block->noImplicitCallNativeArrayUses->IsEmpty());
- Assert(block->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty());
- Assert(block->noImplicitCallArrayLengthSymUses->IsEmpty());
- return false;
- }
- return true;
- }
- IR::Instr*
- GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func)
- {
- IR::Instr* instr = IR::Instr::New(Js::OpCode::BoundCheck, func);
- return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
- }
- IR::Instr*
- GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func * func)
- {
- IR::Instr* instr = IR::BailOutInstr::New(Js::OpCode::BoundCheck, bailoutkind, bailoutInfo, func);
- return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
- }
- IR::Instr*
- GlobOpt::AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset)
- {
- instr->SetSrc1(lowerBound);
- instr->SetSrc2(upperBound);
- if (offset != 0)
- {
- instr->SetDst(IR::IntConstOpnd::New(offset, TyInt32, instr->m_func));
- }
- return instr;
- }
- void
- GlobOpt::OptArraySrc(IR::Instr ** const instrRef, Value ** src1Val, Value ** src2Val)
- {
- Assert(instrRef != nullptr);
- ArraySrcOpt arraySrcOpt(this, instrRef, src1Val, src2Val);
- arraySrcOpt.Optimize();
- }
- void
- GlobOpt::ProcessNoImplicitCallArrayUses(IR::RegOpnd * baseOpnd, IR::ArrayRegOpnd * baseArrayOpnd, IR::Instr * instr, bool isLikelyJsArray, bool useNoMissingValues)
- {
- if (isLikelyJsArray)
- {
- // Insert an instruction to indicate to the dead-store pass that implicit calls need to be kept disabled until this
- // instruction. Operations other than LdElem, StElem and IsIn don't benefit much from arrays having no missing values,
- // so no need to ensure that the array still has no missing values. For a particular array, if none of the accesses
- // benefit much from the no-missing-values information, it may be beneficial to avoid checking for no missing
- // values, especially in the case for a single array access, where the cost of the check could be relatively
- // significant. An StElem has to do additional checks in the common path if the array may have missing values, and
- // a StElem that operates on an array that has no missing values is more likely to keep the no-missing-values info
- // on the array more precise, so it still benefits a little from the no-missing-values info.
- this->CaptureNoImplicitCallUses(baseOpnd, isLikelyJsArray);
- }
- else if (baseArrayOpnd && baseArrayOpnd->HeadSegmentLengthSym())
- {
- // A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the typed
- // array's length is set to zero. Insert an instruction to indicate to the dead-store pass that implicit calls need to
- // be disabled until this instruction.
- IR::RegOpnd *const headSegmentLengthOpnd =
- IR::RegOpnd::New(
- baseArrayOpnd->HeadSegmentLengthSym(),
- baseArrayOpnd->HeadSegmentLengthSym()->GetType(),
- instr->m_func);
- const IR::AutoReuseOpnd autoReuseHeadSegmentLengthOpnd(headSegmentLengthOpnd, instr->m_func);
- this->CaptureNoImplicitCallUses(headSegmentLengthOpnd, false);
- }
- }
// For inlined functions with the stack-arguments optimization enabled, replaces accesses to
// the arguments object's length (LdLen_A) or to a constant-index element (LdElemI_A /
// TypeofElem) with a direct Ld_A / Typeof of the known value, eliminating the arguments
// object use entirely.
void
GlobOpt::OptStackArgLenAndConst(IR::Instr* instr, Value** src1Val)
{
    if (!PHASE_OFF(Js::StackArgLenConstOptPhase, instr->m_func) && instr->m_func->IsStackArgsEnabled() && instr->usesStackArgumentsObject && instr->IsInlined())
    {
        IR::Opnd* src1 = instr->GetSrc1();

        // Rewrites 'instr' in place: swaps the opcode, replaces src1 with the resolved
        // operand, clears any now-unneeded bailout/profile info, and re-runs OptSrc on the
        // new source so *src1Val reflects the replacement.
        auto replaceInstr = [&](IR::Opnd* newopnd, Js::OpCode opcode)
        {
            if (PHASE_TESTTRACE(Js::StackArgLenConstOptPhase, instr->m_func))
            {
                Output::Print(_u("Inlined function %s have replaced opcode %s with opcode %s for stack arg optimization. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(),
                    Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), Js::OpCodeUtil::GetOpCodeName(opcode));
                Output::Flush();
            }
            this->CaptureByteCodeSymUses(instr);
            instr->m_opcode = opcode;
            instr->ReplaceSrc1(newopnd);
            // The replacement opcodes cannot bail out, so any attached bailout info is stale.
            if (instr->HasBailOutInfo())
            {
                instr->ClearBailOutInfo();
            }
            if (instr->IsProfiledInstr())
            {
                Assert(opcode == Js::OpCode::Ld_A || opcode == Js::OpCode::Typeof);
                instr->AsProfiledInstr()->u.FldInfo().valueType = ValueType::Uninitialized;
            }
            *src1Val = this->OptSrc(instr->GetSrc1(), &instr);
            instr->m_func->hasArgLenAndConstOpt = true;
        };
        Assert(CurrentBlockData()->IsArgumentsOpnd(src1));
        switch(instr->m_opcode)
        {
            case Js::OpCode::LdLen_A:
            {
                // arguments.length of an inlinee is a compile-time constant: actualCount - 1
                // (exclude the 'this' argument). Tagged as a var int.
                IR::AddrOpnd* newopnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(instr->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, instr->m_func);
                replaceInstr(newopnd, Js::OpCode::Ld_A);
                break;
            }
            case Js::OpCode::LdElemI_A:
            case Js::OpCode::TypeofElem:
            {
                IR::IndirOpnd* indirOpndSrc1 = src1->AsIndirOpnd();
                // Only a constant index (no index opnd, just an offset) can be resolved here.
                if (!indirOpndSrc1->GetIndexOpnd())
                {
                    int argIndex = indirOpndSrc1->GetOffset() + 1;
                    IR::Instr* defInstr = nullptr;
                    // Walk the inlinee's ArgOut instructions to find the one defining this slot.
                    IR::Instr* inlineeStart = instr->m_func->GetInlineeStart();
                    inlineeStart->IterateArgInstrs([&](IR::Instr* argInstr) {
                        StackSym *argSym = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
                        if (argSym->GetArgSlotNum() - 1 == argIndex)
                        {
                            defInstr = argInstr;
                            return true;
                        }
                        return false;
                    });
                    Js::OpCode replacementOpcode;
                    if (instr->m_opcode == Js::OpCode::TypeofElem)
                    {
                        replacementOpcode = Js::OpCode::Typeof;
                    }
                    else
                    {
                        replacementOpcode = Js::OpCode::Ld_A;
                    }
                    // If we cannot find the right instruction. I.E. When calling arguments[2] and no arguments were passed to the func
                    if (defInstr == nullptr)
                    {
                        IR::Opnd * undefined = IR::AddrOpnd::New(instr->m_func->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
                        undefined->SetValueType(ValueType::Undefined);
                        replaceInstr(undefined, replacementOpcode);
                    }
                    else
                    {
                        replaceInstr(defInstr->GetSrc1(), replacementOpcode);
                    }
                }
                else
                {
                    // Dynamic index: the arguments object cannot be optimized away here.
                    instr->m_func->unoptimizableArgumentsObjReference++;
                }
                break;
            }
        }
    }
}
- void
- GlobOpt::CaptureNoImplicitCallUses(
- IR::Opnd *opnd,
- const bool usesNoMissingValuesInfo,
- IR::Instr *const includeCurrentInstr)
- {
- Assert(!IsLoopPrePass());
- Assert(noImplicitCallUsesToInsert);
- Assert(opnd);
- // The opnd may be deleted later, so make a copy to ensure it is alive for inserting NoImplicitCallUses later
- opnd = opnd->Copy(func);
- if(!usesNoMissingValuesInfo)
- {
- const ValueType valueType(opnd->GetValueType());
- if(valueType.IsArrayOrObjectWithArray() && valueType.HasNoMissingValues())
- {
- // Inserting NoImplicitCallUses for an opnd with a definitely-array-with-no-missing-values value type means that the
- // instruction following it uses the information that the array has no missing values in some way, for instance, it
- // may omit missing value checks. Based on that, the dead-store phase in turn ensures that the necessary bailouts
- // are inserted to ensure that the array still has no missing values until the following instruction. Since
- // 'usesNoMissingValuesInfo' is false, change the value type to indicate to the dead-store phase that the following
- // instruction does not use the no-missing-values information.
- opnd->SetValueType(valueType.SetHasNoMissingValues(false));
- }
- }
- if(includeCurrentInstr)
- {
- IR::Instr *const noImplicitCallUses =
- IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, includeCurrentInstr->m_func);
- noImplicitCallUses->SetSrc1(opnd);
- noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
- includeCurrentInstr->InsertAfter(noImplicitCallUses);
- return;
- }
- noImplicitCallUsesToInsert->Add(opnd);
- }
- void
- GlobOpt::InsertNoImplicitCallUses(IR::Instr *const instr)
- {
- Assert(noImplicitCallUsesToInsert);
- const int n = noImplicitCallUsesToInsert->Count();
- if(n == 0)
- {
- return;
- }
- IR::Instr *const insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();
- for(int i = 0; i < n;)
- {
- IR::Instr *const noImplicitCallUses = IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, instr->m_func);
- noImplicitCallUses->SetSrc1(noImplicitCallUsesToInsert->Item(i));
- noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
- ++i;
- if(i < n)
- {
- noImplicitCallUses->SetSrc2(noImplicitCallUsesToInsert->Item(i));
- noImplicitCallUses->GetSrc2()->SetIsJITOptimizedReg(true);
- ++i;
- }
- noImplicitCallUses->SetByteCodeOffset(instr);
- insertBeforeInstr->InsertBefore(noImplicitCallUses);
- }
- noImplicitCallUsesToInsert->Clear();
- }
- void
- GlobOpt::PrepareLoopArrayCheckHoist()
- {
- if(IsLoopPrePass() || !currentBlock->loop || !currentBlock->isLoopHeader || !currentBlock->loop->parent)
- {
- return;
- }
- if(currentBlock->loop->parent->needImplicitCallBailoutChecksForJsArrayCheckHoist)
- {
- // If the parent loop is an array check elimination candidate, so is the current loop. Even though the current loop may
- // not have array accesses, if the parent loop hoists array checks, the current loop also needs implicit call checks.
- currentBlock->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
- }
- }
// Computes which pieces of tracked JS-array information (no-missing-values, head segments,
// head segment lengths, array lengths, native-array-ness) the given instruction may kill.
// The result is consumed by the value-kill processing above and aggregated per loop.
JsArrayKills
GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
{
    Assert(instr);

    JsArrayKills kills;
    if(instr->UsesAllFields())
    {
        // Calls can (but are unlikely to) change a javascript array into an ES5 array, which may have different behavior for
        // index properties.
        kills.SetKillsAllArrays();
        return kills;
    }

    // If none of the array-related hoisting/specialization optimizations are enabled, no
    // kill tracking is needed at all.
    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    const bool doArraySegmentHoist = DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array));
    Assert(doArraySegmentHoist == DoArraySegmentHoist(ValueType::GetObject(ObjectType::ObjectWithArray)));
    const bool doArrayLengthHoist = DoArrayLengthHoist();
    if(!doArrayMissingValueCheckHoist && !doNativeArrayTypeSpec && !doArraySegmentHoist && !doArrayLengthHoist)
    {
        return kills;
    }

    // The following operations may create missing values in an array in an unlikely circumstance. Even though they don't kill
    // the fact that the 'this' parameter is an array (when implicit calls are disabled), we don't have a way to say the value
    // type is definitely array but it likely has no missing values. So, these will kill the definite value type as well, making
    // it likely array, such that the array checks will have to be redone.
    const bool useValueTypes = !IsLoopPrePass(); // Source value types are not guaranteed to be correct in a loop prepass
    switch(instr->m_opcode)
    {
        case Js::OpCode::StElemC:
        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        {
            // Element stores: profiled info tells us whether they likely store outside the
            // head segment / array bounds, which would grow or reshape the array.
            Assert(instr->GetDst());
            if(!instr->GetDst()->IsIndirOpnd())
            {
                break;
            }
            const ValueType baseValueType =
                useValueTypes ? instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType() : ValueType::Uninitialized;
            if(useValueTypes && baseValueType.IsNotArrayOrObjectWithArray())
            {
                break;
            }
            if(instr->IsProfiledInstr())
            {
                const Js::StElemInfo *const stElemInfo = instr->AsProfiledInstr()->u.stElemInfo;
                if(doArraySegmentHoist && stElemInfo->LikelyStoresOutsideHeadSegmentBounds())
                {
                    kills.SetKillsArrayHeadSegments();
                    kills.SetKillsArrayHeadSegmentLengths();
                }
                if(doArrayLengthHoist &&
                    !(useValueTypes && baseValueType.IsNotArray()) &&
                    stElemInfo->LikelyStoresOutsideArrayBounds())
                {
                    kills.SetKillsArrayLengths();
                }
            }
            break;
        }

        case Js::OpCode::DeleteElemI_A:
        case Js::OpCode::DeleteElemIStrict_A:
            // Deleting an element creates a missing value and may shrink the head segment.
            Assert(instr->GetSrc1());
            if(!instr->GetSrc1()->IsIndirOpnd() ||
                (useValueTypes && instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsNotArrayOrObjectWithArray()))
            {
                break;
            }
            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }
            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            break;

        case Js::OpCode::ConsoleScopedStFld:
        case Js::OpCode::ConsoleScopedStFldStrict:
        case Js::OpCode::ScopedStFld:
        case Js::OpCode::ScopedStFldStrict:
        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
        case Js::OpCode::StSuperFld:
        case Js::OpCode::StSuperFldStrict:
        {
            // Property stores only matter here when they write 'length' on something that may
            // be an array, which can truncate/grow the array.
            Assert(instr->GetDst());

            if(!doArraySegmentHoist && !doArrayLengthHoist)
            {
                break;
            }

            IR::SymOpnd *const symDst = instr->GetDst()->AsSymOpnd();
            if(!symDst->IsPropertySymOpnd())
            {
                break;
            }

            IR::PropertySymOpnd *const dst = symDst->AsPropertySymOpnd();

            if(dst->m_sym->AsPropertySym()->m_propertyId != Js::PropertyIds::length)
            {
                break;
            }

            if(useValueTypes && dst->GetPropertyOwnerValueType().IsNotArray())
            {
                // Setting the 'length' property of an object that is not an array, even if it has an internal array, does
                // not kill the head segment or head segment length of any arrays.
                break;
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            if(doArrayLengthHoist)
            {
                kills.SetKillsArrayLengths();
            }
            break;
        }

        case Js::OpCode::InlineArrayPush:
        {
            // push may append past the head segment, grow the length, and (on type mismatch)
            // convert a native array.
            Assert(instr->GetSrc2());
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());

            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegments();
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }

            // Don't kill NativeArray, if there is no mismatch between array's type and element's type.
            if(doNativeArrayTypeSpec &&
               !(useValueTypes && arrayValueType.IsNativeArray() &&
                    ((arrayValueType.IsLikelyNativeIntArray() && instr->GetSrc2()->IsInt32()) ||
                     (arrayValueType.IsLikelyNativeFloatArray() && instr->GetSrc2()->IsFloat()))
                ) &&
               !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                kills.SetKillsNativeArrays();
            }
            break;
        }

        case Js::OpCode::InlineArrayPop:
        {
            // pop shrinks the head segment length and the array length, and popping the last
            // element of an object's internal array can create a missing value.
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }

            if(doArrayMissingValueCheckHoist && !(useValueTypes && arrayValueType.IsArray()))
            {
                kills.SetKillsObjectArraysWithNoMissingValues();
            }
            break;
        }

        case Js::OpCode::CallDirect:
        {
            // Direct calls to known Array helpers: classify the kills per helper method.
            Assert(instr->GetSrc1());

            // Find the 'this' parameter and check if it's possible for it to be an array
            IR::Opnd *const arrayOpnd = instr->FindCallArgumentOpnd(1);
            Assert(arrayOpnd);
            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            const IR::JnHelperMethod helperMethod = instr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
            if(doArrayMissingValueCheckHoist)
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArraysWithNoMissingValues();
                        break;
                }
            }

            if(doArraySegmentHoist)
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                    case IR::HelperArray_Concat:
                        kills.SetKillsArrayHeadSegments();
                        kills.SetKillsArrayHeadSegmentLengths();
                        break;
                }
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArrayLengths();
                        break;
                }
            }

            if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Slice:
                    // Currently not inlined.
                    //case IR::HelperArray_Sort:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                    case IR::HelperArray_Concat:
                        kills.SetKillsNativeArrays();
                        break;
                }
            }
            break;
        }

        case Js::OpCode::InitProto:
        {
            // Making a native array a prototype converts it, so its native-ness is killed.
            // Find the 'this' parameter and check if it's possible for it to be an array
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);
            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                kills.SetKillsNativeArrays();
            }
            break;
        }

        case Js::OpCode::NewClassProto:
            Assert(instr->GetSrc1());
            if (IR::AddrOpnd::IsEqualAddr(instr->GetSrc1(), (void*)func->GetScriptContextInfo()->GetObjectPrototypeAddr()))
            {
                // No extends operand, the proto parent is the Object prototype
                break;
            }
            // Fall through

        case Js::OpCode::NewScObjectNoCtor:
        case Js::OpCode::NewScObjectNoCtorFull:
            if(doNativeArrayTypeSpec)
            {
                // Class/object construction can make something a prototype
                kills.SetKillsNativeArrays();
            }
            break;
    }

    return kills;
}
- GlobOptBlockData const * GlobOpt::CurrentBlockData() const
- {
- return &this->currentBlock->globOptData;
- }
- GlobOptBlockData * GlobOpt::CurrentBlockData()
- {
- return &this->currentBlock->globOptData;
- }
- void GlobOpt::CommitCapturedValuesCandidate()
- {
- GlobOptBlockData * globOptData = CurrentBlockData();
- globOptData->changedSyms->ClearAll();
- if (!this->changedSymsAfterIncBailoutCandidate->IsEmpty())
- {
- //
- // some symbols are changed after the values for current bailout have been
- // captured (GlobOpt::CapturedValues), need to restore such symbols as changed
- // for following incremental bailout construction, or we will miss capturing
- // values for later bailout
- //
- // swap changedSyms and changedSymsAfterIncBailoutCandidate
- // because both are from this->alloc
- BVSparse<JitArenaAllocator> * tempBvSwap = globOptData->changedSyms;
- globOptData->changedSyms = this->changedSymsAfterIncBailoutCandidate;
- this->changedSymsAfterIncBailoutCandidate = tempBvSwap;
- }
- if (globOptData->capturedValues)
- {
- globOptData->capturedValues->DecrementRefCount();
- }
- globOptData->capturedValues = globOptData->capturedValuesCandidate;
- // null out capturedValuesCandidate to stop tracking symbols change for it
- globOptData->capturedValuesCandidate = nullptr;
- }
- bool
- GlobOpt::IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr)
- {
- // StElem is profiled with information indicating whether it will likely create a missing value in the array. In that case,
- // we prefer to kill the no-missing-values information in the value so that we don't bail out in a likely circumstance.
- return
- (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
- DoArrayMissingValueCheckHoist() &&
- instr->IsProfiledInstr() &&
- instr->AsProfiledInstr()->u.stElemInfo->LikelyCreatesMissingValue();
- }
- bool
- GlobOpt::NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock const * const block, const bool isForwardPass) const
- {
- Assert(block);
- return isForwardPass && block->loop && block->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist;
- }
- bool
- GlobOpt::PrepareForIgnoringIntOverflow(IR::Instr *const instr)
- {
- Assert(instr);
- const bool isBoundary = instr->m_opcode == Js::OpCode::NoIntOverflowBoundary;
- // Update the instruction's "int overflow matters" flag based on whether we are currently allowing ignoring int overflows.
- // Some operations convert their srcs to int32s, those can still ignore int overflow.
- if(instr->ignoreIntOverflowInRange)
- {
- instr->ignoreIntOverflowInRange = !intOverflowCurrentlyMattersInRange || OpCodeAttr::IsInt32(instr->m_opcode);
- }
- if(!intOverflowDoesNotMatterRange)
- {
- Assert(intOverflowCurrentlyMattersInRange);
- // There are no more ranges of instructions where int overflow does not matter, in this block.
- return isBoundary;
- }
- if(instr == intOverflowDoesNotMatterRange->LastInstr())
- {
- Assert(isBoundary);
- // Reached the last instruction in the range
- intOverflowCurrentlyMattersInRange = true;
- intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
- return isBoundary;
- }
- if(!intOverflowCurrentlyMattersInRange)
- {
- return isBoundary;
- }
- if(instr != intOverflowDoesNotMatterRange->FirstInstr())
- {
- // Have not reached the next range
- return isBoundary;
- }
- Assert(isBoundary);
- // This is the first instruction in a range of instructions where int overflow does not matter. There can be many inputs to
- // instructions in the range, some of which are inputs to the range itself (that is, the values are not defined in the
- // range). Ignoring int overflow is only valid for int operations, so we need to ensure that all inputs to the range are
- // int (not "likely int") before ignoring any overflows in the range. Ensuring that a sym with a "likely int" value is an
- // int requires a bail-out. These bail-out check need to happen before any overflows are ignored, otherwise it's too late.
- // The backward pass tracked all inputs into the range. Iterate over them and verify the values, and insert lossless
- // conversions to int as necessary, before the first instruction in the range. If for any reason all values cannot be
- // guaranteed to be ints, the optimization will be disabled for this range.
- intOverflowCurrentlyMattersInRange = false;
- {
- BVSparse<JitArenaAllocator> tempBv1(tempAlloc);
- BVSparse<JitArenaAllocator> tempBv2(tempAlloc);
- {
- // Just renaming the temp BVs for this section to indicate how they're used so that it makes sense
- BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
- BVSparse<JitArenaAllocator> &symsToInclude = tempBv2;
- #if DBG_DUMP
- SymID couldNotConvertSymId = 0;
- #endif
- FOREACH_BITSET_IN_SPARSEBV(id, intOverflowDoesNotMatterRange->SymsRequiredToBeInt())
- {
- Sym *const sym = func->m_symTable->Find(id);
- Assert(sym);
- // Some instructions with property syms are also tracked by the backward pass, and may be included in the range
- // (LdSlot for instance). These property syms don't get their values until either copy-prop resolves a value for
- // them, or a new value is created once the use of the property sym is reached. In either case, we're not that
- // far yet, so we need to find the future value of the property sym by evaluating copy-prop in reverse.
- Value *const value = sym->IsStackSym() ? CurrentBlockData()->FindValue(sym) : CurrentBlockData()->FindFuturePropertyValue(sym->AsPropertySym());
- if(!value)
- {
- #if DBG_DUMP
- couldNotConvertSymId = id;
- #endif
- intOverflowCurrentlyMattersInRange = true;
- BREAK_BITSET_IN_SPARSEBV;
- }
- const bool isInt32OrUInt32Float =
- value->GetValueInfo()->IsFloatConstant() &&
- Js::JavascriptNumber::IsInt32OrUInt32(value->GetValueInfo()->AsFloatConstant()->FloatValue());
- if(value->GetValueInfo()->IsInt() || isInt32OrUInt32Float)
- {
- if(!IsLoopPrePass())
- {
- // Input values that are already int can be excluded from int-specialization. We can treat unsigned
- // int32 values as int32 values (ignoring the overflow), since the values will only be used inside the
- // range where overflow does not matter.
- symsToExclude.Set(sym->m_id);
- }
- continue;
- }
- if(!DoAggressiveIntTypeSpec() || !value->GetValueInfo()->IsLikelyInt())
- {
- // When aggressive int specialization is off, syms with "likely int" values cannot be forced to int since
- // int bail-out checks are not allowed in that mode. Similarly, with aggressive int specialization on, it
- // wouldn't make sense to force non-"likely int" values to int since it would almost guarantee a bail-out at
- // runtime. In both cases, just disable ignoring overflow for this range.
- #if DBG_DUMP
- couldNotConvertSymId = id;
- #endif
- intOverflowCurrentlyMattersInRange = true;
- BREAK_BITSET_IN_SPARSEBV;
- }
- if(IsLoopPrePass())
- {
- // The loop prepass does not modify bit-vectors. Since it doesn't add bail-out checks, it also does not need
- // to specialize anything up-front. It only needs to be consistent in how it determines whether to allow
- // ignoring overflow for a range, based on the values of inputs into the range.
- continue;
- }
- // Since input syms are tracked in the backward pass, where there is no value tracking, it will not be aware of
- // copy-prop. If a copy-prop sym is available, it will be used instead, so exclude the original sym and include
- // the copy-prop sym for specialization.
- StackSym *const copyPropSym = CurrentBlockData()->GetCopyPropSym(sym, value);
- if(copyPropSym)
- {
- symsToExclude.Set(sym->m_id);
- Assert(!symsToExclude.Test(copyPropSym->m_id));
- const bool needsToBeLossless =
- !intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(sym->m_id);
- if(intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(copyPropSym->m_id) ||
- symsToInclude.TestAndSet(copyPropSym->m_id))
- {
- // The copy-prop sym is already included
- if(needsToBeLossless)
- {
- // The original sym needs to be lossless, so make the copy-prop sym lossless as well.
- intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(copyPropSym->m_id);
- }
- }
- else if(!needsToBeLossless)
- {
- // The copy-prop sym was not included before, and the original sym can be lossy, so make it lossy.
- intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(copyPropSym->m_id);
- }
- }
- else if(!sym->IsStackSym())
- {
- // Only stack syms can be converted to int, and copy-prop syms are stack syms. If a copy-prop sym was not
- // found for the property sym, we can't ignore overflows in this range.
- #if DBG_DUMP
- couldNotConvertSymId = id;
- #endif
- intOverflowCurrentlyMattersInRange = true;
- BREAK_BITSET_IN_SPARSEBV;
- }
- } NEXT_BITSET_IN_SPARSEBV;
- if(intOverflowCurrentlyMattersInRange)
- {
- #if DBG_DUMP
- if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func) && !IsLoopPrePass())
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(
- _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u, Disabled ignoring overflows\n"),
- func->GetJITFunctionBody()->GetDisplayName(),
- func->GetDebugNumberSet(debugStringBuffer),
- Js::PhaseNames[Js::ForwardPhase],
- currentBlock->GetBlockNum());
- Output::Print(_u(" Input sym could not be turned into an int: %u\n"), couldNotConvertSymId);
- Output::Print(_u(" First instr: "));
- instr->m_next->Dump();
- Output::Flush();
- }
- #endif
- intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
- return isBoundary;
- }
- if(IsLoopPrePass())
- {
- return isBoundary;
- }
- // Update the syms to specialize after enumeration
- intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
- intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
- intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Or(&symsToInclude);
- }
- {
- // Exclude syms that are already live as lossless int32, and exclude lossy conversions of syms that are already live
- // as lossy int32.
- // symsToExclude = liveInt32Syms - liveLossyInt32Syms // syms live as lossless int
- // lossySymsToExclude = symsRequiredToBeLossyInt & liveLossyInt32Syms; // syms we want as lossy int that are already live as lossy int
- // symsToExclude |= lossySymsToExclude
- // symsRequiredToBeInt -= symsToExclude
- // symsRequiredToBeLossyInt -= symsToExclude
- BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
- BVSparse<JitArenaAllocator> &lossySymsToExclude = tempBv2;
- symsToExclude.Minus(CurrentBlockData()->liveInt32Syms, CurrentBlockData()->liveLossyInt32Syms);
- lossySymsToExclude.And(
- intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(),
- CurrentBlockData()->liveLossyInt32Syms);
- symsToExclude.Or(&lossySymsToExclude);
- intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
- intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
- }
- #if DBG
- {
- // Verify that the syms to be converted are live
- // liveSyms = liveInt32Syms | liveFloat64Syms | liveVarSyms
- // deadSymsRequiredToBeInt = symsRequiredToBeInt - liveSyms
- BVSparse<JitArenaAllocator> &liveSyms = tempBv1;
- BVSparse<JitArenaAllocator> &deadSymsRequiredToBeInt = tempBv2;
- liveSyms.Or(CurrentBlockData()->liveInt32Syms, CurrentBlockData()->liveFloat64Syms);
- liveSyms.Or(CurrentBlockData()->liveVarSyms);
- deadSymsRequiredToBeInt.Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), &liveSyms);
- Assert(deadSymsRequiredToBeInt.IsEmpty());
- }
- #endif
- }
- // Int-specialize the syms before the first instruction of the range (the current instruction)
- intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt());
- #if DBG_DUMP
- if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(
- _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n"),
- func->GetJITFunctionBody()->GetDisplayName(),
- func->GetDebugNumberSet(debugStringBuffer),
- Js::PhaseNames[Js::ForwardPhase],
- currentBlock->GetBlockNum());
- Output::Print(_u(" Input syms to be int-specialized (lossless): "));
- intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Dump();
- Output::Print(_u(" Input syms to be converted to int (lossy): "));
- intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
- Output::Print(_u(" First instr: "));
- instr->m_next->Dump();
- Output::Flush();
- }
- #endif
- ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), currentBlock, false /* lossy */, instr);
- ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(), currentBlock, true /* lossy */, instr);
- return isBoundary;
- }
// Verifies that an instruction inside an "int overflow does not matter" range is actually
// operating purely on int32 values. If it is not (and it has bail-out info or side effects),
// ignoring overflow would be unsound, so we bail out of the JIT at compile time:
// either rejit with int-overflow tracking disabled, or abort jitting entirely if
// tracking was already disabled.
void
GlobOpt::VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr)
{
    // Nothing to verify outside an overflow-ignoring range, and the loop prepass does not
    // int-specialize, so there is nothing to verify there either.
    if(intOverflowCurrentlyMattersInRange || IsLoopPrePass())
    {
        return;
    }

    // A Mul_I4 in such a range must have had its 32-bit overflow check demoted to a
    // non-32-bit overflow check (MULs bail out on non-32-bit overflow instead).
    Assert(instr->m_opcode != Js::OpCode::Mul_I4 ||
        (instr->m_opcode == Js::OpCode::Mul_I4 && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow() ));

    // Instructions that are marked as "overflow doesn't matter" in the range must guarantee that they operate on int values and
    // result in int values, for ignoring overflow to be valid. So, int-specialization is required for such instructions in the
    // range. Ld_A is an exception because it only specializes if the src sym is available as a required specialized sym, and it
    // doesn't generate bailouts or cause ignoring int overflow to be invalid.
    // MULs are allowed to start a region and have BailOutInfo since they will bailout on non-32 bit overflow.
    if(instr->m_opcode == Js::OpCode::Ld_A ||
       ((!instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::Mul_I4) &&
        (!instr->GetDst() || instr->GetDst()->IsInt32()) &&
        (!instr->GetSrc1() || instr->GetSrc1()->IsInt32()) &&
        (!instr->GetSrc2() || instr->GetSrc2()->IsInt32())))
    {
        return;
    }

    // Not int-specialized, but harmless: no bail-out info and no side effects.
    if (!instr->HasBailOutInfo() && !instr->HasAnySideEffects())
    {
        return;
    }

    // This can happen for Neg_A if it needs to bail out on negative zero, and perhaps other cases as well. It's too late to fix
    // the problem (overflows may already be ignored), so handle it by bailing out at compile-time and disabling tracking int
    // overflow.
    Assert(!func->IsTrackCompoundedIntOverflowDisabled());

    if(PHASE_TRACE(Js::BailOutPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(
            _u("BailOut (compile-time): function: %s (%s) instr: "),
            func->GetJITFunctionBody()->GetDisplayName(),
            func->GetDebugNumberSet(debugStringBuffer));
#if DBG_DUMP
        instr->Dump();
#else
        Output::Print(_u("%s "), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
#endif
        Output::Print(_u("(overflow does not matter but could not int-spec or needed bailout)\n"));
        Output::Flush();
    }

    // Release-only path: in DBG builds the Assert above fires before reaching this check.
    if(func->IsTrackCompoundedIntOverflowDisabled())
    {
        // Tracking int overflows is already off for some reason. Prevent trying to rejit again because it won't help and the
        // same thing will happen again and cause an infinite loop. Just abort jitting this function.
        if(PHASE_TRACE(Js::BailOutPhase, this->func))
        {
            Output::Print(_u("    Aborting JIT because TrackIntOverflow is already off\n"));
            Output::Flush();
        }
        throw Js::OperationAbortedException();
    }

    throw Js::RejitException(RejitReason::TrackIntOverflowDisabled);
}
// It makes lowering easier if it can assume that the first src is never a constant,
// at least for commutative operators. For non-commutative, just hoist the constant.
//
// Canonicalizes instr before lowering: either swaps src1/src2 (flipping the opcode for
// relational branches/compares so semantics are preserved), or hoists an immediate src1
// into a separate load. pSrc1Val/pSrc2Val are swapped alongside the operands so the
// caller's value tracking stays in sync.
void
GlobOpt::PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val)
{
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    // Decide whether this instruction is a candidate for canonicalization at all.
    if (src1->IsImmediateOpnd())
    {
        // Swap for dst, src
    }
    else if (src2 && dst && src2->IsRegOpnd())
    {
        if (src2->GetIsDead() && !src1->GetIsDead() && !src1->IsEqual(dst))
        {
            // Swap if src2 is dead, as the reg can be reuse for the dst for opEqs like on x86 (ADD r1, r2)
        }
        else if (src2->IsEqual(dst))
        {
            // Helps lowering of opEqs
        }
        else
        {
            return;
        }
        // Make sure we don't swap 2 srcs with valueOf calls.
        if (OpCodeAttr::OpndHasImplicitCall(instr->m_opcode))
        {
            if (instr->IsBranchInstr())
            {
                if (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive())
                {
                    return;
                }
            }
            else if (!src1->GetValueType().IsPrimitive() && !src2->GetValueType().IsPrimitive())
            {
                return;
            }
        }
    }
    else
    {
        return;
    }

    // `opcode` may be rewritten below (e.g. BrGe -> BrLe) before the swap, so the swapped
    // instruction keeps its original meaning.
    Js::OpCode opcode = instr->m_opcode;
    switch (opcode)
    {
    // Commutative operators: safe to swap srcs without changing the opcode.
    case Js::OpCode::And_A:
    case Js::OpCode::Mul_A:
    case Js::OpCode::Or_A:
    case Js::OpCode::Xor_A:
    case Js::OpCode::And_I4:
    case Js::OpCode::Mul_I4:
    case Js::OpCode::Or_I4:
    case Js::OpCode::Xor_I4:
    case Js::OpCode::Add_I4:
swap_srcs:
        // Only swap when src2 is not itself an immediate (otherwise we'd just move the
        // problem to the other operand); fall through to the hoist path instead.
        if (!instr->GetSrc2()->IsImmediateOpnd())
        {
            instr->m_opcode = opcode;
            instr->SwapOpnds();
            Value *tempVal = *pSrc1Val;
            *pSrc1Val = *pSrc2Val;
            *pSrc2Val = tempVal;
            return;
        }
        break;

    // Symmetric (in)equality branches: swap without opcode change.
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrEq_I4:
        goto swap_srcs;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNeq_I4:
        goto swap_srcs;

    // Relational branches: mirror the comparison so the swap preserves semantics.
    case Js::OpCode::BrGe_A:
        opcode = Js::OpCode::BrLe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGe_A:
        opcode = Js::OpCode::BrNotLe_A;
        goto swap_srcs;

    case Js::OpCode::BrGe_I4:
        opcode = Js::OpCode::BrLe_I4;
        goto swap_srcs;

    case Js::OpCode::BrGt_A:
        opcode = Js::OpCode::BrLt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGt_A:
        opcode = Js::OpCode::BrNotLt_A;
        goto swap_srcs;

    case Js::OpCode::BrGt_I4:
        opcode = Js::OpCode::BrLt_I4;
        goto swap_srcs;

    case Js::OpCode::BrLe_A:
        opcode = Js::OpCode::BrGe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLe_A:
        opcode = Js::OpCode::BrNotGe_A;
        goto swap_srcs;

    case Js::OpCode::BrLe_I4:
        opcode = Js::OpCode::BrGe_I4;
        goto swap_srcs;

    case Js::OpCode::BrLt_A:
        opcode = Js::OpCode::BrGt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLt_A:
        opcode = Js::OpCode::BrNotGt_A;
        goto swap_srcs;

    case Js::OpCode::BrLt_I4:
        opcode = Js::OpCode::BrGt_I4;
        goto swap_srcs;

    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
        // this == "" not the same as "" == this...
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        goto swap_srcs;

    case Js::OpCode::CmGe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLe_A;
        goto swap_srcs;

    case Js::OpCode::CmGt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLt_A;
        goto swap_srcs;

    case Js::OpCode::CmLe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGe_A;
        goto swap_srcs;

    case Js::OpCode::CmLt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGt_A;
        goto swap_srcs;

    case Js::OpCode::CallI:
    case Js::OpCode::CallIFixed:
    case Js::OpCode::NewScObject:
    case Js::OpCode::NewScObjectSpread:
    case Js::OpCode::NewScObjArray:
    case Js::OpCode::NewScObjArraySpread:
    case Js::OpCode::NewScObjectNoCtor:
        // Don't insert load to register if the function operand is a fixed function.
        if (instr->HasFixedFunctionAddressTarget())
        {
            return;
        }
        break;

        // Can't do add because <32 + "Hello"> isn't equal to <"Hello" + 32>
        // Lower can do the swap. Other op-codes listed below don't need immediate source hoisting, as the fast paths handle it,
        // or the lowering handles the hoisting.
    case Js::OpCode::Add_A:
        if (src1->IsFloat())
        {
            goto swap_srcs;
        }
        return;

    case Js::OpCode::Sub_I4:
    case Js::OpCode::Neg_I4:
    case Js::OpCode::Not_I4:
    case Js::OpCode::NewScFunc:
    case Js::OpCode::NewScGenFunc:
    case Js::OpCode::NewScFuncHomeObj:
    case Js::OpCode::NewScGenFuncHomeObj:
    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScIntArray:
    case Js::OpCode::NewScFltArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::NewRegEx:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::ThrowRuntimeError:
    case Js::OpCode::TrapIfMinIntOverNegOne:
    case Js::OpCode::TrapIfTruncOverflow:
    case Js::OpCode::TrapIfZero:
    case Js::OpCode::TrapIfUnalignedAccess:
    case Js::OpCode::FromVar:
    case Js::OpCode::Conv_Prim:
    case Js::OpCode::Conv_Prim_Sat:
    case Js::OpCode::LdC_A_I4:
    case Js::OpCode::LdStr:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StartCall:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_InlineSpecialized:
    case Js::OpCode::ArgOut_A_SpreadArg:
    case Js::OpCode::InlineeEnd:
    case Js::OpCode::EndCallForPolymorphicInlinee:
    case Js::OpCode::InlineeMetaArg:
    case Js::OpCode::InlineBuiltInEnd:
    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::CallHelper:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
    case Js::OpCode::RuntimeTypeError:
    case Js::OpCode::RuntimeReferenceError:
    case Js::OpCode::Ret:
    case Js::OpCode::NewScObjectSimple:
    case Js::OpCode::NewScObjectLiteral:
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::CallDirect:
    case Js::OpCode::BrNotHasSideEffects:
    case Js::OpCode::NewConcatStrMulti:
    case Js::OpCode::NewConcatStrMultiBE:
    case Js::OpCode::ExtendArg_A:
    case Js::OpCode::NewScopeSlots:
    case Js::OpCode::NewScopeSlotsWithoutPropIds:
    case Js::OpCode::NewStackScopeSlots:
    case Js::OpCode::IsInst:
    case Js::OpCode::BailOnEqual:
    case Js::OpCode::BailOnNotEqual:
    case Js::OpCode::StArrViewElem:
        return;
    }

    // Couldn't (or chose not to) swap; if src1 is not an immediate there is nothing to hoist.
    if (!src1->IsImmediateOpnd())
    {
        return;
    }

    // The fast paths or lowering of the remaining instructions may not support handling immediate opnds for the first src. The
    // immediate src1 is hoisted here into a separate instruction.
    if (src1->IsIntConstOpnd())
    {
        IR::Instr *newInstr = instr->HoistSrc1(Js::OpCode::Ld_I4);
        ToInt32Dst(newInstr, newInstr->GetDst()->AsRegOpnd(), this->currentBlock);
    }
    else if (src1->IsInt64ConstOpnd())
    {
        instr->HoistSrc1(Js::OpCode::Ld_I4);
    }
    else
    {
        instr->HoistSrc1(Js::OpCode::Ld_A);
    }

    // The hoisted load's dst becomes the new src1; mark its sym as a known constant.
    src1 = instr->GetSrc1();
    src1->AsRegOpnd()->m_sym->SetIsConst();
}
// Clear the ValueMap of the values invalidated by this instr.
void
GlobOpt::ProcessKills(IR::Instr *instr)
{
    // A generator Yield suspends execution, so kill the state tracked for the
    // current block before the regular kill processing below.
    if (instr->m_opcode == Js::OpCode::Yield)
    {
        this->CurrentBlockData()->KillStateForGeneratorYield(instr);
    }

    // Invalidate tracked fields, values, and array values affected by this instruction.
    this->ProcessFieldKills(instr);
    this->ProcessValueKills(instr);
    this->ProcessArrayValueKills(instr);
}
- bool
- GlobOpt::OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives)
- {
- if(!loop->CanHoistInvariants())
- {
- return false;
- }
- Sym *sym;
- switch(src->GetKind())
- {
- case IR::OpndKindAddr:
- case IR::OpndKindFloatConst:
- case IR::OpndKindIntConst:
- return true;
- case IR::OpndKindReg:
- sym = src->AsRegOpnd()->m_sym;
- break;
- case IR::OpndKindSym:
- sym = src->AsSymOpnd()->m_sym;
- if (src->AsSymOpnd()->IsPropertySymOpnd())
- {
- if (src->AsSymOpnd()->AsPropertySymOpnd()->IsTypeChecked())
- {
- // We do not handle hoisting these yet. We might be hoisting this across the instr with the type check protecting this one.
- // And somehow, the dead-store pass now removes the type check on that instr later on...
- // For CheckFixedFld, there is no benefit hoisting these if they don't have a type check as they won't generate code.
- return false;
- }
- }
- break;
- case IR::OpndKindHelperCall:
- // Helper calls, like the private slot getter, can be invariant.
- // Consider moving more math builtin to invariant?
- return HelperMethodAttributes::IsInVariant(src->AsHelperCallOpnd()->m_fnHelper);
- default:
- return false;
- }
- return OptIsInvariant(sym, block, loop, srcVal, isNotTypeSpecConv, allowNonPrimitives);
- }
// Determines whether a sym's value is loop-invariant: its current value must match the
// value it has in the loop's landing pad, the appropriate type-specialized/var version
// must be live in the landing pad, and (for type-spec purposes) it must not be redefined
// inside the loop. On success, *loopHeadValRef (if provided) receives the landing-pad value.
bool
GlobOpt::OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef)
{
    // Allow callers that don't care about the landing-pad value to pass nullptr.
    Value *localLoopHeadVal;
    if(!loopHeadValRef)
    {
        loopHeadValRef = &localLoopHeadVal;
    }
    Value *&loopHeadVal = *loopHeadValRef;
    loopHeadVal = nullptr;

    if(!loop->CanHoistInvariants())
    {
        return false;
    }

    if (sym->IsStackSym())
    {
        if (sym->AsStackSym()->IsTypeSpec())
        {
            StackSym *varSym = sym->AsStackSym()->GetVarEquivSym(this->func);
            // Make sure the int32/float64 version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (sym->AsStackSym()->IsInt32())
            {
                Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
                if (!loop->landingPad->globOptData.liveInt32Syms->Test(varSym->m_id) ||
                    (loop->landingPad->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
                    !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)))
                {
                    // Either the int32 sym is not live in the landing pad, or it's lossy in the landing pad and the
                    // instruction's block is using the lossless version. In either case, the instruction cannot be hoisted
                    // without doing a conversion of this operand.
                    return false;
                }
            }
            else if (sym->AsStackSym()->IsFloat64())
            {
                if (!loop->landingPad->globOptData.liveFloat64Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }
            // Continue the checks below on the var-equivalent sym.
            sym = sym->AsStackSym()->GetVarEquivSym(this->func);
        }
        else
        {
            // Make sure the var version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (!loop->landingPad->globOptData.liveVarSyms->Test(sym->m_id))
            {
                return false;
            }
        }
    }
    else if (sym->IsPropertySym())
    {
        // For a property sym, the owning object's stack sym must be live in the landing pad.
        if (!loop->landingPad->globOptData.liveVarSyms->Test(sym->AsPropertySym()->m_stackSym->m_id))
        {
            return false;
        }
    }
    else
    {
        return false;
    }

    // We rely on having a value.
    if (srcVal == NULL)
    {
        return false;
    }

    // A symbol is invariant if its current value is the same as it was upon entering the loop.
    loopHeadVal = loop->landingPad->globOptData.FindValue(sym);
    if (loopHeadVal == NULL || loopHeadVal->GetValueNumber() != srcVal->GetValueNumber())
    {
        return false;
    }

    // Can't hoist non-primitives, unless we have safeguards against valueof/tostring. Additionally, we need to consider
    // the value annotations on the source *before* the loop: if we hoist this instruction outside the loop, we can't
    // necessarily rely on type annotations added (and enforced) earlier in the loop's body.
    //
    // It might look as though !loopHeadVal->GetValueInfo()->IsPrimitive() implies
    // !loop->landingPad->globOptData.IsTypeSpecialized(sym), but it turns out that this is not always the case. We
    // encountered a test case in which we had previously hoisted a FromVar (to float 64) instruction, but its bailout code was
    // BailoutPrimitiveButString, rather than BailoutNumberOnly, which would have allowed us to conclude that the dest was
    // definitely a float64. Instead, it was only *likely* a float64, causing IsPrimitive to return false.
    if (!allowNonPrimitives && !loopHeadVal->GetValueInfo()->IsPrimitive() && !loop->landingPad->globOptData.IsTypeSpecialized(sym))
    {
        return false;
    }

    if(!isNotTypeSpecConv && loop->symsDefInLoop->Test(sym->m_id))
    {
        // Typically, a sym is considered invariant if it has the same value in the current block and in the loop landing pad.
        // The sym may have had a different value earlier in the loop or on the back-edge, but as long as it's reassigned to its
        // value outside the loop, it would be considered invariant in this block. Consider that case:
        //     s1 = s2[invariant]
        //     <loop start>
        //         s1 = s2[invariant]
        //                              // s1 now has the same value as in the landing pad, and is considered invariant
        //         s1 += s3
        //                              // s1 is not invariant here, or on the back-edge
        //         ++s3                 // s3 is not invariant, so the add above cannot be hoisted
        //     <loop end>
        //
        // A problem occurs at the point of (s1 += s3) when:
        //     - At (s1 = s2) inside the loop, s1 was made to be the sym store of that value. This by itself is legal, because
        //       after that transfer, s1 and s2 have the same value.
        //     - (s1 += s3) is type-specialized but s1 is not specialized in the loop header. This happens when s1 is not
        //       specialized entering the loop, and since s1 is not used before it's defined in the loop, it's not specialized
        //       on back-edges.
        //
        // With that, at (s1 += s3), the conversion of s1 to the type-specialized version would be hoisted because s1 is
        // invariant just before that instruction. Since this add is specialized, the specialized version of the sym is modified
        // in the loop without a reassignment at (s1 = s2) inside the loop, and (s1 += s3) would then use an incorrect value of
        // s1 (it would use the value of s1 from the previous loop iteration, instead of using the value of s2).
        //
        // The problem here, is that we cannot hoist the conversion of s1 into its specialized version across the assignment
        // (s1 = s2) inside the loop. So for the purposes of type specialization, don't consider a sym invariant if it has a def
        // inside the loop.
        return false;
    }

    // For values with an int range, require additionally that the range is the same as in the landing pad, as the range may
    // have been changed on this path based on branches, and int specialization and invariant hoisting may rely on the range
    // being the same. For type spec conversions, only require that if the value is an int constant in the current block, that
    // it is also an int constant with the same value in the landing pad. Other range differences don't matter for type spec.
    IntConstantBounds srcIntConstantBounds, loopHeadIntConstantBounds;
    if(srcVal->GetValueInfo()->TryGetIntConstantBounds(&srcIntConstantBounds) &&
        (isNotTypeSpecConv || srcIntConstantBounds.IsConstant()) &&
        (
            !loopHeadVal->GetValueInfo()->TryGetIntConstantBounds(&loopHeadIntConstantBounds) ||
            loopHeadIntConstantBounds.LowerBound() != srcIntConstantBounds.LowerBound() ||
            loopHeadIntConstantBounds.UpperBound() != srcIntConstantBounds.UpperBound()
        ))
    {
        return false;
    }

    // Disabling this assert, because it does not hold true when we force specialize in the loop landing pad
    //Assert((!loopHeadVal->GetValueInfo()->IsPrimitive()) || srcVal->GetValueInfo()->IsLikelyPrimitive());

    return true;
}
- bool
- GlobOpt::OptIsInvariant(
- IR::Instr *instr,
- BasicBlock *block,
- Loop *loop,
- Value *src1Val,
- Value *src2Val,
- bool isNotTypeSpecConv,
- const bool forceInvariantHoisting)
- {
- if (!loop->CanHoistInvariants())
- {
- return false;
- }
- if (!OpCodeAttr::CanCSE(instr->m_opcode))
- {
- return false;
- }
- bool allowNonPrimitives = !OpCodeAttr::OpndHasImplicitCall(instr->m_opcode);
- switch(instr->m_opcode)
- {
- // Can't legally hoist these
- case Js::OpCode::LdLen_A:
- return false;
- //Can't Hoist BailOnNotStackArgs, as it is necessary as InlineArgsOptimization relies on this opcode
- //to decide whether to throw rejit exception or not.
- case Js::OpCode::BailOnNotStackArgs:
- return false;
- // Usually not worth hoisting these
- case Js::OpCode::Ld_A:
- case Js::OpCode::Ld_I4:
- case Js::OpCode::LdC_A_I4:
- if(!forceInvariantHoisting)
- {
- return false;
- }
- break;
- // Can't hoist these outside the function it's for. The LdArgumentsFromFrame for an inlinee depends on the inlinee meta arg
- // that holds the arguments object, which is only initialized at the start of the inlinee. So, can't hoist this outside the
- // inlinee.
- case Js::OpCode::LdArgumentsFromFrame:
- if(instr->m_func != loop->GetFunc())
- {
- return false;
- }
- break;
- case Js::OpCode::FromVar:
- if (instr->HasBailOutInfo())
- {
- allowNonPrimitives = true;
- }
- break;
- case Js::OpCode::CheckObjType:
- // Bug 11712101: If the operand is a field, ensure that its containing object type is invariant
- // before hoisting -- that is, don't hoist a CheckObjType over a DeleteFld on that object.
- // (CheckObjType only checks the operand and its immediate parent, so we don't need to go
- // any farther up the object graph.)
- Assert(instr->GetSrc1());
- PropertySym *propertySym = instr->GetSrc1()->AsPropertySymOpnd()->GetPropertySym();
- if (propertySym->HasObjectTypeSym()) {
- StackSym *objectTypeSym = propertySym->GetObjectTypeSym();
- if (!this->OptIsInvariant(objectTypeSym, block, loop, this->CurrentBlockData()->FindValue(objectTypeSym), true, true)) {
- return false;
- }
- }
- break;
- }
- IR::Opnd *dst = instr->GetDst();
- if (dst && !dst->IsRegOpnd())
- {
- return false;
- }
- IR::Opnd *src1 = instr->GetSrc1();
- if (src1)
- {
- if (!this->OptIsInvariant(src1, block, loop, src1Val, isNotTypeSpecConv, allowNonPrimitives))
- {
- return false;
- }
- IR::Opnd *src2 = instr->GetSrc2();
- if (src2)
- {
- if (!this->OptIsInvariant(src2, block, loop, src2Val, isNotTypeSpecConv, allowNonPrimitives))
- {
- return false;
- }
- }
- }
- return true;
- }
- bool
- GlobOpt::OptDstIsInvariant(IR::RegOpnd *dst)
- {
- StackSym *dstSym = dst->m_sym;
- if (dstSym->IsTypeSpec())
- {
- // The type-specialized sym may be single def, but not the original...
- dstSym = dstSym->GetVarEquivSym(this->func);
- }
- return (dstSym->m_isSingleDef);
- }
- void
- GlobOpt::OptHoistUpdateValueType(
- Loop* loop,
- IR::Instr* instr,
- IR::Opnd** srcOpndPtr /* All code paths that change src, should update srcOpndPtr*/,
- Value* opndVal)
- {
- if (opndVal == nullptr || instr->m_opcode == Js::OpCode::FromVar || srcOpndPtr == nullptr || *srcOpndPtr == nullptr)
- {
- return;
- }
- IR::Opnd* srcOpnd = *srcOpndPtr;
- Sym* opndSym = srcOpnd->GetSym();;
- if (opndSym)
- {
- BasicBlock* landingPad = loop->landingPad;
- Value* opndValueInLandingPad = landingPad->globOptData.FindValue(opndSym);
- Assert(opndVal->GetValueNumber() == opndValueInLandingPad->GetValueNumber());
- ValueType opndValueTypeInLandingPad = opndValueInLandingPad->GetValueInfo()->Type();
- if (srcOpnd->GetValueType() != opndValueTypeInLandingPad)
- {
- srcOpnd->SetValueType(opndValueTypeInLandingPad);
- if (instr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE)
- {
- Assert(!opndSym->IsPropertySym());
- Assert(!opndValueTypeInLandingPad.IsString());
- Assert(instr->GetDst());
- IR::RegOpnd* strOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
- strOpnd->SetValueType(ValueType::String);
- strOpnd->SetValueTypeFixed();
- IR::Instr* convPrimStrInstr =
- IR::Instr::New(Js::OpCode::Conv_PrimStr, strOpnd, srcOpnd->Use(instr->m_func), instr->m_func);
- instr->ReplaceSrc(srcOpnd, strOpnd);
- // Replace above will free srcOpnd, so reassign it
- *srcOpndPtr = srcOpnd = reinterpret_cast<IR::Opnd *>(strOpnd);
- // We add ConvPrim_Str in the landingpad, and since this instruction doesn't go through the checks in OptInstr, the bailout is never added
- // As we expand hoisting of instructions to new opcode, we need a better framework to handle such cases
- if (IsImplicitCallBailOutCurrentlyNeeded(convPrimStrInstr, opndValueInLandingPad, nullptr, landingPad, landingPad->globOptData.liveFields->IsEmpty(), true, true))
- {
- EnsureBailTarget(loop);
- loop->bailOutInfo->bailOutInstr->InsertBefore(convPrimStrInstr);
- convPrimStrInstr = convPrimStrInstr->ConvertToBailOutInstr(convPrimStrInstr, IR::BailOutOnImplicitCallsPreOp, loop->bailOutInfo->bailOutOffset);
- convPrimStrInstr->ReplaceBailOutInfo(loop->bailOutInfo);
- }
- else
- {
- if (loop->bailOutInfo->bailOutInstr)
- {
- loop->bailOutInfo->bailOutInstr->InsertBefore(convPrimStrInstr);
- }
- else
- {
- landingPad->InsertAfter(convPrimStrInstr);
- }
- }
- // If we came here opndSym can't be PropertySym
- return;
- }
- }
- if (opndSym->IsPropertySym())
- {
- // Also fix valueInfo on objPtr
- StackSym* opndObjPtrSym = opndSym->AsPropertySym()->m_stackSym;
- Value* opndObjPtrSymValInLandingPad = landingPad->globOptData.FindValue(opndObjPtrSym);
- ValueInfo* opndObjPtrSymValueInfoInLandingPad = opndObjPtrSymValInLandingPad->GetValueInfo();
- srcOpnd->AsSymOpnd()->SetPropertyOwnerValueType(opndObjPtrSymValueInfoInLandingPad->Type());
- }
- }
- }
// Hoists an invariant instruction out of 'loop' into its landing pad (or before the
// loop's shared bailout instruction when one exists).
//
// Responsibilities visible in this function:
// - Refresh src operand value types for the landing-pad position and clear temp
//   last-use flags, since the instruction moves above other uses.
// - For FromVar, add a bailout when the landing-pad position no longer guarantees the
//   source's definite type (kind depends on dst type and lossiness).
// - If the dst is redefined in the loop (or is otherwise not invariant), keep the
//   original dst in place via a Ld_A from a fresh copy sym and hoist the instruction
//   writing the copy sym instead.
// - Replicate dst liveness and value info into every block between the hoist point
//   and the original block so downstream merges see consistent data.
void
GlobOpt::OptHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *dstVal,
    Value *const src1Val,
    Value *const src2Val,
    bool isNotTypeSpecConv,
    bool lossy,
    IR::BailOutKind bailoutKind)
{
    BasicBlock *landingPad = loop->landingPad;

    IR::Opnd* src1 = instr->GetSrc1();
    if (src1)
    {
        // We are hoisting this instruction possibly past other uses, which might invalidate the last use info. Clear it.
        OptHoistUpdateValueType(loop, instr, &src1, src1Val);

        if (src1->IsRegOpnd())
        {
            src1->AsRegOpnd()->m_isTempLastUse = false;
        }

        IR::Opnd* src2 = instr->GetSrc2();
        if (src2)
        {
            OptHoistUpdateValueType(loop, instr, &src2, src2Val);

            if (src2->IsRegOpnd())
            {
                src2->AsRegOpnd()->m_isTempLastUse = false;
            }
        }
    }

    IR::RegOpnd *dst = instr->GetDst() ? instr->GetDst()->AsRegOpnd() : nullptr;
    if(dst)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnGe_I4:
            // These operations are a special case. They generate a lossy int value, and the var sym is initialized using
            // Conv_Bool. A sym cannot be live only as a lossy int sym, the var needs to be live as well since the lossy int
            // sym cannot be used to convert to var. We don't know however, whether the Conv_Bool will be hoisted. The idea
            // currently is that the sym is only used on the path in which it is initialized inside the loop. So, don't
            // hoist any liveness info for the dst.
            if (!this->GetIsAsmJSFunc())
            {
                lossy = true;
            }
            break;

        case Js::OpCode::FromVar:
        {
            StackSym* src1StackSym = IR::RegOpnd::TryGetStackSym(instr->GetSrc1());

            if (instr->HasBailOutInfo())
            {
                // Only conversion-style bailout kinds are expected on a FromVar.
                IR::BailOutKind instrBailoutKind = instr->GetBailOutKind();
                Assert(instrBailoutKind == IR::BailOutIntOnly ||
                    instrBailoutKind == IR::BailOutExpectingInteger ||
                    instrBailoutKind == IR::BailOutOnNotPrimitive ||
                    instrBailoutKind == IR::BailOutNumberOnly ||
                    instrBailoutKind == IR::BailOutPrimitiveButString);
            }
            else if (src1StackSym && bailoutKind != IR::BailOutInvalid)
            {
                // We may be hoisting FromVar from a region where it didn't need a bailout (src1 had a definite value type) to a region
                // where it would. In such cases, the FromVar needs a bailout based on the value type of src1 in its new position.
                Assert(!src1StackSym->IsTypeSpec());
                Value* landingPadSrc1val = landingPad->globOptData.FindValue(src1StackSym);
                Assert(src1Val->GetValueNumber() == landingPadSrc1val->GetValueNumber());

                ValueInfo *src1ValueInfo = src1Val->GetValueInfo();
                ValueInfo *landingPadSrc1ValueInfo = landingPadSrc1val->GetValueInfo();
                IRType dstType = dst->GetType();

                // Converts 'instr' into a bailout instruction carrying 'bailoutKind',
                // using the loop's shared bailout offset when already in a landing pad.
                const auto AddBailOutToFromVar = [&]()
                {
                    instr->GetSrc1()->SetValueType(landingPadSrc1val->GetValueInfo()->Type());
                    EnsureBailTarget(loop);
                    if (block->IsLandingPad())
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind, loop->bailOutInfo->bailOutOffset);
                    }
                    else
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind);
                    }
                };

                // A definite type in the source position and not a definite type in the destination (landing pad)
                // and no bailout on the instruction; we should put a bailout on the hoisted instruction.
                if (dstType == TyInt32)
                {
                    if (lossy)
                    {
                        if ((src1ValueInfo->IsPrimitive() || block->globOptData.IsTypeSpecialized(src1StackSym)) &&                // didn't need a lossy type spec bailout in the source block
                            (!landingPadSrc1ValueInfo->IsPrimitive() && !landingPad->globOptData.IsTypeSpecialized(src1StackSym))) // needs a lossy type spec bailout in the landing pad
                        {
                            bailoutKind = IR::BailOutOnNotPrimitive;
                            AddBailOutToFromVar();
                        }
                    }
                    else if (src1ValueInfo->IsInt() && !landingPadSrc1ValueInfo->IsInt())
                    {
                        AddBailOutToFromVar();
                    }
                }
                else if ((dstType == TyFloat64 && src1ValueInfo->IsNumber() && !landingPadSrc1ValueInfo->IsNumber()))
                {
                    AddBailOutToFromVar();
                }
            }

            break;
        }
        }

        if (dstVal == NULL)
        {
            dstVal = this->NewGenericValue(ValueType::Uninitialized, dst);
        }

        // ToVar/FromVar don't need a new dst because it has to be invariant if their src is invariant.
        bool dstDoesntNeedLoad = (!isNotTypeSpecConv && instr->m_opcode != Js::OpCode::LdC_A_I4);

        StackSym *varSym = dst->m_sym;

        if (varSym->IsTypeSpec())
        {
            varSym = varSym->GetVarEquivSym(this->func);
        }

        Value *const landingPadDstVal = loop->landingPad->globOptData.FindValue(varSym);
        if(landingPadDstVal
                ? dstVal->GetValueNumber() != landingPadDstVal->GetValueNumber()
                : loop->symsDefInLoop->Test(varSym->m_id))
        {
            // We need a temp for FromVar/ToVar if dst changes in the loop.
            dstDoesntNeedLoad = false;
        }

        if (!dstDoesntNeedLoad && this->OptDstIsInvariant(dst) == false)
        {
            // Keep dst in place, hoist instr using a new dst.
            instr->UnlinkDst();

            // Set type specialization info correctly for this new sym
            StackSym *copyVarSym;
            IR::RegOpnd *copyReg;
            if (dst->m_sym->IsTypeSpec())
            {
                copyVarSym = StackSym::New(TyVar, instr->m_func);
                StackSym *copySym = copyVarSym;
                if (dst->m_sym->IsInt32())
                {
                    if(lossy)
                    {
                        // The new sym would only be live as a lossy int since we're only hoisting the store to the int version
                        // of the sym, and cannot be converted to var. It is not legal to have a sym only live as a lossy int,
                        // so don't update liveness info for this sym.
                    }
                    else
                    {
                        block->globOptData.liveInt32Syms->Set(copyVarSym->m_id);
                    }
                    copySym = copySym->GetInt32EquivSym(instr->m_func);
                }
                else if (dst->m_sym->IsFloat64())
                {
                    block->globOptData.liveFloat64Syms->Set(copyVarSym->m_id);
                    copySym = copySym->GetFloat64EquivSym(instr->m_func);
                }
                copyReg = IR::RegOpnd::New(copySym, copySym->GetType(), instr->m_func);
            }
            else
            {
                copyReg = IR::RegOpnd::New(dst->GetType(), instr->m_func);
                copyVarSym = copyReg->m_sym;
                block->globOptData.liveVarSyms->Set(copyVarSym->m_id);
            }

            copyReg->SetValueType(dst->GetValueType());

            // Reload the hoisted result back into the original dst at the original position.
            IR::Instr *copyInstr = IR::Instr::New(Js::OpCode::Ld_A, dst, copyReg, instr->m_func);
            copyInstr->SetByteCodeOffset(instr);
            instr->SetDst(copyReg);
            instr->InsertBefore(copyInstr);

            dst->m_sym->m_mayNotBeTempLastUse = true;

            if (instr->GetSrc1() && instr->GetSrc1()->IsImmediateOpnd())
            {
                // Propagate IsIntConst if appropriate
                switch(instr->m_opcode)
                {
                case Js::OpCode::Ld_A:
                case Js::OpCode::Ld_I4:
                case Js::OpCode::LdC_A_I4:
                    copyReg->m_sym->SetIsConst();
                    break;
                }
            }

            ValueInfo *dstValueInfo = dstVal->GetValueInfo();
            if((!dstValueInfo->GetSymStore() || dstValueInfo->GetSymStore() == varSym) && !lossy)
            {
                // The destination's value may have been transferred from one of the invariant sources, in which case we should
                // keep the sym store intact, as that sym will likely have a better lifetime than this new copy sym. For
                // instance, if we're inside a conditioned block, because we don't make the copy sym live and set its value in
                // all preceding blocks, this sym would not be live after exiting this block, causing this value to not
                // participate in copy-prop after this block.
                this->SetSymStoreDirect(dstValueInfo, copyVarSym);
            }

            block->globOptData.InsertNewValue(dstVal, copyReg);
            dst = copyReg;
        }
    }

    // Move to landing pad
    block->UnlinkInstr(instr);

    if (loop->bailOutInfo->bailOutInstr)
    {
        // Hoisted instructions go before the loop's shared bailout instruction.
        loop->bailOutInfo->bailOutInstr->InsertBefore(instr);
    }
    else
    {
        landingPad->InsertAfter(instr);
    }

    GlobOpt::MarkNonByteCodeUsed(instr);

    if (instr->HasBailOutInfo() || instr->HasAuxBailOut())
    {
        Assert(loop->bailOutInfo);
        EnsureBailTarget(loop);

        // Copy bailout info of loop top.
        instr->ReplaceBailOutInfo(loop->bailOutInfo);
    }

    if(!dst)
    {
        // Nothing more to do for instructions without a dst (liveness/value replication below is dst-only).
        return;
    }

    // The bailout info's liveness for the dst sym is not updated in loop landing pads because bailout instructions previously
    // hoisted into the loop's landing pad may bail out before the current type of the dst sym became live (perhaps due to this
    // instruction). Since the landing pad will have a shared bailout point, the bailout info cannot assume that the current
    // type of the dst sym was live during every bailout hoisted into the landing pad.

    StackSym *const dstSym = dst->m_sym;
    StackSym *const dstVarSym = dstSym->IsTypeSpec() ? dstSym->GetVarEquivSym(nullptr) : dstSym;
    Assert(dstVarSym);
    if(isNotTypeSpecConv || !loop->landingPad->globOptData.IsLive(dstVarSym))
    {
        // A new dst is being hoisted, or the same single-def dst that would not be live before this block. So, make it live and
        // update the value info with the same value info in this block.

        if(lossy)
        {
            // This is a lossy conversion to int. The instruction was given a new dst specifically for hoisting, so this new dst
            // will not be live as a var before this block. A sym cannot be live only as a lossy int sym, the var needs to be
            // live as well since the lossy int sym cannot be used to convert to var. Since the var version of the sym is not
            // going to be initialized, don't hoist any liveness info for the dst. The sym is only going to be used on the path
            // in which it is initialized inside the loop.
            Assert(dstSym->IsTypeSpec());
            Assert(dstSym->IsInt32());
            return;
        }

        // Check if the dst value was transferred from the src. If so, the value transfer needs to be replicated.
        bool isTransfer = dstVal == src1Val;

        StackSym *transferValueOfSym = nullptr;
        if(isTransfer)
        {
            Assert(instr->GetSrc1());
            if(instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *src1Sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if(src1Sym->IsTypeSpec())
                {
                    src1Sym = src1Sym->GetVarEquivSym(nullptr);
                    Assert(src1Sym);
                }
                if(dstVal == block->globOptData.FindValue(src1Sym))
                {
                    transferValueOfSym = src1Sym;
                }
            }
        }

        // SIMD_JS
        if (instr->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // Check if we should have CSE'ed this EA
            Assert(instr->GetSrc1());

            // If the dstVal symstore is not the dst itself, then we copied the Value from another expression.
            if (dstVal->GetValueInfo()->GetSymStore() != instr->GetDst()->GetStackSym())
            {
                isTransfer = true;
                transferValueOfSym = dstVal->GetValueInfo()->GetSymStore()->AsStackSym();
            }
        }

        // Walk backward from the original block to the landing pad, making the dst live
        // and installing a matching value in each block along the way.
        const ValueNumber dstValueNumber = dstVal->GetValueNumber();
        ValueNumber dstNewValueNumber = InvalidValueNumber;
        for(InvariantBlockBackwardIterator it(this, block, loop->landingPad, nullptr); it.IsValid(); it.MoveNext())
        {
            BasicBlock *const hoistBlock = it.Block();
            GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

            Assert(!hoistBlockData.IsLive(dstVarSym));
            hoistBlockData.MakeLive(dstSym, lossy);

            Value *newDstValue;
            do
            {
                if(isTransfer)
                {
                    if(transferValueOfSym)
                    {
                        newDstValue = hoistBlockData.FindValue(transferValueOfSym);
                        if(newDstValue && newDstValue->GetValueNumber() == dstValueNumber)
                        {
                            break;
                        }
                    }

                    // It's a transfer, but we don't have a sym whose value number matches in the target block. Use a new value
                    // number since we don't know if there is already a value with the current number for the target block.
                    if(dstNewValueNumber == InvalidValueNumber)
                    {
                        dstNewValueNumber = NewValueNumber();
                    }
                    newDstValue = CopyValue(dstVal, dstNewValueNumber);
                    break;
                }

                newDstValue = CopyValue(dstVal, dstValueNumber);
            } while(false);

            hoistBlockData.SetValue(newDstValue, dstVarSym);
        }
        return;
    }

#if DBG
    if(instr->GetSrc1()->IsRegOpnd()) // Type spec conversion may load a constant into a dst sym
    {
        StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        Assert(srcSym != dstSym); // Type spec conversion must be changing the type, so the syms must be different
        StackSym *const srcVarSym = srcSym->IsTypeSpec() ? srcSym->GetVarEquivSym(nullptr) : srcSym;
        Assert(srcVarSym == dstVarSym); // Type spec conversion must be between variants of the same var sym
    }
#endif

    // For type spec conversions of an already-live var sym, decide whether the value's
    // type can be narrowed (to int32 or float64) in the blocks we hoist through.
    bool changeValueType = false, changeValueTypeToInt = false;
    if(dstSym->IsTypeSpec())
    {
        if(dst->IsInt32())
        {
            if(!lossy)
            {
                Assert(
                    !instr->HasBailOutInfo() ||
                    instr->GetBailOutKind() == IR::BailOutIntOnly ||
                    instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                changeValueType = changeValueTypeToInt = true;
            }
        }
        else if (dst->IsFloat64())
        {
            if(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutNumberOnly)
            {
                changeValueType = true;
            }
        }
    }

    ValueInfo *previousValueInfoBeforeUpdate = nullptr, *previousValueInfoAfterUpdate = nullptr;
    for(InvariantBlockBackwardIterator it(
            this,
            block,
            loop->landingPad,
            dstVarSym,
            dstVal->GetValueNumber());
        it.IsValid();
        it.MoveNext())
    {
        BasicBlock *const hoistBlock = it.Block();
        GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

#if DBG
        // TODO: There are some odd cases with field hoisting where the sym is invariant in only part of the loop and the info
        // does not flow through all blocks. Un-comment the verification below after PRE replaces field hoisting.

        //// Verify that the src sym is live as the required type, and that the conversion is valid
        //Assert(IsLive(dstVarSym, &hoistBlockData));
        //if(instr->GetSrc1()->IsRegOpnd())
        //{
        //    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
        //    StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        //    if(srcSym->IsTypeSpec())
        //    {
        //        if(src->IsInt32())
        //        {
        //            Assert(hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //            Assert(!hoistBlockData.liveLossyInt32Syms->Test(dstVarSym->m_id)); // shouldn't try to convert a lossy int32 to anything
        //        }
        //        else
        //        {
        //            Assert(src->IsFloat64());
        //            Assert(hoistBlockData.liveFloat64Syms->Test(dstVarSym->m_id));
        //            if(dstSym->IsTypeSpec() && dst->IsInt32())
        //            {
        //                Assert(lossy); // shouldn't try to do a lossless conversion from float64 to int32
        //            }
        //        }
        //    }
        //    else
        //    {
        //        Assert(hoistBlockData.liveVarSyms->Test(dstVarSym->m_id));
        //    }
        //}
        //if(dstSym->IsTypeSpec() && dst->IsInt32())
        //{
        //    // If the sym is already specialized as required in the block to which we are attempting to hoist the conversion,
        //    // that info should have flowed into this block
        //    if(lossy)
        //    {
        //        Assert(!hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //    }
        //    else
        //    {
        //        Assert(!IsInt32TypeSpecialized(dstVarSym, hoistBlock));
        //    }
        //}
#endif

        hoistBlockData.MakeLive(dstSym, lossy);

        if(!changeValueType)
        {
            continue;
        }

        Value *const hoistBlockValue = it.InvariantSymValue();
        ValueInfo *const hoistBlockValueInfo = hoistBlockValue->GetValueInfo();
        if(hoistBlockValueInfo == previousValueInfoBeforeUpdate)
        {
            // Same shared value info as the previous block; reuse the already-specialized info.
            if(hoistBlockValueInfo != previousValueInfoAfterUpdate)
            {
                HoistInvariantValueInfo(previousValueInfoAfterUpdate, hoistBlockValue, hoistBlock);
            }
        }
        else
        {
            previousValueInfoBeforeUpdate = hoistBlockValueInfo;
            ValueInfo *const newValueInfo =
                changeValueTypeToInt
                    ? hoistBlockValueInfo->SpecializeToInt32(alloc)
                    : hoistBlockValueInfo->SpecializeToFloat64(alloc);
            previousValueInfoAfterUpdate = newValueInfo;
            ChangeValueInfo(changeValueTypeToInt ? nullptr : hoistBlock, hoistBlockValue, newValueInfo);
        }
    }
}
// Attempts to hoist an invariant instruction out of the innermost loop containing
// 'block' and, transitively, out of every enclosing loop in which it stays invariant.
// Returns true if the instruction was hoisted (via OptHoistInvariant), false otherwise.
bool
GlobOpt::TryHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    bool isNotTypeSpecConv,
    const bool lossy,
    const bool forceInvariantHoisting,
    IR::BailOutKind bailoutKind)
{
    Assert(!this->IsLoopPrePass());

    if (OptIsInvariant(instr, block, block->loop, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
    {
#if DBG
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::InvariantsPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            Output::Print(_u(" **** INVARIANT *** "));
            instr->Dump();
        }
#endif
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::InvariantsPhase))
        {
            Output::Print(_u(" **** INVARIANT *** "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
        Loop *loop = block->loop;

        // Try hoisting from to outer most loop
        while (loop->parent && OptIsInvariant(instr, block, loop->parent, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
        {
            loop = loop->parent;
        }

        // Record the byte code use here since we are going to move this instruction up
        if (isNotTypeSpecConv)
        {
            InsertNoImplicitCallUses(instr);
            this->CaptureByteCodeSymUses(instr);
            this->InsertByteCodeUses(instr, true);
        }
#if DBG
        else
        {
            // Type spec conversions are expected to carry no byte code sym uses; verify that.
            PropertySym *propertySymUse = NULL;
            NoRecoverMemoryJitArenaAllocator tempAllocator(_u("BE-GlobOpt-Temp"), this->alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
            BVSparse<JitArenaAllocator> * tempByteCodeUse = JitAnew(&tempAllocator, BVSparse<JitArenaAllocator>, &tempAllocator);
            GlobOpt::TrackByteCodeSymUsed(instr, tempByteCodeUse, &propertySymUse);
            Assert(tempByteCodeUse->Count() == 0 && propertySymUse == NULL);
        }
#endif
        OptHoistInvariant(instr, block, loop, dstVal, src1Val, src2Val, isNotTypeSpecConv, lossy, bailoutKind);
        return true;
    }

    return false;
}
// Iterates backward over the blocks between exclusiveBeginBlock (not visited) and
// inclusiveEndBlock (visited last). When invariantSym is provided, only blocks whose
// value for that sym still carries invariantSymValueNumber are visited; when
// followFlow is set, only flow predecessors of previously visited blocks are visited.
InvariantBlockBackwardIterator::InvariantBlockBackwardIterator(
    GlobOpt *const globOpt,
    BasicBlock *const exclusiveBeginBlock,
    BasicBlock *const inclusiveEndBlock,
    StackSym *const invariantSym,
    const ValueNumber invariantSymValueNumber,
    bool followFlow)
    : globOpt(globOpt),
    exclusiveEndBlock(inclusiveEndBlock->prev), // iteration stops when stepping past the inclusive end
    invariantSym(invariantSym),
    invariantSymValueNumber(invariantSymValueNumber),
    block(exclusiveBeginBlock),
    blockBV(globOpt->tempAlloc),
    followFlow(followFlow)
#if DBG
    ,
    inclusiveEndBlock(inclusiveEndBlock)
#endif
{
    Assert(exclusiveBeginBlock);
    Assert(inclusiveEndBlock);
    Assert(!inclusiveEndBlock->isDeleted);
    Assert(exclusiveBeginBlock != inclusiveEndBlock);
    // invariantSym and a valid value number must be provided together or not at all.
    Assert(!invariantSym == (invariantSymValueNumber == InvalidValueNumber));

    // Position the iterator on the first valid block.
    MoveNext();
}
- bool
- InvariantBlockBackwardIterator::IsValid() const
- {
- return block != exclusiveEndBlock;
- }
- void
- InvariantBlockBackwardIterator::MoveNext()
- {
- Assert(IsValid());
- while(true)
- {
- #if DBG
- BasicBlock *const previouslyIteratedBlock = block;
- #endif
- block = block->prev;
- if(!IsValid())
- {
- Assert(previouslyIteratedBlock == inclusiveEndBlock);
- break;
- }
- if (!this->UpdatePredBlockBV())
- {
- continue;
- }
- if (!this->UpdatePredBlockBV())
- {
- continue;
- }
- if(block->isDeleted)
- {
- continue;
- }
- if(!block->globOptData.HasData())
- {
- // This block's info has already been merged with all of its successors
- continue;
- }
- if(!invariantSym)
- {
- break;
- }
- invariantSymValue = block->globOptData.FindValue(invariantSym);
- if(!invariantSymValue || invariantSymValue->GetValueNumber() != invariantSymValueNumber)
- {
- // BailOnNoProfile and throw blocks are not moved outside loops. A sym table cleanup on these paths may delete the
- // values. Field hoisting also has some odd cases where the hoisted stack sym is invariant in only part of the loop.
- continue;
- }
- break;
- }
- }
// When following flow, returns true iff the current block is eligible (the tracking BV
// is still empty, or contains this block's number because it is a predecessor of a
// previously visited block). As a side effect, records the current block's
// predecessors so subsequent iterations can be filtered the same way.
// When flow is not being followed, always returns true.
bool
InvariantBlockBackwardIterator::UpdatePredBlockBV()
{
    if (!this->followFlow)
    {
        return true;
    }

    // Track blocks we've visited to ensure that we only iterate over predecessor blocks
    if (!this->blockBV.IsEmpty() && !this->blockBV.Test(this->block->GetBlockNum()))
    {
        return false;
    }

    // Record this block's predecessors; they become the eligible set for the next step.
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, this->block->GetPredList())
    {
        this->blockBV.Set(edge->GetPred()->GetBlockNum());
    } NEXT_SLISTBASECOUNTED_ENTRY;

    return true;
}
- BasicBlock *
- InvariantBlockBackwardIterator::Block() const
- {
- Assert(IsValid());
- return block;
- }
- Value *
- InvariantBlockBackwardIterator::InvariantSymValue() const
- {
- Assert(IsValid());
- Assert(invariantSym);
- return invariantSymValue;
- }
- void
- GlobOpt::HoistInvariantValueInfo(
- ValueInfo *const invariantValueInfoToHoist,
- Value *const valueToUpdate,
- BasicBlock *const targetBlock)
- {
- Assert(invariantValueInfoToHoist);
- Assert(valueToUpdate);
- Assert(targetBlock);
- // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
- Assert(!invariantValueInfoToHoist->IsJsType());
- Sym *const symStore = valueToUpdate->GetValueInfo()->GetSymStore();
- ValueInfo *newValueInfo;
- if(invariantValueInfoToHoist->GetSymStore() == symStore)
- {
- newValueInfo = invariantValueInfoToHoist;
- }
- else
- {
- newValueInfo = invariantValueInfoToHoist->Copy(alloc);
- this->SetSymStoreDirect(newValueInfo, symStore);
- }
- ChangeValueInfo(targetBlock, valueToUpdate, newValueInfo, true);
- }
- // static
- bool
- GlobOpt::DoInlineArgsOpt(Func const * func)
- {
- Func const * topFunc = func->GetTopFunc();
- Assert(topFunc != func);
- bool doInlineArgsOpt =
- !PHASE_OFF(Js::InlineArgsOptPhase, topFunc) &&
- !func->GetHasCalls() &&
- !func->GetHasUnoptimizedArgumentsAccess() &&
- func->m_canDoInlineArgsOpt;
- return doInlineArgsOpt;
- }
- bool
- GlobOpt::IsSwitchOptEnabled(Func const * func)
- {
- Assert(func->IsTopFunc());
- return !PHASE_OFF(Js::SwitchOptPhase, func) && !func->IsSwitchOptDisabled() && func->DoGlobOpt();
- }
- bool
- GlobOpt::IsSwitchOptEnabledForIntTypeSpec(Func const * func)
- {
- return IsSwitchOptEnabled(func) && !IsTypeSpecPhaseOff(func) && DoAggressiveIntTypeSpec(func);
- }
- bool
- GlobOpt::DoConstFold() const
- {
- return !PHASE_OFF(Js::ConstFoldPhase, func);
- }
- bool
- GlobOpt::IsTypeSpecPhaseOff(Func const *func)
- {
- return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode();
- }
- bool
- GlobOpt::DoTypeSpec() const
- {
- return doTypeSpec;
- }
- bool
- GlobOpt::DoAggressiveIntTypeSpec(Func const * func)
- {
- return
- !PHASE_OFF(Js::AggressiveIntTypeSpecPhase, func) &&
- !IsTypeSpecPhaseOff(func) &&
- !func->IsAggressiveIntTypeSpecDisabled();
- }
- bool
- GlobOpt::DoAggressiveIntTypeSpec() const
- {
- return doAggressiveIntTypeSpec;
- }
- bool
- GlobOpt::DoAggressiveMulIntTypeSpec() const
- {
- return doAggressiveMulIntTypeSpec;
- }
- bool
- GlobOpt::DoDivIntTypeSpec() const
- {
- return doDivIntTypeSpec;
- }
- // static
- bool
- GlobOpt::DoLossyIntTypeSpec(Func const * func)
- {
- return
- !PHASE_OFF(Js::LossyIntTypeSpecPhase, func) &&
- !IsTypeSpecPhaseOff(func) &&
- (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLossyIntTypeSpecDisabled());
- }
- bool
- GlobOpt::DoLossyIntTypeSpec() const
- {
- return doLossyIntTypeSpec;
- }
- // static
- bool
- GlobOpt::DoFloatTypeSpec(Func const * func)
- {
- return
- !PHASE_OFF(Js::FloatTypeSpecPhase, func) &&
- !IsTypeSpecPhaseOff(func) &&
- (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsFloatTypeSpecDisabled()) &&
- AutoSystemInfo::Data.SSE2Available();
- }
- bool
- GlobOpt::DoFloatTypeSpec() const
- {
- return doFloatTypeSpec;
- }
- bool
- GlobOpt::DoStringTypeSpec(Func const * func)
- {
- return !PHASE_OFF(Js::StringTypeSpecPhase, func) && !IsTypeSpecPhaseOff(func);
- }
- // static
- bool
- GlobOpt::DoTypedArrayTypeSpec(Func const * func)
- {
- return !PHASE_OFF(Js::TypedArrayTypeSpecPhase, func) &&
- !IsTypeSpecPhaseOff(func) &&
- (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTypedArrayTypeSpecDisabled(func->IsLoopBody()))
- #if defined(_M_IX86)
- && AutoSystemInfo::Data.SSE2Available()
- #endif
- ;
- }
- // static
- bool
- GlobOpt::DoNativeArrayTypeSpec(Func const * func)
- {
- return !PHASE_OFF(Js::NativeArrayPhase, func) &&
- !IsTypeSpecPhaseOff(func)
- #if defined(_M_IX86)
- && AutoSystemInfo::Data.SSE2Available()
- #endif
- ;
- }
- bool
- GlobOpt::DoArrayCheckHoist(Func const * const func)
- {
- Assert(func->IsTopFunc());
- return
- !PHASE_OFF(Js::ArrayCheckHoistPhase, func) &&
- !func->IsArrayCheckHoistDisabled() &&
- !func->IsJitInDebugMode(); // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
- }
- bool
- GlobOpt::DoArrayCheckHoist() const
- {
- return doArrayCheckHoist;
- }
// Decides whether array checks can be hoisted for a base of the given value type at the
// given instruction. JS arrays / object-with-array / optimized virtual typed arrays
// additionally require that implicit calls can be disabled in the loop (or function).
bool
GlobOpt::DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr const * const instr) const
{
    if(!DoArrayCheckHoist() || (instr && !IsLoopPrePass() && instr->DoStackArgsOpt()))
    {
        return false;
    }

    // This includes typed arrays, but not virtual typed arrays, whose vtable can change if the buffer goes away.
    // Note that in the virtual case the vtable check is the only way to catch this, since there's no bound check.
    if(!(baseValueType.IsLikelyArrayOrObjectWithArray() || baseValueType.IsLikelyOptimizedVirtualTypedArray()) ||
        (loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func)))
    {
        return true;
    }

    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant JS array checks
#if DBG_DUMP
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArrayCheckHoist disabled for JS arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif

    return false;
}
- bool
- GlobOpt::DoArrayMissingValueCheckHoist(Func const * const func)
- {
- return
- DoArrayCheckHoist(func) &&
- !PHASE_OFF(Js::ArrayMissingValueCheckHoistPhase, func) &&
- (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayMissingValueCheckHoistDisabled(func->IsLoopBody()));
- }
- bool
- GlobOpt::DoArrayMissingValueCheckHoist() const
- {
- return doArrayMissingValueCheckHoist;
- }
- bool
- GlobOpt::DoArraySegmentHoist(const ValueType baseValueType, Func const * const func)
- {
- Assert(baseValueType.IsLikelyAnyOptimizedArray());
- if(!DoArrayCheckHoist(func) || PHASE_OFF(Js::ArraySegmentHoistPhase, func))
- {
- return false;
- }
- if(!baseValueType.IsLikelyArrayOrObjectWithArray())
- {
- return true;
- }
- return
- !PHASE_OFF(Js::JsArraySegmentHoistPhase, func) &&
- (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsJsArraySegmentHoistDisabled(func->IsLoopBody()));
- }
- bool
- GlobOpt::DoArraySegmentHoist(const ValueType baseValueType) const
- {
- Assert(baseValueType.IsLikelyAnyOptimizedArray());
- return baseValueType.IsLikelyArrayOrObjectWithArray() ? doJsArraySegmentHoist : doArraySegmentHoist;
- }
// Decides whether typed array segment length loads can be hoisted out of 'loop'
// (or the function when loop is null). Requires general segment hoisting plus the
// ability to disable implicit calls in the relevant scope.
bool
GlobOpt::DoTypedArraySegmentLengthHoist(Loop *const loop) const
{
    if(!DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array)))
    {
        return false;
    }

    if(loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func))
    {
        return true;
    }

    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant typed array
    // segment length loads.
#if DBG_DUMP
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArraySegmentLengthHoist disabled for typed arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif

    return false;
}
- bool
- GlobOpt::DoArrayLengthHoist(Func const * const func)
- {
- return
- DoArrayCheckHoist(func) &&
- !PHASE_OFF(Js::Phase::ArrayLengthHoistPhase, func) &&
- (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayLengthHoistDisabled(func->IsLoopBody()));
- }
- bool
- GlobOpt::DoArrayLengthHoist() const
- {
- return doArrayLengthHoist;
- }
- bool
- GlobOpt::DoEliminateArrayAccessHelperCall(Func *const func)
- {
- return DoArrayCheckHoist(func);
- }
- bool
- GlobOpt::DoEliminateArrayAccessHelperCall() const
- {
- return doEliminateArrayAccessHelperCall;
- }
- bool
- GlobOpt::DoLdLenIntSpec(IR::Instr * const instr, const ValueType baseValueType)
- {
- Assert(!instr || instr->m_opcode == Js::OpCode::LdLen_A);
- Assert(!instr || instr->GetDst());
- Assert(!instr || instr->GetSrc1());
- if(PHASE_OFF(Js::LdLenIntSpecPhase, func) ||
- IsTypeSpecPhaseOff(func) ||
- (func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsLdLenIntSpecDisabled()) ||
- (instr && !IsLoopPrePass() && instr->DoStackArgsOpt()))
- {
- return false;
- }
- if(instr &&
- instr->IsProfiledInstr() &&
- (
- !instr->AsProfiledInstr()->u.FldInfo().valueType.IsLikelyInt() ||
- instr->GetDst()->AsRegOpnd()->m_sym->m_isNotNumber
- ))
- {
- return false;
- }
- Assert(!instr || baseValueType == instr->GetSrc1()->GetValueType());
- return
- baseValueType.HasBeenString() ||
- (baseValueType.IsLikelyAnyOptimizedArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray);
- }
- bool
- GlobOpt::DoPathDependentValues() const
- {
- return !PHASE_OFF(Js::Phase::PathDependentValuesPhase, func);
- }
- bool
- GlobOpt::DoTrackRelativeIntBounds() const
- {
- return doTrackRelativeIntBounds;
- }
- bool
- GlobOpt::DoBoundCheckElimination() const
- {
- return doBoundCheckElimination;
- }
- bool
- GlobOpt::DoBoundCheckHoist() const
- {
- return doBoundCheckHoist;
- }
- bool
- GlobOpt::DoLoopCountBasedBoundCheckHoist() const
- {
- return doLoopCountBasedBoundCheckHoist;
- }
- bool
- GlobOpt::DoPowIntIntTypeSpec() const
- {
- return doPowIntIntTypeSpec;
- }
- bool
- GlobOpt::DoTagChecks() const
- {
- return doTagChecks;
- }
- bool
- GlobOpt::TrackArgumentsObject()
- {
- if (PHASE_OFF(Js::StackArgOptPhase, this->func))
- {
- this->CannotAllocateArgumentsObjectOnStack(nullptr);
- return false;
- }
- return func->GetHasStackArgs();
- }
// Records that the arguments object cannot be stack-allocated. If 'curFunc' already
// applied the arg-length/const optimization (which depends on stack allocation),
// the function must be rejitted without that optimization, so this throws a
// RejitException after flagging the JIT output data.
void
GlobOpt::CannotAllocateArgumentsObjectOnStack(Func * curFunc)
{
    if (curFunc != nullptr && curFunc->hasArgLenAndConstOpt)
    {
        Assert(!curFunc->GetJITOutput()->GetOutputData()->disableStackArgOpt);
        curFunc->GetJITOutput()->GetOutputData()->disableStackArgOpt = true;
        // Abandon this jit: the arg len/const optimization is invalid without stack args.
        throw Js::RejitException(RejitReason::DisableStackArgLenAndConstOpt);
    }

    func->SetHasStackArgs(false);
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (PHASE_TESTTRACE(Js::StackArgOptPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(_u("Stack args disabled for function %s(%s)\n"), func->GetJITFunctionBody()->GetDisplayName(), func->GetDebugNumberSet(debugStringBuffer));
        Output::Flush();
    }
#endif
}
// Peephole pass run before optimizing an instruction. For opcodes with dead
// fall-through, converts BailOnNoProfile into a bailout instruction, validates the
// placement of exception-related bailouts, and removes the unreachable code after
// the instruction. Returns the (possibly converted) instruction.
IR::Instr *
GlobOpt::PreOptPeep(IR::Instr *instr)
{
    if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
            case Js::OpCode::BailOnNoProfile:
            {
                // Handle BailOnNoProfile
                if (instr->HasBailOutInfo())
                {
                    if (!this->prePassLoop)
                    {
                        FillBailOutInfo(this->currentBlock, instr);
                    }
                    // Already processed.
                    return instr;
                }

                // Convert to bailout instr
                // Attribute the bailout to the next instruction that carries a real byte code offset.
                IR::Instr *nextBytecodeOffsetInstr = instr->GetNextRealInstrOrLabel();
                while(nextBytecodeOffsetInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
                {
                    nextBytecodeOffsetInstr = nextBytecodeOffsetInstr->GetNextRealInstrOrLabel();
                    Assert(!nextBytecodeOffsetInstr->IsLabelInstr());
                }
                instr = instr->ConvertToBailOutInstr(nextBytecodeOffsetInstr, IR::BailOutOnNoProfile);
                instr->ClearByteCodeOffset();
                instr->SetByteCodeOffset(nextBytecodeOffsetInstr);

                if (!this->currentBlock->loop)
                {
                    FillBailOutInfo(this->currentBlock, instr);
                }
                else
                {
                    // Inside a loop this must be the pre-pass; bailout info is filled on the real pass.
                    Assert(this->prePassLoop);
                }

                break;
            }
            case Js::OpCode::BailOnException:
            {
                // BailOnException is only expected right after a Catch in a catch region
                // (try-catch under OptimizeTry), or at the start of a finally region.
                Assert(
                    (
                        this->func->HasTry() && this->func->DoOptimizeTry() &&
                        instr->m_prev->m_opcode == Js::OpCode::Catch &&
                        instr->m_prev->m_prev->IsLabelInstr() &&
                        instr->m_prev->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeCatch
                    )
                    ||
                    (
                        this->func->HasFinally() && this->func->DoOptimizeTry() &&
                        instr->m_prev->AsLabelInstr() &&
                        instr->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeFinally
                    )
                );
                break;
            }

            case Js::OpCode::BailOnEarlyExit:
            {
                // Early-exit bailouts only arise from finally regions being optimized.
                Assert(this->func->HasFinally() && this->func->DoOptimizeTry());
                break;
            }

            default:
            {
                if(this->currentBlock->loop && !this->IsLoopPrePass())
                {
                    // Inside a loop on the real pass, keep the trailing code intact.
                    return instr;
                }
                break;
            }
        }
        RemoveCodeAfterNoFallthroughInstr(instr);
    }

    return instr;
}
// Removes the dead instructions following 'instr' (which never falls through) up to the
// end of the current block, then detaches the block's now-dead successor edges.
void
GlobOpt::RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr)
{
    if (instr != this->currentBlock->GetLastInstr())
    {
        // Remove dead code after bailout
        IR::Instr *instrDead = instr->m_next;
        IR::Instr *instrNext;

        // Walk forward removing instructions; stop at the block's last instruction, or
        // early when the next instruction is FunctionExit (the loop below leaves
        // 'instrDead' pointing at the last instruction to remove).
        for (; instrDead != this->currentBlock->GetLastInstr(); instrDead = instrNext)
        {
            instrNext = instrDead->m_next;
            if (instrNext->m_opcode == Js::OpCode::FunctionExit)
            {
                break;
            }
            this->func->m_fg->RemoveInstr(instrDead, this);
        }
        // Remove the final dead instruction and re-point the block's end to the
        // instruction preceding what now follows it.
        IR::Instr *instrNextBlock = instrDead->m_next;
        this->func->m_fg->RemoveInstr(instrDead, this);

        this->currentBlock->SetLastInstr(instrNextBlock->m_prev);
    }

    // Cleanup dead successors
    FOREACH_SUCCESSOR_BLOCK_EDITING(deadBlock, this->currentBlock, iter)
    {
        this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);
        if (this->currentBlock->GetDataUseCount() > 0)
        {
            this->currentBlock->DecrementDataUseCount();
        }
    } NEXT_SUCCESSOR_BLOCK_EDITING;
}
- void
- GlobOpt::ProcessTryHandler(IR::Instr* instr)
- {
- Assert(instr->m_next->IsLabelInstr() && instr->m_next->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeTry);
- Region* tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
- BVSparse<JitArenaAllocator> * writeThroughSymbolsSet = tryRegion->writeThroughSymbolsSet;
- ToVar(writeThroughSymbolsSet, this->currentBlock);
- }
- bool
- GlobOpt::ProcessExceptionHandlingEdges(IR::Instr* instr)
- {
- Assert(instr->m_opcode == Js::OpCode::BrOnException || instr->m_opcode == Js::OpCode::BrOnNoException);
- if (instr->m_opcode == Js::OpCode::BrOnException)
- {
- if (instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionType::RegionTypeCatch)
- {
- // BrOnException was added to model flow from try region to the catch region to assist
- // the backward pass in propagating bytecode upward exposed info from the catch block
- // to the try, and to handle break blocks. Removing it here as it has served its purpose
- // and keeping it around might also have unintended effects while merging block data for
- // the catch block's predecessors.
- // Note that the Deadstore pass will still be able to propagate bytecode upward exposed info
- // because it doesn't skip dead blocks for that.
- this->RemoveFlowEdgeToCatchBlock(instr);
- this->currentBlock->RemoveInstr(instr);
- return true;
- }
- else
- {
- // We add BrOnException from a finally region to early exit, remove that since it has served its purpose
- return this->RemoveFlowEdgeToFinallyOnExceptionBlock(instr);
- }
- }
- else if (instr->m_opcode == Js::OpCode::BrOnNoException)
- {
- if (instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionType::RegionTypeCatch)
- {
- this->RemoveFlowEdgeToCatchBlock(instr);
- }
- else
- {
- this->RemoveFlowEdgeToFinallyOnExceptionBlock(instr);
- }
- }
- return false;
- }
- void
- GlobOpt::InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd)
- {
- if ((this->currentRegion->GetType() == RegionTypeTry || this->currentRegion->GetType() == RegionTypeFinally) &&
- dstOpnd->IsRegOpnd() && dstOpnd->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
- {
- StackSym * sym = dstOpnd->AsRegOpnd()->m_sym;
- if (sym->IsVar())
- {
- return;
- }
- StackSym * varSym = sym->GetVarEquivSym(nullptr);
- if ((this->currentRegion->GetType() == RegionTypeTry && this->currentRegion->writeThroughSymbolsSet->Test(varSym->m_id)) ||
- ((this->currentRegion->GetType() == RegionTypeFinally && this->currentRegion->GetMatchingTryRegion()->writeThroughSymbolsSet->Test(varSym->m_id))))
- {
- IR::RegOpnd * regOpnd = IR::RegOpnd::New(varSym, IRType::TyVar, instr->m_func);
- this->ToVar(instr->m_next, regOpnd, this->currentBlock, NULL, false);
- }
- }
- }
// Removes the flow-graph edge that models exception flow into a catch block.
// For BrOnException the catch block is the branch target; for BrOnNoException
// the catch block (or a compensation block branching to it) follows the branch.
void
GlobOpt::RemoveFlowEdgeToCatchBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    BasicBlock * catchBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // Direct branch to the catch: the current block is the predecessor.
        catchBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else
    {
        Assert(instr->m_opcode == Js::OpCode::BrOnNoException);
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();
        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeCatch)
        {
            // The catch block starts immediately after the branch.
            catchBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            // Otherwise the next block must be a compensation block consisting of an
            // unconditional jump into the catch; the edge to remove originates there.
            Assert(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional());
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchToCatchBlock = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * catchBlockLabel = branchToCatchBlock->GetTarget();
            Assert(catchBlockLabel->GetRegion()->GetType() == RegionTypeCatch);
            catchBlock = catchBlockLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }
    Assert(catchBlock);
    Assert(predBlock);
    if (this->func->m_fg->FindEdge(predBlock, catchBlock))
    {
        predBlock->RemoveDeadSucc(catchBlock, this->func->m_fg);
        // Only the current block's GlobOpt data use count is tracked during this pass.
        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }
}
// Removes the flow-graph edge modeling exception flow into a finally block.
// Returns false when the edge must be kept (finally-to-early-exit marker, or the
// expected block shape is not found); returns true when the edge was handled.
bool
GlobOpt::RemoveFlowEdgeToFinallyOnExceptionBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    if (instr->m_opcode == Js::OpCode::BrOnNoException && instr->AsBranchInstr()->m_brFinallyToEarlyExit)
    {
        // We add edge from finally to early exit block
        // We should not remove this edge
        // If a loop has continue, and we add edge in finally to continue
        // Break block removal can move all continues inside the loop to branch to the continue added within finally
        // If we get rid of this edge, then loop may loose all backedges
        // Ideally, doing tail duplication before globopt would enable us to remove these edges, but since we do it after globopt, keep it this way for now
        // See test1() in core/test/tryfinallytests.js
        return false;
    }
    BasicBlock * finallyBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // Direct branch into the finally: current block is the predecessor.
        finallyBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else
    {
        Assert(instr->m_opcode == Js::OpCode::BrOnNoException);
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();
        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeFinally)
        {
            // The finally block starts immediately after the branch.
            finallyBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            // Expect a compensation block that jumps unconditionally into the
            // finally (or an early exit); bail out if the shape doesn't match.
            if (!(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional()))
            {
                return false;
            }
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchTofinallyBlockOrEarlyExit = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * finallyBlockLabelOrEarlyExitLabel = branchTofinallyBlockOrEarlyExit->GetTarget();
            finallyBlock = finallyBlockLabelOrEarlyExitLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }
    Assert(finallyBlock && predBlock);
    if (this->func->m_fg->FindEdge(predBlock, finallyBlock))
    {
        predBlock->RemoveDeadSucc(finallyBlock, this->func->m_fg);
        // The BrOnException itself is only a modeling artifact — delete it too.
        if (instr->m_opcode == Js::OpCode::BrOnException)
        {
            this->currentBlock->RemoveInstr(instr);
        }
        if (finallyBlock->GetFirstInstr()->AsLabelInstr()->IsUnreferenced())
        {
            // Traverse predBlocks of finallyBlock, if any of the preds have a different region, set m_hasNonBranchRef to true
            // If not, this label can get eliminated and an incorrect region from the predecessor can get propagated in lowered code
            // See test3() in tryfinallytests.js
            Region * finallyRegion = finallyBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
            FOREACH_PREDECESSOR_BLOCK(pred, finallyBlock)
            {
                Region * predRegion = pred->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != finallyRegion)
                {
                    finallyBlock->GetFirstInstr()->AsLabelInstr()->m_hasNonBranchRef = true;
                }
            } NEXT_PREDECESSOR_BLOCK;
        }
        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }
    return true;
}
// Peephole pass run per-instruction (outside loop prepass): turns dead branches
// and self-assignments into Nops when their sources provably have no side effects.
// Returns the (possibly modified in place) instruction.
IR::Instr *
GlobOpt::OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    IR::Opnd *dst, *src1, *src2;
    if (this->IsLoopPrePass())
    {
        return instr;
    }
    switch (instr->m_opcode)
    {
    case Js::OpCode::DeadBrEqual:
    case Js::OpCode::DeadBrRelational:
    case Js::OpCode::DeadBrSrEqual:
        src1 = instr->GetSrc1();
        src2 = instr->GetSrc2();
        // These branches were turned into dead branches because they were unnecessary (branch to next, ...).
        // The DeadBr are necessary in case the evaluation of the sources have side-effects.
        // If we know for sure the srcs are primitive or have been type specialized, we don't need these instructions
        if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src1->AsRegOpnd()->m_sym))) &&
            ((src2Val && src2Val->GetValueInfo()->IsPrimitive()) || (src2->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src2->AsRegOpnd()->m_sym))))
        {
            // Preserve bytecode uses for bailout, then neuter the instruction.
            this->CaptureByteCodeSymUses(instr);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    case Js::OpCode::DeadBrOnHasProperty:
        // Single-source variant of the case above.
        src1 = instr->GetSrc1();
        if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src1->AsRegOpnd()->m_sym))))
        {
            this->CaptureByteCodeSymUses(instr);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
        src1 = instr->GetSrc1();
        dst = instr->GetDst();
        // Self-assignment (dst == src1) is a no-op; drop it.
        if (dst->IsRegOpnd() && dst->IsEqual(src1))
        {
            dst = instr->UnlinkDst();
            // For a bytecode-visible register, keep a ByteCodeUses def so bailout
            // still knows the value lives here.
            if (!dst->GetIsJITOptimizedReg())
            {
                IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(instr);
                bytecodeUse->SetDst(dst);
                instr->InsertAfter(bytecodeUse);
            }
            instr->FreeSrc1();
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    }
    return instr;
}
- void
- GlobOpt::OptimizeIndirUses(IR::IndirOpnd *indirOpnd, IR::Instr * *pInstr, Value **indirIndexValRef)
- {
- IR::Instr * &instr = *pInstr;
- Assert(!indirIndexValRef || !*indirIndexValRef);
- // Update value types and copy-prop the base
- OptSrc(indirOpnd->GetBaseOpnd(), &instr, nullptr, indirOpnd);
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- if (!indexOpnd)
- {
- return;
- }
- // Update value types and copy-prop the index
- Value *indexVal = OptSrc(indexOpnd, &instr, nullptr, indirOpnd);
- if(indirIndexValRef)
- {
- *indirIndexValRef = indexVal;
- }
- }
- bool
- GlobOpt::IsPREInstrCandidateLoad(Js::OpCode opcode)
- {
- switch (opcode)
- {
- case Js::OpCode::LdFld:
- case Js::OpCode::LdFldForTypeOf:
- case Js::OpCode::LdRootFld:
- case Js::OpCode::LdRootFldForTypeOf:
- case Js::OpCode::LdMethodFld:
- case Js::OpCode::LdRootMethodFld:
- case Js::OpCode::LdSlot:
- case Js::OpCode::LdSlotArr:
- return true;
- }
- return false;
- }
- bool
- GlobOpt::IsPREInstrSequenceCandidateLoad(Js::OpCode opcode)
- {
- switch (opcode)
- {
- default:
- return IsPREInstrCandidateLoad(opcode);
- case Js::OpCode::Ld_A:
- case Js::OpCode::BytecodeArgOutCapture:
- return true;
- }
- }
- bool
- GlobOpt::IsPREInstrCandidateStore(Js::OpCode opcode)
- {
- switch (opcode)
- {
- case Js::OpCode::StFld:
- case Js::OpCode::StRootFld:
- case Js::OpCode::StSlot:
- return true;
- }
- return false;
- }
- bool
- GlobOpt::ImplicitCallFlagsAllowOpts(Loop *loop)
- {
- return loop->GetImplicitCallFlags() != Js::ImplicitCall_HasNoInfo &&
- (((loop->GetImplicitCallFlags() & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
- }
- bool
- GlobOpt::ImplicitCallFlagsAllowOpts(Func const *func)
- {
- return func->m_fg->implicitCallFlags != Js::ImplicitCall_HasNoInfo &&
- (((func->m_fg->implicitCallFlags & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
- }
- #if DBG_DUMP
// Debug-only: dump the sym-to-value map of the current block.
void
GlobOpt::Dump() const
{
    this->DumpSymToValueMap();
}
// Debug-only: dump the sym-to-value map of the given block.
void
GlobOpt::DumpSymToValueMap(BasicBlock const * block) const
{
    Output::Print(_u("\n*** SymToValueMap ***\n"));
    block->globOptData.DumpSymToValueMap();
}
// Debug-only: convenience overload targeting the block currently being optimized.
void
GlobOpt::DumpSymToValueMap() const
{
    DumpSymToValueMap(this->currentBlock);
}
- void
- GlobOpt::DumpSymVal(int index)
- {
- SymID id = index;
- extern Func *CurrentFunc;
- Sym *sym = this->func->m_symTable->Find(id);
- AssertMsg(sym, "Sym not found!!!");
- Output::Print(_u("Sym: "));
- sym->Dump();
- Output::Print(_u("\t\tValueNumber: "));
- Value * pValue = CurrentBlockData()->FindValueFromMapDirect(sym->m_id);
- pValue->Dump();
- Output::Print(_u("\n"));
- }
- void
- GlobOpt::Trace(BasicBlock * block, bool before) const
- {
- bool globOptTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
- bool typeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::TypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
- bool floatTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FloatTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
- bool fieldCopyPropTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
- bool objTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ObjTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
- bool valueTableTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ValueTablePhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
- bool fieldPRETrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
- bool anyTrace = globOptTrace || typeSpecTrace || floatTypeSpecTrace || fieldCopyPropTrace || objTypeSpecTrace || valueTableTrace || fieldPRETrace;
- if (!anyTrace)
- {
- return;
- }
- if (fieldPRETrace && this->IsLoopPrePass())
- {
- if (block->isLoopHeader && before)
- {
- Output::Print(_u("==== Loop Prepass block header #%-3d, Visiting Loop block head #%-3d\n"),
- this->prePassLoop->GetHeadBlock()->GetBlockNum(), block->GetBlockNum());
- }
- }
- if (!typeSpecTrace && !floatTypeSpecTrace && !valueTableTrace && !Js::Configuration::Global.flags.Verbose)
- {
- return;
- }
- if (before)
- {
- Output::Print(_u("========================================================================\n"));
- Output::Print(_u("Begin OptBlock: Block #%-3d"), block->GetBlockNum());
- if (block->loop)
- {
- Output::Print(_u(" Loop block header:%-3d currentLoop block head:%-3d %s"),
- block->loop->GetHeadBlock()->GetBlockNum(),
- this->prePassLoop ? this->prePassLoop->GetHeadBlock()->GetBlockNum() : 0,
- this->IsLoopPrePass() ? _u("PrePass") : _u(""));
- }
- Output::Print(_u("\n"));
- }
- else
- {
- Output::Print(_u("-----------------------------------------------------------------------\n"));
- Output::Print(_u("After OptBlock: Block #%-3d\n"), block->GetBlockNum());
- }
- if ((typeSpecTrace || floatTypeSpecTrace) && !block->globOptData.liveVarSyms->IsEmpty())
- {
- Output::Print(_u(" Live var syms: "));
- block->globOptData.liveVarSyms->Dump();
- }
- if (typeSpecTrace && !block->globOptData.liveInt32Syms->IsEmpty())
- {
- Assert(this->tempBv->IsEmpty());
- this->tempBv->Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
- if(!this->tempBv->IsEmpty())
- {
- Output::Print(_u(" Int32 type specialized (lossless) syms: "));
- this->tempBv->Dump();
- }
- this->tempBv->ClearAll();
- if(!block->globOptData.liveLossyInt32Syms->IsEmpty())
- {
- Output::Print(_u(" Int32 converted (lossy) syms: "));
- block->globOptData.liveLossyInt32Syms->Dump();
- }
- }
- if (floatTypeSpecTrace && !block->globOptData.liveFloat64Syms->IsEmpty())
- {
- Output::Print(_u(" Float64 type specialized syms: "));
- block->globOptData.liveFloat64Syms->Dump();
- }
- if ((fieldCopyPropTrace || objTypeSpecTrace) && this->DoFieldCopyProp(block->loop) && !block->globOptData.liveFields->IsEmpty())
- {
- Output::Print(_u(" Live field syms: "));
- block->globOptData.liveFields->Dump();
- }
- if (objTypeSpecTrace || valueTableTrace)
- {
- Output::Print(_u(" Value table:\n"));
- block->globOptData.DumpSymToValueMap();
- }
- if (before)
- {
- Output::Print(_u("-----------------------------------------------------------------------\n")); \
- }
- Output::Flush();
- }
// Debug-only: print the GlobOpt configuration in effect for this function,
// including per-loop implicit-call profile flags.
void
GlobOpt::TraceSettings() const
{
    Output::Print(_u("GlobOpt Settings:\r\n"));
    Output::Print(_u("    FloatTypeSpec: %s\r\n"), this->DoFloatTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u("    AggressiveIntTypeSpec: %s\r\n"), this->DoAggressiveIntTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u("    LossyIntTypeSpec: %s\r\n"), this->DoLossyIntTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u("    ArrayCheckHoist: %s\r\n"),  this->func->IsArrayCheckHoistDisabled() ? _u("disabled") : _u("enabled"));
    Output::Print(_u("    ImplicitCallFlags: %s\r\n"), Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->func->m_fg->implicitCallFlags));
    for (Loop * loop = this->func->m_fg->loopList; loop != NULL; loop = loop->next)
    {
        Output::Print(_u("        loop: %d, ImplicitCallFlags: %s\r\n"), loop->GetLoopNumber(),
            Js::DynamicProfileInfo::GetImplicitCallFlagsString(loop->GetImplicitCallFlags()));
    }
    Output::Flush();
}
- #endif // DBG_DUMP
// Walks the instruction range [instrStart, instrLast], tracking mark-temp-object
// syms and inserting BailOutMarkTempObject pre-op bailouts where implicit calls
// could observe a stack-allocated temp. Returns the last instruction processed
// (which may differ from instrLast if bailout conversion replaced instructions).
IR::Instr *
GlobOpt::TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrLast)
{
    if (!this->func->GetHasMarkTempObjects())
    {
        return instrLast;
    }
    IR::Instr * instr = instrStart;
    IR::Instr * instrEnd = instrLast->m_next;
    IR::Instr * lastInstr = nullptr;
    GlobOptBlockData& globOptData = *CurrentBlockData();
    do
    {
        // Pre-op bailouts are only considered outside the prepass, when the
        // instruction can make implicit calls and temp-object syms exist at all.
        bool mayNeedBailOnImplicitCallsPreOp = !this->IsLoopPrePass()
            && instr->HasAnyImplicitCalls()
            && globOptData.maybeTempObjectSyms != nullptr;
        if (mayNeedBailOnImplicitCallsPreOp)
        {
            // Note: 'instr' may be replaced by the bailout-generation helper,
            // so src2 is re-fetched from the (possibly new) instruction.
            IR::Opnd * src1 = instr->GetSrc1();
            if (src1)
            {
                instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src1, false);
                IR::Opnd * src2 = instr->GetSrc2();
                if (src2)
                {
                    instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src2, false);
                }
            }
        }
        IR::Opnd *dst = instr->GetDst();
        if (dst)
        {
            if (dst->IsRegOpnd())
            {
                // Register def: update the temp-object tracking sets.
                TrackTempObjectSyms(instr, dst->AsRegOpnd());
            }
            else if (mayNeedBailOnImplicitCallsPreOp)
            {
                // Memory def (e.g. field store): may need a mark-temp bailout too.
                instr = GenerateBailOutMarkTempObjectIfNeeded(instr, dst, true);
            }
        }
        lastInstr = instr;
        instr = instr->m_next;
    }
    while (instr != instrEnd);
    return lastInstr;
}
// Updates the per-block temp-object tracking sets for a register def: which syms
// may hold a mark-temp (stack-allocatable) object, and which of those are safe
// to store other temps into. Also records stack-literal InitFld bookkeeping.
void
GlobOpt::TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd)
{
    // If it is marked as dstIsTempObject, we should have mark temped it, or type specialized it to Ld_I4.
    Assert(!instr->dstIsTempObject || ObjectTempVerify::CanMarkTemp(instr, nullptr));
    GlobOptBlockData& globOptData = *CurrentBlockData();
    bool canStoreTemp = false;
    bool maybeTemp = false;
    if (OpCodeAttr::TempObjectProducing(instr->m_opcode))
    {
        maybeTemp = instr->dstIsTempObject;
        // We have to make sure that lower will always generate code to do stack allocation
        // before we can store any other stack instance onto it. Otherwise, we would not
        // walk object to box the stack property.
        canStoreTemp = instr->dstIsTempObject && ObjectTemp::CanStoreTemp(instr);
    }
    else if (OpCodeAttr::TempObjectTransfer(instr->m_opcode))
    {
        // Need to check both sources, GetNewScObject has two srcs for transfer.
        // No need to get var equiv sym here as transfer of type spec value does not transfer a mark temp object.
        maybeTemp = globOptData.maybeTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            || (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id)));
        // canStoreTemp requires src1 storable AND src2 (if present) storable.
        canStoreTemp = globOptData.canStoreTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            && (!instr->GetSrc2() || (instr->GetSrc2()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))));
        AssertOrFailFast(!canStoreTemp || instr->dstIsTempObject);
        AssertOrFailFast(!maybeTemp || instr->dstIsTempObject);
    }
    // Need to get the var equiv sym as assignment of type specialized sym kill the var sym value anyway.
    StackSym * sym = opnd->m_sym;
    if (!sym->IsVar())
    {
        sym = sym->GetVarEquivSym(nullptr);
        if (sym == nullptr)
        {
            return;
        }
    }
    SymID symId = sym->m_id;
    if (maybeTemp)
    {
        // Only var sym should be temp objects
        Assert(opnd->m_sym == sym);
        if (globOptData.maybeTempObjectSyms == nullptr)
        {
            globOptData.maybeTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        }
        globOptData.maybeTempObjectSyms->Set(symId);
        if (canStoreTemp)
        {
            if (instr->m_opcode == Js::OpCode::NewScObjectLiteral && !this->IsLoopPrePass())
            {
                // For object literal, we install the final type up front.
                // If there are bailout before we finish initializing all the fields, we need to
                // zero out the rest if we stack allocate the literal, so that the boxing would not
                // try to box trash pointer in the properties.
                // Although object Literal initialization can be done lexically, BailOnNoProfile may cause some path
                // to disappear. Do it is flow base make it easier to stop propagate those entries.
                IR::IntConstOpnd * propertyArrayIdOpnd = instr->GetSrc1()->AsIntConstOpnd();
                const Js::PropertyIdArray * propIds = instr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());
                // Duplicates are removed by parser
                Assert(!propIds->hadDuplicates);
                if (globOptData.stackLiteralInitFldDataMap == nullptr)
                {
                    globOptData.stackLiteralInitFldDataMap = JitAnew(alloc, StackLiteralInitFldDataMap, alloc);
                }
                else
                {
                    Assert(!globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
                }
                StackLiteralInitFldData data = { propIds, 0};
                globOptData.stackLiteralInitFldDataMap->AddNew(sym, data);
            }
            if (globOptData.canStoreTempObjectSyms == nullptr)
            {
                globOptData.canStoreTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
            }
            globOptData.canStoreTempObjectSyms->Set(symId);
        }
        else if (globOptData.canStoreTempObjectSyms)
        {
            // Still maybe-temp, but no longer safe to store temps into.
            globOptData.canStoreTempObjectSyms->Clear(symId);
        }
    }
    else
    {
        // The def produces a non-temp value: clear the sym from both sets.
        Assert(!canStoreTemp);
        if (globOptData.maybeTempObjectSyms)
        {
            if (globOptData.canStoreTempObjectSyms)
            {
                globOptData.canStoreTempObjectSyms->Clear(symId);
            }
            globOptData.maybeTempObjectSyms->Clear(symId);
        }
        else
        {
            Assert(!globOptData.canStoreTempObjectSyms);
        }
        // The symbol is being assigned to, the sym shouldn't still be in the stackLiteralInitFldDataMap
        Assert(this->IsLoopPrePass() ||
            globOptData.stackLiteralInitFldDataMap == nullptr
            || globOptData.stackLiteralInitFldDataMap->Count() == 0
            || !globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
    }
}
// If 'opnd' (a src, or a non-register dst) may reference a mark-temp object,
// attach or insert a BailOutMarkTempObject pre-op bailout on 'instr', mark the
// operand as can-store-temp where applicable, and maintain the stack-literal
// InitFld progress map. Returns the (possibly replaced) instruction.
IR::Instr *
GlobOpt::GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst)
{
    Assert(opnd);
    Assert(isDst == (opnd == instr->GetDst()));
    Assert(opnd != instr->GetDst() || !opnd->IsRegOpnd());
    Assert(!this->IsLoopPrePass());
    Assert(instr->HasAnyImplicitCalls());

    // Only dst reg opnd opcode or ArgOut_A should have dstIsTempObject marked
    Assert(!isDst || !instr->dstIsTempObject || instr->m_opcode == Js::OpCode::ArgOut_A);

    // Post-op implicit call shouldn't have installed yet
    Assert(!instr->HasBailOutInfo() || (instr->GetBailOutKind() & IR::BailOutKindBits) != IR::BailOutOnImplicitCalls);

    GlobOptBlockData& globOptData = *CurrentBlockData();
    Assert(globOptData.maybeTempObjectSyms != nullptr);

    IR::PropertySymOpnd * propertySymOpnd = nullptr;
    StackSym * stackSym = ObjectTemp::GetStackSym(opnd, &propertySymOpnd);

    // It is okay to not get the var equiv sym here, as use of a type specialized sym is not use of the temp object
    // so no need to add mark temp bailout.
    // TempObjectSysm doesn't contain any type spec sym, so we will get false here for all type spec sym.
    if (stackSym && globOptData.maybeTempObjectSyms->Test(stackSym->m_id))
    {
        if (instr->HasBailOutInfo())
        {
            // Piggy-back on the existing bailout; dead-store elimination must be
            // suppressed so the boxing walk sees all live values.
            instr->SetBailOutKind(instr->GetBailOutKind() | IR::BailOutMarkTempObject);
            instr->GetBailOutInfo()->canDeadStore = false;
        }
        else
        {
            // On insert the pre op bailout if it is not Direct field access do nothing, don't check the dst yet.
            // SetTypeCheckBailout will clear this out if it is direct field access.
            if (isDst
                || (instr->m_opcode == Js::OpCode::FromVar && !opnd->GetValueType().IsPrimitive())
                || propertySymOpnd == nullptr
                || !propertySymOpnd->IsTypeCheckProtected())
            {
                this->GenerateBailAtOperation(&instr, IR::BailOutMarkTempObject);
                instr->GetBailOutInfo()->canDeadStore = false;
            }
            else if (propertySymOpnd->MayHaveImplicitCall())
            {
                this->GenerateBailAtOperation(&instr, IR::BailOutMarkTempObject);
            }
        }

        if (!opnd->IsRegOpnd() && (!isDst || (globOptData.canStoreTempObjectSyms && globOptData.canStoreTempObjectSyms->Test(stackSym->m_id))))
        {
            // If this opnd is a dst, that means that the object pointer is a stack object,
            // and we can store temp object/number on it.
            // If the opnd is a src, that means that the object pointer may be a stack object
            // so the load may be a temp object/number and we need to track its use.

            // Don't mark start of indir as can store temp, because we don't actually know
            // what it is assigning to.
            if (!isDst || !opnd->IsIndirOpnd())
            {
                opnd->SetCanStoreTemp();
            }

            if (propertySymOpnd)
            {
                // Track initfld of stack literals
                if (isDst && instr->m_opcode == Js::OpCode::InitFld)
                {
                    const Js::PropertyId propertyId = propertySymOpnd->m_sym->AsPropertySym()->m_propertyId;

                    // We don't need to track numeric properties init
                    if (!this->func->GetThreadContextInfo()->IsNumericProperty(propertyId))
                    {
                        DebugOnly(bool found = false);
                        // Advance this literal's init-field cursor; remove the entry
                        // once every property has been initialized.
                        globOptData.stackLiteralInitFldDataMap->RemoveIf(stackSym,
                            [&](StackSym * key, StackLiteralInitFldData & data)
                            {
                                DebugOnly(found = true);
                                Assert(key == stackSym);
                                Assert(data.currentInitFldCount < data.propIds->count);

                                if (data.propIds->elements[data.currentInitFldCount] != propertyId)
                                {
#if DBG
                                    bool duplicate = false;
                                    for (uint i = 0; i < data.currentInitFldCount; i++)
                                    {
                                        if (data.propIds->elements[i] == propertyId)
                                        {
                                            duplicate = true;
                                            break;
                                        }
                                    }
                                    Assert(duplicate);
#endif
                                    // duplicate initialization
                                    return false;
                                }
                                bool finished = (++data.currentInitFldCount == data.propIds->count);
#if DBG
                                if (finished)
                                {
                                    // We can still track the finished stack literal InitFld lexically.
                                    this->finishedStackLiteralInitFld->Set(stackSym->m_id);
                                }
#endif
                                return finished;
                            });
                        // We might still see InitFld even we have finished with all the property Id because
                        // of duplicate entries at the end
                        Assert(found || finishedStackLiteralInitFld->Test(stackSym->m_id));
                    }
                }
            }
        }
    }
    return instr;
}
- LoopCount *
- GlobOpt::GetOrGenerateLoopCountForMemOp(Loop *loop)
- {
- LoopCount *loopCount = loop->loopCount;
- if (loopCount && !loopCount->HasGeneratedLoopCountSym())
- {
- Assert(loop->bailOutInfo);
- EnsureBailTarget(loop);
- GenerateLoopCountPlusOne(loop, loopCount);
- }
- return loopCount;
- }
// Produces (and caches per unroll factor) an operand holding the total induction
// variable change for a memop: loopCount * unroll. Emits the multiply with an
// overflow bailout when the count is symbolic; folds it when it is constant,
// rejitting with memop disabled if the constant math overflows.
IR::Opnd *
GlobOpt::GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr)
{
    AssertOrFailFast(unroll != Js::Constants::InvalidLoopUnrollFactor);
    LoopCount *loopCount = loop->loopCount;
    IR::Opnd *sizeOpnd = nullptr;
    Assert(loopCount);
    Assert(loop->memOpInfo->inductionVariableOpndPerUnrollMap);
    // Cache hit: this unroll factor was already materialized.
    if (loop->memOpInfo->inductionVariableOpndPerUnrollMap->TryGetValue(unroll, &sizeOpnd))
    {
        return sizeOpnd;
    }
    Func *localFunc = loop->GetFunc();
    // Helper: emit into the landing pad by default, or before 'insertBeforeInstr'.
    const auto InsertInstr = [&](IR::Instr *instr)
    {
        if (insertBeforeInstr == nullptr)
        {
            loop->landingPad->InsertAfter(instr);
        }
        else
        {
            insertBeforeInstr->InsertBefore(instr);
        }
    };
    if (loopCount->LoopCountMinusOneSym())
    {
        // Symbolic loop count (LoopCountSym already holds count; the minus-one
        // form only exists as the sym we generated from).
        IRType type = loopCount->LoopCountSym()->GetType();

        // Loop count is off by one, so add one
        IR::RegOpnd *loopCountOpnd = IR::RegOpnd::New(loopCount->LoopCountSym(), type, localFunc);
        sizeOpnd = loopCountOpnd;

        if (unroll != 1)
        {
            sizeOpnd = IR::RegOpnd::New(TyUint32, this->func);

            IR::Opnd *unrollOpnd = IR::IntConstOpnd::New(unroll, type, localFunc);

            IR::Instr *inductionChangeMultiplier = IR::Instr::New(
                Js::OpCode::Mul_I4, sizeOpnd, loopCountOpnd, unrollOpnd, localFunc);

            InsertInstr(inductionChangeMultiplier);

            // The multiply may overflow at runtime — guard with a bailout.
            inductionChangeMultiplier->ConvertToBailOutInstr(loop->bailOutInfo, IR::BailOutOnOverflow);
        }
    }
    else
    {
        // Constant loop count: fold (count + 1) * unroll at JIT time.
        int32 loopCountMinusOnePlusOne;
        int32 size;
        if (Int32Math::Add(loopCount->LoopCountMinusOneConstantValue(), 1, &loopCountMinusOnePlusOne) ||
            Int32Math::Mul(loopCountMinusOnePlusOne, unroll, &size))
        {
            // Overflow in the constant math: memop is not viable for this loop.
            throw Js::RejitException(RejitReason::MemOpDisabled);
        }
        Assert(size > 0);
        sizeOpnd = IR::IntConstOpnd::New(size, IRType::TyUint32, localFunc);
    }
    loop->memOpInfo->inductionVariableOpndPerUnrollMap->Add(unroll, sizeOpnd);
    return sizeOpnd;
}
// Computes (and caches) the start index operand for a memop, correcting for the
// induction variable's direction and for whether the index was already updated
// this iteration. Cache key combines the two booleans.
IR::RegOpnd*
GlobOpt::GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr)
{
    IR::RegOpnd *startIndexOpnd = nullptr;
    Func *localFunc = loop->GetFunc();
    IRType type = indexOpnd->GetType();

    // 2-bit cache index: (incremental << 1) | alreadyChanged.
    const int cacheIndex = ((int)isInductionVariableChangeIncremental << 1) | (int)bIndexAlreadyChanged;

    if (loop->memOpInfo->startIndexOpndCache[cacheIndex])
    {
        return loop->memOpInfo->startIndexOpndCache[cacheIndex];
    }
    // Helper: emit into the landing pad by default, or before 'insertBeforeInstr'.
    const auto InsertInstr = [&](IR::Instr *instr)
    {
        if (insertBeforeInstr == nullptr)
        {
            loop->landingPad->InsertAfter(instr);
        }
        else
        {
            insertBeforeInstr->InsertBefore(instr);
        }
    };

    startIndexOpnd = IR::RegOpnd::New(type, localFunc);

    // If the 2 are different we can simply use indexOpnd
    if (isInductionVariableChangeIncremental != bIndexAlreadyChanged)
    {
        InsertInstr(IR::Instr::New(Js::OpCode::Ld_A,
                                   startIndexOpnd,
                                   indexOpnd,
                                   localFunc));
    }
    else
    {
        // Otherwise add 1 to it
        InsertInstr(IR::Instr::New(Js::OpCode::Add_I4,
                                   startIndexOpnd,
                                   indexOpnd,
                                   IR::IntConstOpnd::New(1, type, localFunc, true),
                                   localFunc));
    }

    // Decrementing loops copy from the other end: shift the start back by size.
    if (!isInductionVariableChangeIncremental)
    {
        InsertInstr(IR::Instr::New(Js::OpCode::Sub_I4,
                                   startIndexOpnd,
                                   startIndexOpnd,
                                   sizeOpnd,
                                   localFunc));
    }

    loop->memOpInfo->startIndexOpndCache[cacheIndex] = startIndexOpnd;
    return startIndexOpnd;
}
- IR::Instr*
- GlobOpt::FindUpperBoundsCheckInstr(IR::Instr* fromInstr)
- {
- IR::Instr *upperBoundCheck = fromInstr;
- do
- {
- upperBoundCheck = upperBoundCheck->m_prev;
- Assert(upperBoundCheck);
- Assert(!upperBoundCheck->IsLabelInstr());
- } while (upperBoundCheck->m_opcode != Js::OpCode::BoundCheck);
- return upperBoundCheck;
- }
- IR::Instr*
- GlobOpt::FindArraySegmentLoadInstr(IR::Instr* fromInstr)
- {
- IR::Instr *headSegmentLengthLoad = fromInstr;
- do
- {
- headSegmentLengthLoad = headSegmentLengthLoad->m_prev;
- Assert(headSegmentLengthLoad);
- Assert(!headSegmentLengthLoad->IsLabelInstr());
- } while (headSegmentLengthLoad->m_opcode != Js::OpCode::LdIndir);
- return headSegmentLengthLoad;
- }
// After a loop has been converted to Memset/Memcopy, removes the original
// LdElemI_A/StElemI_A instruction and the helper instructions emitted for it
// (bound checks, segment/length loads, array bailout markers), converting the
// element access itself to ByteCodeUses.
void
GlobOpt::RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block)
{
    Assert(srcInstr && (srcInstr->m_opcode == Js::OpCode::LdElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict));
    Assert(memopInstr && (memopInstr->m_opcode == Js::OpCode::Memcopy || memopInstr->m_opcode == Js::OpCode::Memset));
    Assert(block);
    const bool isDst = srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict;
    IR::RegOpnd* opnd = (isDst ? memopInstr->GetDst() : memopInstr->GetSrc1())->AsIndirOpnd()->GetBaseOpnd();
    IR::ArrayRegOpnd* arrayOpnd = opnd->IsArrayRegOpnd() ? opnd->AsArrayRegOpnd() : nullptr;

    // 'topInstr' walks upward to the first helper instruction belonging to this access.
    IR::Instr* topInstr = srcInstr;

    if (srcInstr->extractedUpperBoundCheckWithoutHoisting)
    {
        IR::Instr *upperBoundCheck = FindUpperBoundsCheckInstr(srcInstr);
        Assert(upperBoundCheck && upperBoundCheck != srcInstr);
        topInstr = upperBoundCheck;
    }

    if (srcInstr->loadedArrayHeadSegmentLength && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        IR::Instr *arrayLoadSegmentHeadLength = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHeadLength);
        topInstr = arrayLoadSegmentHeadLength;
        arrayOpnd->RemoveHeadSegmentLengthSym();
    }

    if (srcInstr->loadedArrayHeadSegment && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        IR::Instr *arrayLoadSegmentHead = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHead);
        topInstr = arrayLoadSegmentHead;
        arrayOpnd->RemoveHeadSegmentSym();
    }

    // If no bounds check are present, simply look up for instruction added for instrumentation
    if(topInstr == srcInstr)
    {
        bool checkPrev = true;
        while (checkPrev)
        {
            switch (topInstr->m_prev->m_opcode)
            {
            case Js::OpCode::BailOnNotArray:
            case Js::OpCode::NoImplicitCallUses:
            case Js::OpCode::ByteCodeUses:
                topInstr = topInstr->m_prev;
                checkPrev = !!topInstr->m_prev;
                break;
            default:
                checkPrev = false;
                break;
            }
        }
    }

    // Delete the helper instructions from topInstr down to (but excluding) srcInstr.
    // ByteCodeUses are kept: they carry bailout liveness info.
    while (topInstr != srcInstr)
    {
        IR::Instr* removeInstr = topInstr;
        topInstr = topInstr->m_next;
        Assert(
            removeInstr->m_opcode == Js::OpCode::BailOnNotArray ||
            removeInstr->m_opcode == Js::OpCode::NoImplicitCallUses ||
            removeInstr->m_opcode == Js::OpCode::ByteCodeUses ||
            removeInstr->m_opcode == Js::OpCode::LdIndir ||
            removeInstr->m_opcode == Js::OpCode::BoundCheck
        );
        if (removeInstr->m_opcode != Js::OpCode::ByteCodeUses)
        {
            block->RemoveInstr(removeInstr);
        }
    }
    this->ConvertToByteCodeUses(srcInstr);
}
- void
- GlobOpt::GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType)
- {
- Assert(instr && (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict));
- IR::Opnd* arrayOpnd = instr->m_opcode == Js::OpCode::LdElemI_A ? instr->GetSrc1() : instr->GetDst();
- Assert(arrayOpnd->IsIndirOpnd());
- IR::IndirOpnd* indirArrayOpnd = arrayOpnd->AsIndirOpnd();
- IR::RegOpnd* baseOpnd = (IR::RegOpnd*)indirArrayOpnd->GetBaseOpnd();
- IR::RegOpnd* indexOpnd = (IR::RegOpnd*)indirArrayOpnd->GetIndexOpnd();
- Assert(baseOpnd);
- Assert(indexOpnd);
- // Process Out Params
- base = baseOpnd;
- index = indexOpnd;
- arrayType = indirArrayOpnd->GetType();
- }
// Emits a Memset/Memcopy instruction for a validated memop candidate and
// removes the per-element load/store it replaces.
// - loop:      the loop being transformed (provides bailout info and func).
// - loopCount: used only for tracing the computed trip count.
// - emitData:  validated candidate info (stElemInstr, and ldElemInstr for memcopy).
void
GlobOpt::EmitMemop(Loop * loop, LoopCount *loopCount, const MemOpEmitData* emitData)
{
    Assert(emitData);
    Assert(emitData->candidate);
    Assert(emitData->stElemInstr);
    Assert(emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A || emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A_Strict);
    IR::BailOutKind bailOutKind = emitData->bailOutKind;
    // Only unroll == 1 is supported for emission.
    const byte unroll = emitData->inductionVar.unroll;
    Assert(unroll == 1);
    const bool isInductionVariableChangeIncremental = emitData->inductionVar.isIncremental;
    const bool bIndexAlreadyChanged = emitData->candidate->bIndexAlreadyChanged;
    IR::RegOpnd *baseOpnd = nullptr;
    IR::RegOpnd *indexOpnd = nullptr;
    IRType dstType;
    GetMemOpSrcInfo(loop, emitData->stElemInstr, baseOpnd, indexOpnd, dstType);
    Func *localFunc = loop->GetFunc();
    // Handle bailout info
    EnsureBailTarget(loop);
    Assert(bailOutKind != IR::BailOutInvalid);
    // Keep only Array bits bailOuts. Consider handling these bailouts instead of simply ignoring them
    bailOutKind &= IR::BailOutForArrayBits;
    // Add our custom bailout to handle Op_MemCopy return value.
    bailOutKind |= IR::BailOutOnMemOpError;
    BailOutInfo *const bailOutInfo = loop->bailOutInfo;
    Assert(bailOutInfo);
    // The memop (and its size/start-index computations) are inserted before
    // the loop's bailout instruction, i.e. in the landing pad region.
    IR::Instr *insertBeforeInstr = bailOutInfo->bailOutInstr;
    Assert(insertBeforeInstr);
    IR::Opnd *sizeOpnd = GenerateInductionVariableChangeForMemOp(loop, unroll, insertBeforeInstr);
    IR::RegOpnd *startIndexOpnd = GenerateStartIndexOpndForMemop(loop, indexOpnd, sizeOpnd, isInductionVariableChangeIncremental, bIndexAlreadyChanged, insertBeforeInstr);
    IR::IndirOpnd* dstOpnd = IR::IndirOpnd::New(baseOpnd, startIndexOpnd, dstType, localFunc);
    IR::Opnd *src1;
    const bool isMemset = emitData->candidate->IsMemSet();
    // Get the source according to the memop type
    if (isMemset)
    {
        MemSetEmitData* data = (MemSetEmitData*)emitData;
        const Loop::MemSetCandidate* candidate = data->candidate->AsMemSet();
        if (candidate->srcSym)
        {
            // Memset from a loop-invariant sym.
            IR::RegOpnd* regSrc = IR::RegOpnd::New(candidate->srcSym, candidate->srcSym->GetType(), func);
            regSrc->SetIsJITOptimizedReg(true);
            src1 = regSrc;
        }
        else
        {
            // Memset from a constant value.
            src1 = IR::AddrOpnd::New(candidate->constant.ToVar(localFunc), IR::AddrOpndKindConstantAddress, localFunc);
        }
    }
    else
    {
        Assert(emitData->candidate->IsMemCopy());
        MemCopyEmitData* data = (MemCopyEmitData*)emitData;
        Assert(data->ldElemInstr);
        Assert(data->ldElemInstr->m_opcode == Js::OpCode::LdElemI_A);
        IR::RegOpnd *srcBaseOpnd = nullptr;
        IR::RegOpnd *srcIndexOpnd = nullptr;
        IRType srcType;
        GetMemOpSrcInfo(loop, data->ldElemInstr, srcBaseOpnd, srcIndexOpnd, srcType);
        // Load and store must be driven by the same induction variable.
        Assert(GetVarSymID(srcIndexOpnd->GetStackSym()) == GetVarSymID(indexOpnd->GetStackSym()));
        src1 = IR::IndirOpnd::New(srcBaseOpnd, startIndexOpnd, srcType, localFunc);
    }
    // Generate memcopy
    IR::Instr* memopInstr = IR::BailOutInstr::New(isMemset ? Js::OpCode::Memset : Js::OpCode::Memcopy, bailOutKind, bailOutInfo, localFunc);
    memopInstr->SetDst(dstOpnd);
    memopInstr->SetSrc1(src1);
    memopInstr->SetSrc2(sizeOpnd);
    insertBeforeInstr->InsertBefore(memopInstr);
    loop->memOpInfo->instr = memopInstr;
#if DBG_DUMP
    if (DO_MEMOP_TRACE())
    {
        char valueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseOpnd->GetValueType().ToString(valueTypeStr);
        const int loopCountBufSize = 16;
        char16 loopCountBuf[loopCountBufSize];
        if (loopCount->LoopCountMinusOneSym())
        {
            swprintf_s(loopCountBuf, _u("s%u"), loopCount->LoopCountMinusOneSym()->m_id);
        }
        else
        {
            swprintf_s(loopCountBuf, _u("%u"), loopCount->LoopCountMinusOneConstantValue() + 1);
        }
        if (isMemset)
        {
            const Loop::MemSetCandidate* candidate = emitData->candidate->AsMemSet();
            const int constBufSize = 32;
            char16 constBuf[constBufSize];
            if (candidate->srcSym)
            {
                swprintf_s(constBuf, _u("s%u"), candidate->srcSym->m_id);
            }
            else
            {
                switch (candidate->constant.type)
                {
                case TyInt8:
                case TyInt16:
                case TyInt32:
                case TyInt64:
                    swprintf_s(constBuf, sizeof(IntConstType) == 8 ? _u("%lld") : _u("%d"), candidate->constant.u.intConst.value);
                    break;
                case TyFloat32:
                case TyFloat64:
                    swprintf_s(constBuf, _u("%.4f"), candidate->constant.u.floatConst.value);
                    break;
                case TyVar:
                    swprintf_s(constBuf, sizeof(Js::Var) == 8 ? _u("0x%.16llX") : _u("0x%.8X"), candidate->constant.u.varConst.value);
                    break;
                default:
                    AssertMsg(false, "Unsupported constant type");
                    swprintf_s(constBuf, _u("Unknown"));
                    break;
                }
            }
            TRACE_MEMOP_PHASE(MemSet, loop, emitData->stElemInstr,
                              _u("ValueType: %S, Base: s%u, Index: s%u, Constant: %s, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              constBuf,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
        else
        {
            const Loop::MemCopyCandidate* candidate = emitData->candidate->AsMemCopy();
            TRACE_MEMOP_PHASE(MemCopy, loop, emitData->stElemInstr,
                              _u("ValueType: %S, StBase: s%u, Index: s%u, LdBase: s%u, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              candidate->ldBase,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
    }
#endif
    Assert(noImplicitCallUsesToInsert->Count() == 0);
    bool isLikelyJsArray;
    // Re-establish the no-implicit-call uses for the store's array base before
    // removing the store itself.
    if (emitData->stElemInstr->GetDst()->IsIndirOpnd())
    {
        baseOpnd = emitData->stElemInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
        isLikelyJsArray = baseOpnd->GetValueType().IsLikelyArrayOrObjectWithArray();
        ProcessNoImplicitCallArrayUses(baseOpnd, baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr, emitData->stElemInstr, isLikelyJsArray, true);
    }
    RemoveMemOpSrcInstr(memopInstr, emitData->stElemInstr, emitData->block);
    if (!isMemset)
    {
        // Memcopy additionally retires the element load.
        IR::Instr* ldElemInstr = ((MemCopyEmitData*)emitData)->ldElemInstr;
        if (ldElemInstr->GetSrc1()->IsIndirOpnd())
        {
            baseOpnd = ldElemInstr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd();
            isLikelyJsArray = baseOpnd->GetValueType().IsLikelyArrayOrObjectWithArray();
            ProcessNoImplicitCallArrayUses(baseOpnd, baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr, ldElemInstr, isLikelyJsArray, true);
        }
        RemoveMemOpSrcInstr(memopInstr, ldElemInstr, emitData->block);
    }
    InsertNoImplicitCallUses(memopInstr);
    noImplicitCallUsesToInsert->Clear();
}
- bool
- GlobOpt::InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, MemSetEmitData* emitData, bool& errorInInstr)
- {
- Assert(emitData && emitData->candidate && emitData->candidate->IsMemSet());
- Loop::MemSetCandidate* candidate = (Loop::MemSetCandidate*)emitData->candidate;
- if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
- {
- if (instr->GetDst()->IsIndirOpnd()
- && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base)
- && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
- )
- {
- Assert(instr->IsProfiledInstr());
- emitData->stElemInstr = instr;
- emitData->bailOutKind = instr->GetBailOutKind();
- return true;
- }
- TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan StElemI_A detected"));
- errorInInstr = true;
- }
- else if (instr->m_opcode == Js::OpCode::LdElemI_A)
- {
- TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan LdElemI_A detected"));
- errorInInstr = true;
- }
- return false;
- }
// Checks whether instr is one of the two instructions (store, then load,
// iterating backward) that the memcopy candidate describes. Returns true
// only once both have been found; sets errorInInstr when an unrelated or
// mismatched element access invalidates the candidate.
bool
GlobOpt::InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, MemCopyEmitData* emitData, bool& errorInInstr)
{
    Assert(emitData && emitData->candidate && emitData->candidate->IsMemCopy());
    Loop::MemCopyCandidate* candidate = (Loop::MemCopyCandidate*)emitData->candidate;
    if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
    {
        if (
            instr->GetDst()->IsIndirOpnd() &&
            (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base) &&
            (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
            )
        {
            Assert(instr->IsProfiledInstr());
            emitData->stElemInstr = instr;
            emitData->bailOutKind = instr->GetBailOutKind();
            // Still need to find the LdElem
            return false;
        }
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan StElemI_A detected"));
        errorInInstr = true;
    }
    else if (instr->m_opcode == Js::OpCode::LdElemI_A)
    {
        // The load is only valid if its store was already seen (backward walk)
        // and it reads candidate->ldBase with the same index sym.
        if (
            emitData->stElemInstr &&
            instr->GetSrc1()->IsIndirOpnd() &&
            (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->ldBase) &&
            (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
            )
        {
            Assert(instr->IsProfiledInstr());
            emitData->ldElemInstr = instr;
            // Source and destination arrays must have the same value type for
            // the element-wise copy to be replaceable by a memcopy.
            ValueType stValueType = emitData->stElemInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
            ValueType ldValueType = emitData->ldElemInstr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
            if (stValueType != ldValueType)
            {
#if DBG_DUMP
                char16 stValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                stValueType.ToString(stValueTypeStr);
                char16 ldValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                ldValueType.ToString(ldValueTypeStr);
                TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("for mismatch in Load(%s) and Store(%s) value type"), ldValueTypeStr, stValueTypeStr);
#endif
                errorInInstr = true;
                return false;
            }
            // We found both instruction for this candidate
            return true;
        }
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan LdElemI_A detected"));
        errorInInstr = true;
    }
    return false;
}
// The caller is responsible to free the memory allocated between inOrderEmitData[iEmitData -> end]
// Walks the loop body backward, matching each memop candidate to its actual
// instructions, and fills inOrderEmitData (from the back) with validated
// emit data in program order. Returns false (aborting memop for the loop)
// as soon as any candidate fails validation.
// - iEmitData: in: candidate count; out: index of the first filled slot
//   (0 on full success).
bool
GlobOpt::ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) MemOpEmitData** inOrderEmitData, int& iEmitData)
{
    AnalysisAssert(iEmitData == (int)loop->memOpInfo->candidates->Count());
    // We iterate over the second block of the loop only. MemOp Works only if the loop has exactly 2 blocks
    Assert(loop->blockList.HasTwo());
    Loop::MemOpList::Iterator iter(loop->memOpInfo->candidates);
    BasicBlock* bblock = loop->blockList.Head()->next;
    Loop::MemOpCandidate* candidate = nullptr;
    MemOpEmitData* emitData = nullptr;
    // Iterate backward because the list of candidate is reversed
    FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, bblock)
    {
        if (!candidate)
        {
            // Time to check next candidate
            if (!iter.Next())
            {
                // We have been through the whole list of candidates, finish
                break;
            }
            candidate = iter.Data();
            if (!candidate)
            {
                continue;
            }
            // Common check for memset and memcopy
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
            // Get the inductionVariable changeInfo
            if (!loop->memOpInfo->inductionVariableChangeInfoMap->TryGetValue(candidate->index, &inductionVariableChangeInfo))
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): no induction variable"), candidate->base);
                return false;
            }
            if (inductionVariableChangeInfo.unroll != candidate->count)
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): not matching unroll count"), candidate->base);
                return false;
            }
            if (candidate->IsMemSet())
            {
                Assert(!PHASE_OFF(Js::MemSetPhase, this->func));
                emitData = JitAnew(this->alloc, MemSetEmitData);
            }
            else
            {
                Assert(!PHASE_OFF(Js::MemCopyPhase, this->func));
                // Specific check for memcopy
                Assert(candidate->IsMemCopy());
                Loop::MemCopyCandidate* memcopyCandidate = candidate->AsMemCopy();
                // Both sides of the copy must be resolved and their element
                // counts must agree.
                if (memcopyCandidate->base == Js::Constants::InvalidSymID
                    || memcopyCandidate->ldBase == Js::Constants::InvalidSymID
                    || (memcopyCandidate->ldCount != memcopyCandidate->count))
                {
                    TRACE_MEMOP_PHASE(MemCopy, loop, nullptr, _u("(s%d): not matching ldElem and stElem"), candidate->base);
                    return false;
                }
                emitData = JitAnew(this->alloc, MemCopyEmitData);
            }
            Assert(emitData);
            emitData->block = bblock;
            emitData->inductionVar = inductionVariableChangeInfo;
            emitData->candidate = candidate;
        }
        bool errorInInstr = false;
        bool candidateFound = candidate->IsMemSet() ?
            InspectInstrForMemSetCandidate(loop, instr, (MemSetEmitData*)emitData, errorInInstr)
            : InspectInstrForMemCopyCandidate(loop, instr, (MemCopyEmitData*)emitData, errorInInstr);
        if (errorInInstr)
        {
            JitAdelete(this->alloc, emitData);
            return false;
        }
        if (candidateFound)
        {
            AnalysisAssert(iEmitData > 0);
            if (iEmitData == 0)
            {
                // Explicit for OACR
                break;
            }
            // Fill the array from the back so the emit data ends up in
            // program order (the candidate list is reversed).
            inOrderEmitData[--iEmitData] = emitData;
            candidate = nullptr;
            emitData = nullptr;
        }
    } NEXT_INSTR_BACKWARD_IN_BLOCK;
    // Any candidate left unmatched means validation failed.
    if (iter.IsValid())
    {
        TRACE_MEMOP(loop, nullptr, _u("Candidates not found in loop while validating"));
        return false;
    }
    return true;
}
// Top-level memop driver: for every loop with memop candidates, computes the
// loop count, validates the candidates, and either emits the Memset/Memcopy
// instructions or disables memop for that loop. Owns the lifetime of the
// temporary in-order emit-data array.
void
GlobOpt::ProcessMemOp()
{
    FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
    {
        if (HasMemOp(loop))
        {
            const int candidateCount = loop->memOpInfo->candidates->Count();
            Assert(candidateCount > 0);
            LoopCount * loopCount = GetOrGenerateLoopCountForMemOp(loop);
            // If loopCount is not available we can not continue with memop
            if (!loopCount || !(loopCount->LoopCountMinusOneSym() || loopCount->LoopCountMinusOneConstantValue()))
            {
                TRACE_MEMOP(loop, nullptr, _u("MemOp skipped for no loop count"));
                loop->doMemOp = false;
                loop->memOpInfo->candidates->Clear();
                continue;
            }
            // The list is reversed, check them and place them in order in the following array
            MemOpEmitData** inOrderCandidates = JitAnewArray(this->alloc, MemOpEmitData*, candidateCount);
            int i = candidateCount;
            if (ValidateMemOpCandidates(loop, inOrderCandidates, i))
            {
                // Validation fills the array from the back; full success
                // means every slot was filled.
                Assert(i == 0);
                // Process the valid MemOp candidate in order.
                for (; i < candidateCount; ++i)
                {
                    // Emit
                    EmitMemop(loop, loopCount, inOrderCandidates[i]);
                    JitAdelete(this->alloc, inOrderCandidates[i]);
                }
            }
            else
            {
                // On failure only slots [i, candidateCount) were filled;
                // free just those.
                Assert(i != 0);
                for (; i < candidateCount; ++i)
                {
                    JitAdelete(this->alloc, inOrderCandidates[i]);
                }
                // One of the memop candidates did not validate. Do not emit for this loop.
                loop->doMemOp = false;
                loop->memOpInfo->candidates->Clear();
            }
            // Free memory
            JitAdeleteArray(this->alloc, candidateCount, inOrderCandidates);
        }
    } NEXT_LOOP_EDITING;
}
- void GlobOpt::PRE::FieldPRE(Loop *loop)
- {
- JitArenaAllocator *alloc = this->globOpt->tempAlloc;
- this->FindPossiblePRECandidates(loop, alloc);
- this->PreloadPRECandidates(loop);
- this->RemoveOverlyOptimisticInitialValues(loop);
- }
- bool
- GlobOpt::PRE::InsertSymDefinitionInLandingPad(StackSym * sym, Loop * loop, Sym ** objPtrCopyPropSym)
- {
- Assert(sym->IsSingleDef());
- IR::Instr * symDefInstr = sym->GetInstrDef();
- if (!GlobOpt::IsPREInstrSequenceCandidateLoad(symDefInstr->m_opcode))
- {
- return false;
- }
- IR::Opnd * symDefInstrSrc1 = symDefInstr->GetSrc1();
- if (symDefInstrSrc1->IsSymOpnd())
- {
- Assert(symDefInstrSrc1->AsSymOpnd()->m_sym->IsPropertySym());
- // $L1
- // T1 = o.x (v1|T3)
- // T2 = T1.y (v2|T4) <-- T1 is not live in the loop landing pad
- // jmp $L1
- // Trying to make T1 live in the landing pad
- // o.x
- PropertySym* propSym = symDefInstrSrc1->AsSymOpnd()->m_sym->AsPropertySym();
- if (candidates->candidatesBv->Test(propSym->m_id))
- {
- // If propsym is a PRE candidate, then it must have had the same value on all back edges.
- // So, just look up the value on one of the back edges.
- BasicBlock* loopTail = loop->GetAnyTailBlock();
- Value * valueOnBackEdge = loopTail->globOptData.FindValue(propSym);
-
- // If o.x is not invariant in the loop, we can't use the preloaded value of o.x.y in the landing pad
- Value * valueInLandingPad = loop->landingPad->globOptData.FindValue(propSym);
- if (valueOnBackEdge->GetValueNumber() != valueInLandingPad->GetValueNumber())
- {
- return false;
- }
- *objPtrCopyPropSym = valueOnBackEdge->GetValueInfo()->GetSymStore();
- if (candidates->candidatesToProcess->Test(propSym->m_id))
- {
- GlobHashBucket bucket;
- bucket.element = valueOnBackEdge;
- bucket.value = propSym;
- if (!PreloadPRECandidate(loop, &bucket))
- {
- return false;
- }
- Assert(!candidates->candidatesToProcess->Test(propSym->m_id));
- Assert(loop->landingPad->globOptData.IsLive(valueOnBackEdge->GetValueInfo()->GetSymStore()));
- // Inserted T3 = o.x
- // Now, we want to
- // 1. Insert T1 = o.x
- // 2. Insert T4 = T1.y
- // 3. Indentify T3 as the objptr copy prop sym for T1, and make T3.y live on the back-edges
- // #1 is done next. #2 and #3 are done as part of preloading T1.y
- // Insert T1 = o.x
- if (!InsertPropertySymPreloadInLandingPad(symDefInstr->Copy(), loop, propSym))
- {
- return false;
- }
- return true;
- }
- else
- {
- // o.x was already processed as a PRE candidate. If we were successful in preloading o.x,
- // we can now insert T1 = o.x
- if (loop->landingPad->globOptData.IsLive(*objPtrCopyPropSym))
- {
- // insert T1 = o.x
- if (!InsertPropertySymPreloadInLandingPad(symDefInstr->Copy(), loop, propSym))
- {
- return false;
- }
- return true;
- }
- else
- {
- return false;
- }
- }
- }
- else
- {
- return false;
- }
- }
- else if (symDefInstrSrc1->IsRegOpnd())
- {
- // T2 = T1
- // T3 = T2.y
- // trying to insert def of T2
- // T1
- StackSym * symDefInstrSrc1Sym = symDefInstrSrc1->AsRegOpnd()->GetStackSym();
- if (!loop->landingPad->globOptData.IsLive(symDefInstrSrc1Sym))
- {
- if (symDefInstrSrc1Sym->IsSingleDef())
- {
- if (!InsertSymDefinitionInLandingPad(symDefInstrSrc1Sym, loop, objPtrCopyPropSym))
- {
- return false;
- }
- }
- }
- else
- {
- *objPtrCopyPropSym = symDefInstrSrc1Sym;
- }
- if (!(OpCodeAttr::TempNumberTransfer(symDefInstr->m_opcode) && OpCodeAttr::TempObjectTransfer(symDefInstr->m_opcode)))
- {
- *objPtrCopyPropSym = sym;
- }
- IR::Instr * instr = symDefInstr->Copy();
- if (instr->m_opcode == Js::OpCode::BytecodeArgOutCapture)
- {
- instr->m_opcode = Js::OpCode::Ld_A;
- }
- InsertInstrInLandingPad(instr, loop);
- return true;
- }
- else
- {
- return false;
- }
- }
- void
- GlobOpt::PRE::InsertInstrInLandingPad(IR::Instr * instr, Loop * loop)
- {
- instr->GetSrc1()->SetIsJITOptimizedReg(true);
- if (instr->GetDst())
- {
- instr->GetDst()->SetIsJITOptimizedReg(true);
- loop->landingPad->globOptData.liveVarSyms->Set(instr->GetDst()->GetStackSym()->m_id);
- }
- if (instr->HasAnyImplicitCalls())
- {
- IR::Instr * bailInstr = globOpt->EnsureDisableImplicitCallRegion(loop);
- bailInstr->InsertBefore(instr);
- }
- else if (loop->endDisableImplicitCall)
- {
- loop->endDisableImplicitCall->InsertBefore(instr);
- }
- else
- {
- loop->landingPad->InsertAfter(instr);
- }
- instr->ClearByteCodeOffset();
- instr->SetByteCodeOffset(loop->landingPad->GetFirstInstr());
- }
// Inserts a (copied) property load into the loop landing pad, retargeting it
// at propertySym. Returns the inserted instruction, or nullptr when the
// load's current sym cannot be proven equivalent to propertySym.
IR::Instr *
GlobOpt::PRE::InsertPropertySymPreloadInLandingPad(IR::Instr * ldInstr, Loop * loop, PropertySym * propertySym)
{
    IR::SymOpnd *ldSrc = ldInstr->GetSrc1()->AsSymOpnd();
    if (ldSrc->m_sym != propertySym)
    {
        // It's possible that the property syms are different but have equivalent objPtrs. Verify their values.
        Value *val1 = globOpt->CurrentBlockData()->FindValue(ldSrc->m_sym->AsPropertySym()->m_stackSym);
        Value *val2 = globOpt->CurrentBlockData()->FindValue(propertySym->m_stackSym);
        if (!val1 || !val2 || val1->GetValueNumber() != val2->GetValueNumber())
        {
            return nullptr;
        }
    }
    // Consider: Shouldn't be necessary once we have copy-prop in prepass...
    ldInstr->GetSrc1()->AsSymOpnd()->m_sym = propertySym;
    ldSrc = ldInstr->GetSrc1()->AsSymOpnd();
    if (ldSrc->IsPropertySymOpnd())
    {
        // Drop flow-sensitive info (e.g. cached type state) that is only
        // valid at the load's original position.
        IR::PropertySymOpnd *propSymOpnd = ldSrc->AsPropertySymOpnd();
        IR::PropertySymOpnd *newPropSymOpnd;
        newPropSymOpnd = propSymOpnd->AsPropertySymOpnd()->CopyWithoutFlowSensitiveInfo(this->globOpt->func);
        ldInstr->ReplaceSrc1(newPropSymOpnd);
    }
    if (ldInstr->GetDst())
    {
        // The preloaded dst sym is now live in the landing pad.
        loop->landingPad->globOptData.liveVarSyms->Set(ldInstr->GetDst()->GetStackSym()->m_id);
    }
    InsertInstrInLandingPad(ldInstr, loop);
    return ldInstr;
}
- void
- GlobOpt::PRE::MakePropertySymLiveOnBackEdges(PropertySym * propertySym, Loop * loop, Value * valueToAdd)
- {
- BasicBlock * loopHeader = loop->GetHeadBlock();
- FOREACH_PREDECESSOR_BLOCK(blockPred, loopHeader)
- {
- if (!loop->IsDescendentOrSelf(blockPred->loop))
- {
- // Not a loop back-edge
- continue;
- }
- // Insert it in the value table
- blockPred->globOptData.SetValue(valueToAdd, propertySym);
- // Make it a live field
- blockPred->globOptData.liveFields->Set(propertySym->m_id);
- } NEXT_PREDECESSOR_BLOCK;
- }
// Clears landing-pad values that were inserted optimistically for property
// syms whose object pointer sym PRE ultimately failed to make live.
void GlobOpt::PRE::RemoveOverlyOptimisticInitialValues(Loop * loop)
{
    BasicBlock * landingPad = loop->landingPad;
    // For a property sym whose obj ptr sym wasn't live in the landing pad, we can optimistically (if the obj ptr sym was
    // single def) insert an initial value in the landing pad, with the hope that PRE could make the obj ptr sym live.
    // But, if PRE couldn't make the obj ptr sym live, we need to clear the value for the property sym from the landing pad
    // Removal-supporting iterator: entries are deleted while walking the map.
    for (auto it = loop->initialValueFieldMap.GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
    {
        PropertySym * propertySym = it.CurrentKey();
        StackSym * objPtrSym = propertySym->m_stackSym;
        if (!landingPad->globOptData.IsLive(objPtrSym))
        {
            // Sanity-check that the landing pad still holds exactly the
            // optimistic value we recorded before dropping it.
            Value * landingPadPropSymValue = landingPad->globOptData.FindValue(propertySym);
            Assert(landingPadPropSymValue);
            Assert(landingPadPropSymValue->GetValueNumber() == it.CurrentValue()->GetValueNumber());
            Assert(landingPadPropSymValue->GetValueInfo()->GetSymStore() == propertySym);
            landingPad->globOptData.ClearSymValue(propertySym);
            it.RemoveCurrent();
        }
    }
}
#if DBG_DUMP
// Debug-only trace helper: prints why a property sym could not be preloaded
// in the loop's landing pad, formatting the property sym according to its
// field kind. Only active under the FieldPRE trace phase.
void GlobOpt::PRE::TraceFailedPreloadInLandingPad(const Loop *const loop, PropertySym * propertySym, const char16* reason) const
{
    if (PHASE_TRACE(Js::FieldPREPhase, this->globOpt->func))
    {
        int32 propertyId = propertySym->m_propertyId;
        SymID objectSymId = propertySym->m_stackSym->m_id;
        // NOTE(review): 32 chars may truncate long property names in the
        // PropertyKindData/!OOP branch — confirm swprintf_s truncation
        // behavior is acceptable for trace output.
        char16 propSymStr[32];
        switch (propertySym->m_fieldKind)
        {
        case PropertyKindData:
            if (JITManager::GetJITManager()->IsOOPJITEnabled())
            {
                // OOP JIT has no in-process property records; print the id.
                swprintf_s(propSymStr, _u("s%d->#%d"), objectSymId, propertyId);
            }
            else
            {
                Js::PropertyRecord const* fieldName = propertySym->m_func->GetInProcThreadContext()->GetPropertyRecord(propertyId);
                swprintf_s(propSymStr, _u("s%d->%s"), objectSymId, fieldName->GetBuffer());
            }
            break;
        case PropertyKindSlots:
        case PropertyKindSlotArray:
            swprintf_s(propSymStr, _u("s%d[%d]"), objectSymId, propertyId);
            break;
        case PropertyKindLocalSlots:
            swprintf_s(propSymStr, _u("s%dl[%d]"), objectSymId, propertyId);
            break;
        default:
            AssertMsg(0, "Unknown field kind");
            break;
        }
        Output::Print(_u("** TRACE: Field PRE: "));
        this->globOpt->func->DumpFullFunctionName();
        Output::Print(_u(": Failed to pre-load (%s) in landing pad of loop #%d. Reason: %s "), propSymStr, loop->GetLoopNumber(), reason);
        Output::Print(_u("\n"));
    }
}
#endif
|