| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
27722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678267926802681268226832684268526862687268826892690269126922693269426952696269726982699270027012702270327042705270627072708270927102711271227132714271527162717271827192720272127222723272427252726272727282729273027312732273327342735273627372738273927402741274227432744274527462747274827492750275127522753275427552756275727582759276027612762276327642765276627672768276927702771277227732774277527762
77727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175317631773178317931803181318231833184318531863187318831893190319131923193319431953196319731983199320032013202320332043205320632073208320932103211321232133214321532163217321832193220322132223223322432253226322732283229323032313232323332343235323632373238323932403241324232433244324532463247324832493250325132523253325432553256325732583259326032613262326332643265326632673268326932703271327232733274327532763
27732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627362836293630363136323633363436353636363736383639364036413642364336443645364636473648364936503651365236533654365536563657365836593660366136623663366436653666366736683669367036713672367336743675367636773678367936803681368236833684368536863687368836893690369136923693369436953696369736983699370037013702370337043705370637073708370937103711371237133714371537163717371837193720372137223723372437253726372737283729373037313732373337343735373637373738373937403741374237433744374537463747374837493750375137523753375437553756375737583759376037613762376337643765376637673768376937703771377237733774377537763
77737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881388238833884388538863887388838893890389138923893389438953896389738983899390039013902390339043905390639073908390939103911391239133914391539163917391839193920392139223923392439253926392739283929393039313932393339343935393639373938393939403941394239433944394539463947394839493950395139523953395439553956395739583959396039613962396339643965396639673968396939703971397239733974397539763977397839793980398139823983398439853986398739883989399039913992399339943995399639973998399940004001400240034004400540064007400840094010401140124013401440154016401740184019402040214022402340244025402640274028402940304031403240334034403540364037403840394040404140424043404440454046404740484049405040514052405340544055405640574058405940604061406240634064406540664067406840694070407140724073407440754076407740784079408040814082408340844085408640874088408940904091409240934094409540964097409840994100410141024103410441054106410741084109411041114112411341144115411641174118411941204121412241234124412541264127412841294130413141324133413441354136413741384139414041414142414341444145414641474148414941504151415241534154415541564157415841594160416141624163416441654166416741684169417041714172417341744175417641774178417941804181418241834184418541864187418841894190419141924193419441954196419741984199420042014202420342044205420642074208420942104211421242134214421542164217421842194220422142224223422442254226422742284229423042314232423342344235423642374238423942404241424242434244424542464247424842494250425142524253425442554256425742584259426042614262426342644265426642674268426942704271427242734274427542764
27742784279428042814282428342844285428642874288428942904291429242934294429542964297429842994300430143024303430443054306430743084309431043114312431343144315431643174318431943204321432243234324432543264327432843294330433143324333433443354336433743384339434043414342434343444345434643474348434943504351435243534354435543564357435843594360436143624363436443654366436743684369437043714372437343744375437643774378437943804381438243834384438543864387438843894390439143924393439443954396439743984399440044014402440344044405440644074408440944104411441244134414441544164417441844194420442144224423442444254426442744284429443044314432443344344435443644374438443944404441444244434444444544464447444844494450445144524453445444554456445744584459446044614462446344644465446644674468446944704471447244734474447544764477447844794480448144824483448444854486448744884489449044914492449344944495449644974498449945004501450245034504450545064507450845094510451145124513451445154516451745184519452045214522452345244525452645274528452945304531453245334534453545364537453845394540454145424543454445454546454745484549455045514552455345544555455645574558455945604561456245634564456545664567456845694570457145724573457445754576457745784579458045814582458345844585458645874588458945904591459245934594459545964597459845994600460146024603460446054606460746084609461046114612461346144615461646174618461946204621462246234624462546264627462846294630463146324633463446354636463746384639464046414642464346444645464646474648464946504651465246534654465546564657465846594660466146624663466446654666466746684669467046714672467346744675467646774678467946804681468246834684468546864687468846894690469146924693469446954696469746984699470047014702470347044705470647074708470947104711471247134714471547164717471847194720472147224723472447254726472747284729473047314732473347344735473647374738473947404741474247434744474547464747474847494750475147524753475447554756475747584759476047614762476347644765476647674768476947704771477247734774477547764
77747784779478047814782478347844785478647874788478947904791479247934794479547964797479847994800480148024803480448054806480748084809481048114812481348144815481648174818481948204821482248234824482548264827482848294830483148324833483448354836483748384839484048414842484348444845484648474848484948504851485248534854485548564857485848594860486148624863486448654866486748684869487048714872487348744875487648774878487948804881488248834884488548864887488848894890489148924893489448954896489748984899490049014902490349044905490649074908490949104911491249134914491549164917491849194920492149224923492449254926492749284929493049314932493349344935493649374938493949404941494249434944494549464947494849494950495149524953495449554956495749584959496049614962496349644965496649674968496949704971497249734974497549764977497849794980498149824983498449854986498749884989499049914992499349944995499649974998499950005001500250035004500550065007500850095010501150125013501450155016501750185019502050215022502350245025502650275028502950305031503250335034503550365037503850395040504150425043504450455046504750485049505050515052505350545055505650575058505950605061506250635064506550665067506850695070507150725073507450755076507750785079508050815082508350845085508650875088508950905091509250935094509550965097509850995100510151025103510451055106510751085109511051115112511351145115511651175118511951205121512251235124512551265127512851295130513151325133513451355136513751385139514051415142514351445145514651475148514951505151515251535154515551565157515851595160516151625163516451655166516751685169517051715172517351745175517651775178517951805181518251835184518551865187518851895190519151925193519451955196519751985199520052015202520352045205520652075208520952105211521252135214521552165217521852195220522152225223522452255226522752285229523052315232523352345235523652375238523952405241524252435244524552465247524852495250525152525253525452555256525752585259526052615262526352645265526652675268526952705271527252735274527552765
27752785279528052815282528352845285528652875288528952905291529252935294529552965297529852995300530153025303530453055306530753085309531053115312531353145315531653175318531953205321532253235324532553265327532853295330533153325333533453355336533753385339534053415342534353445345534653475348534953505351535253535354535553565357535853595360536153625363536453655366536753685369537053715372537353745375537653775378537953805381538253835384538553865387538853895390539153925393539453955396539753985399540054015402540354045405540654075408540954105411541254135414541554165417541854195420542154225423542454255426542754285429543054315432543354345435543654375438543954405441544254435444544554465447544854495450545154525453545454555456545754585459546054615462546354645465546654675468546954705471547254735474547554765477547854795480548154825483548454855486548754885489549054915492549354945495549654975498549955005501550255035504550555065507550855095510551155125513551455155516551755185519552055215522552355245525552655275528552955305531553255335534553555365537553855395540554155425543554455455546554755485549555055515552555355545555555655575558555955605561556255635564556555665567556855695570557155725573557455755576557755785579558055815582558355845585558655875588558955905591559255935594559555965597559855995600560156025603560456055606560756085609561056115612561356145615561656175618561956205621562256235624562556265627562856295630563156325633563456355636563756385639564056415642564356445645564656475648564956505651565256535654565556565657565856595660566156625663566456655666566756685669567056715672567356745675567656775678567956805681568256835684568556865687568856895690569156925693569456955696569756985699570057015702570357045705570657075708570957105711571257135714571557165717571857195720572157225723572457255726572757285729573057315732573357345735573657375738573957405741574257435744574557465747574857495750575157525753575457555756575757585759576057615762576357645765576657675768576957705771577257735774577557765
77757785779578057815782578357845785578657875788578957905791579257935794579557965797579857995800580158025803580458055806580758085809581058115812581358145815581658175818581958205821582258235824582558265827582858295830583158325833583458355836583758385839584058415842584358445845584658475848584958505851585258535854585558565857585858595860586158625863586458655866586758685869587058715872587358745875587658775878587958805881588258835884588558865887588858895890589158925893589458955896589758985899590059015902590359045905590659075908590959105911591259135914591559165917591859195920592159225923592459255926592759285929593059315932593359345935593659375938593959405941594259435944594559465947594859495950595159525953595459555956595759585959596059615962596359645965596659675968596959705971597259735974597559765977597859795980598159825983598459855986598759885989599059915992599359945995599659975998599960006001600260036004600560066007600860096010601160126013601460156016601760186019602060216022602360246025602660276028602960306031603260336034603560366037603860396040604160426043604460456046604760486049605060516052605360546055605660576058605960606061606260636064606560666067606860696070607160726073607460756076607760786079608060816082608360846085608660876088608960906091609260936094609560966097609860996100610161026103610461056106610761086109611061116112611361146115611661176118611961206121612261236124612561266127612861296130613161326133613461356136613761386139614061416142614361446145614661476148614961506151615261536154615561566157615861596160616161626163616461656166616761686169617061716172617361746175617661776178617961806181618261836184618561866187618861896190619161926193619461956196619761986199620062016202620362046205620662076208620962106211621262136214621562166217621862196220622162226223622462256226622762286229623062316232623362346235623662376238623962406241624262436244624562466247624862496250625162526253625462556256625762586259626062616262626362646265626662676268626962706271627262736274627562766
27762786279628062816282628362846285628662876288628962906291629262936294629562966297629862996300630163026303630463056306630763086309631063116312631363146315631663176318631963206321632263236324632563266327632863296330633163326333633463356336633763386339634063416342634363446345634663476348634963506351635263536354635563566357635863596360636163626363636463656366636763686369637063716372637363746375637663776378637963806381638263836384638563866387638863896390639163926393639463956396639763986399640064016402640364046405640664076408640964106411641264136414641564166417641864196420642164226423642464256426642764286429643064316432643364346435643664376438643964406441644264436444644564466447644864496450645164526453645464556456645764586459646064616462646364646465646664676468646964706471647264736474647564766477647864796480648164826483648464856486648764886489649064916492649364946495649664976498649965006501650265036504650565066507650865096510651165126513651465156516651765186519652065216522652365246525652665276528652965306531653265336534653565366537653865396540654165426543654465456546654765486549655065516552655365546555655665576558655965606561656265636564656565666567656865696570657165726573657465756576657765786579658065816582658365846585658665876588658965906591659265936594659565966597659865996600660166026603660466056606660766086609661066116612661366146615661666176618661966206621662266236624662566266627662866296630663166326633663466356636663766386639664066416642664366446645664666476648664966506651665266536654665566566657665866596660666166626663666466656666666766686669667066716672667366746675667666776678667966806681668266836684668566866687668866896690669166926693669466956696669766986699670067016702670367046705670667076708670967106711671267136714671567166717671867196720672167226723672467256726672767286729673067316732673367346735673667376738673967406741674267436744674567466747674867496750675167526753675467556756675767586759676067616762676367646765676667676768676967706771677267736774677567766
77767786779678067816782678367846785678667876788678967906791679267936794679567966797679867996800680168026803680468056806680768086809681068116812681368146815681668176818681968206821682268236824682568266827682868296830683168326833683468356836683768386839684068416842684368446845684668476848684968506851685268536854685568566857685868596860686168626863686468656866686768686869687068716872687368746875687668776878687968806881688268836884688568866887688868896890689168926893689468956896689768986899690069016902690369046905690669076908690969106911691269136914691569166917691869196920692169226923692469256926692769286929693069316932693369346935693669376938693969406941694269436944694569466947694869496950695169526953695469556956695769586959696069616962696369646965696669676968696969706971697269736974697569766977697869796980698169826983698469856986698769886989699069916992699369946995699669976998699970007001700270037004700570067007700870097010701170127013701470157016701770187019702070217022702370247025702670277028702970307031703270337034703570367037703870397040704170427043704470457046704770487049705070517052705370547055705670577058705970607061706270637064706570667067706870697070707170727073707470757076707770787079708070817082708370847085708670877088708970907091709270937094709570967097709870997100710171027103710471057106710771087109711071117112711371147115711671177118711971207121712271237124712571267127712871297130713171327133713471357136713771387139714071417142714371447145714671477148714971507151715271537154715571567157715871597160716171627163716471657166716771687169717071717172717371747175717671777178717971807181718271837184718571867187718871897190719171927193719471957196719771987199720072017202720372047205720672077208720972107211721272137214721572167217721872197220722172227223722472257226722772287229723072317232723372347235723672377238723972407241724272437244724572467247724872497250725172527253725472557256725772587259726072617262726372647265726672677268726972707271727272737274727572767
27772787279728072817282728372847285728672877288728972907291729272937294729572967297729872997300730173027303730473057306730773087309731073117312731373147315731673177318731973207321732273237324732573267327732873297330733173327333733473357336733773387339734073417342734373447345734673477348734973507351735273537354735573567357735873597360736173627363736473657366736773687369737073717372737373747375737673777378737973807381738273837384738573867387738873897390739173927393739473957396739773987399740074017402740374047405740674077408740974107411741274137414741574167417741874197420742174227423742474257426742774287429743074317432743374347435743674377438743974407441744274437444744574467447744874497450745174527453745474557456745774587459746074617462746374647465746674677468746974707471747274737474747574767477747874797480748174827483748474857486748774887489749074917492749374947495749674977498749975007501750275037504750575067507750875097510751175127513751475157516751775187519752075217522752375247525752675277528752975307531753275337534753575367537753875397540754175427543754475457546754775487549755075517552755375547555755675577558755975607561756275637564756575667567756875697570757175727573757475757576757775787579758075817582758375847585758675877588758975907591759275937594759575967597759875997600760176027603760476057606760776087609761076117612761376147615761676177618761976207621762276237624762576267627762876297630763176327633763476357636763776387639764076417642764376447645764676477648764976507651765276537654765576567657765876597660766176627663766476657666766776687669767076717672767376747675767676777678767976807681768276837684768576867687768876897690769176927693769476957696769776987699770077017702770377047705770677077708770977107711771277137714771577167717771877197720772177227723772477257726772777287729773077317732773377347735773677377738773977407741774277437744774577467747774877497750775177527753775477557756775777587759776077617762776377647765776677677768776977707771777277737774777577767
77777787779778077817782778377847785778677877788778977907791779277937794779577967797779877997800780178027803780478057806780778087809781078117812781378147815781678177818781978207821782278237824782578267827782878297830783178327833783478357836783778387839784078417842784378447845784678477848784978507851785278537854785578567857785878597860786178627863786478657866786778687869787078717872787378747875787678777878787978807881788278837884788578867887788878897890789178927893789478957896789778987899790079017902790379047905790679077908790979107911791279137914791579167917791879197920792179227923792479257926792779287929793079317932793379347935793679377938793979407941794279437944794579467947794879497950795179527953795479557956795779587959796079617962796379647965796679677968796979707971797279737974797579767977797879797980798179827983798479857986798779887989799079917992799379947995799679977998799980008001800280038004800580068007800880098010801180128013801480158016801780188019802080218022802380248025802680278028802980308031803280338034803580368037803880398040804180428043804480458046804780488049805080518052805380548055805680578058805980608061806280638064806580668067806880698070807180728073807480758076807780788079808080818082808380848085808680878088808980908091809280938094809580968097809880998100810181028103810481058106810781088109811081118112811381148115811681178118811981208121812281238124812581268127812881298130813181328133813481358136813781388139814081418142814381448145814681478148814981508151815281538154815581568157815881598160816181628163816481658166816781688169817081718172817381748175817681778178817981808181818281838184818581868187818881898190819181928193819481958196819781988199820082018202820382048205820682078208820982108211821282138214821582168217821882198220822182228223822482258226822782288229823082318232823382348235823682378238823982408241824282438244824582468247824882498250825182528253825482558256825782588259826082618262826382648265826682678268826982708271827282738274827582768
27782788279828082818282828382848285828682878288828982908291829282938294829582968297829882998300830183028303830483058306830783088309831083118312831383148315831683178318831983208321832283238324832583268327832883298330833183328333833483358336833783388339834083418342834383448345834683478348834983508351835283538354835583568357835883598360836183628363836483658366836783688369837083718372837383748375837683778378837983808381838283838384838583868387838883898390839183928393839483958396839783988399840084018402840384048405840684078408840984108411841284138414841584168417841884198420842184228423842484258426842784288429843084318432843384348435843684378438843984408441844284438444844584468447844884498450845184528453845484558456845784588459846084618462846384648465846684678468846984708471847284738474847584768477847884798480848184828483848484858486848784888489849084918492849384948495849684978498849985008501850285038504850585068507850885098510851185128513851485158516851785188519852085218522852385248525852685278528852985308531853285338534853585368537853885398540854185428543854485458546854785488549855085518552855385548555855685578558855985608561856285638564856585668567856885698570857185728573857485758576857785788579858085818582858385848585858685878588858985908591859285938594859585968597859885998600860186028603860486058606860786088609861086118612861386148615861686178618861986208621862286238624862586268627862886298630863186328633863486358636863786388639864086418642864386448645864686478648864986508651865286538654865586568657865886598660866186628663866486658666866786688669867086718672867386748675867686778678867986808681868286838684868586868687868886898690869186928693869486958696869786988699870087018702870387048705870687078708870987108711871287138714871587168717871887198720872187228723872487258726872787288729873087318732873387348735873687378738873987408741874287438744874587468747874887498750875187528753875487558756875787588759876087618762876387648765876687678768876987708771877287738774877587768
77787788779878087818782878387848785878687878788878987908791879287938794879587968797879887998800880188028803880488058806880788088809881088118812881388148815881688178818881988208821882288238824882588268827882888298830883188328833883488358836883788388839884088418842884388448845884688478848884988508851885288538854885588568857885888598860886188628863886488658866886788688869887088718872887388748875887688778878887988808881888288838884888588868887888888898890889188928893889488958896889788988899890089018902890389048905890689078908890989108911891289138914891589168917891889198920892189228923892489258926892789288929893089318932893389348935893689378938893989408941894289438944894589468947894889498950895189528953895489558956895789588959896089618962896389648965896689678968896989708971897289738974897589768977897889798980898189828983898489858986898789888989899089918992899389948995899689978998899990009001900290039004900590069007900890099010901190129013901490159016901790189019902090219022902390249025902690279028902990309031903290339034903590369037903890399040904190429043904490459046904790489049905090519052905390549055905690579058905990609061906290639064906590669067906890699070907190729073907490759076907790789079908090819082908390849085908690879088908990909091909290939094909590969097909890999100910191029103910491059106910791089109911091119112911391149115911691179118911991209121912291239124912591269127912891299130913191329133913491359136913791389139914091419142914391449145914691479148914991509151915291539154915591569157915891599160916191629163916491659166916791689169917091719172917391749175917691779178917991809181918291839184918591869187918891899190919191929193919491959196919791989199920092019202920392049205920692079208920992109211921292139214921592169217921892199220922192229223922492259226922792289229923092319232923392349235923692379238923992409241924292439244924592469247924892499250925192529253925492559256925792589259926092619262926392649265926692679268926992709271927292739274927592769
27792789279928092819282928392849285928692879288928992909291929292939294929592969297929892999300930193029303930493059306930793089309931093119312931393149315931693179318931993209321932293239324932593269327932893299330933193329333933493359336933793389339934093419342934393449345934693479348934993509351935293539354935593569357935893599360936193629363936493659366936793689369937093719372937393749375937693779378937993809381938293839384938593869387938893899390939193929393939493959396939793989399940094019402940394049405940694079408940994109411941294139414941594169417941894199420942194229423942494259426942794289429943094319432943394349435943694379438943994409441944294439444944594469447944894499450945194529453945494559456945794589459946094619462946394649465946694679468946994709471947294739474947594769477947894799480948194829483948494859486948794889489949094919492949394949495949694979498949995009501950295039504950595069507950895099510951195129513951495159516951795189519952095219522952395249525952695279528952995309531953295339534953595369537953895399540954195429543954495459546954795489549955095519552955395549555955695579558955995609561956295639564956595669567956895699570957195729573957495759576957795789579958095819582958395849585958695879588958995909591959295939594959595969597959895999600960196029603960496059606960796089609961096119612961396149615961696179618961996209621962296239624962596269627962896299630963196329633963496359636963796389639964096419642964396449645964696479648964996509651965296539654965596569657965896599660966196629663966496659666966796689669967096719672967396749675967696779678967996809681968296839684968596869687968896899690969196929693969496959696969796989699970097019702970397049705970697079708970997109711971297139714971597169717971897199720972197229723972497259726972797289729973097319732973397349735973697379738973997409741974297439744974597469747974897499750975197529753975497559756975797589759976097619762976397649765976697679768976997709771977297739774977597769
77797789779978097819782978397849785978697879788978997909791979297939794979597969797979897999800980198029803980498059806980798089809981098119812981398149815981698179818981998209821982298239824982598269827982898299830983198329833983498359836983798389839984098419842984398449845984698479848984998509851985298539854985598569857985898599860986198629863986498659866986798689869987098719872987398749875987698779878987998809881988298839884988598869887988898899890989198929893989498959896989798989899990099019902990399049905990699079908990999109911991299139914991599169917991899199920992199229923992499259926992799289929993099319932993399349935993699379938993999409941994299439944994599469947994899499950995199529953995499559956995799589959996099619962996399649965996699679968996999709971997299739974997599769977997899799980998199829983998499859986998799889989999099919992999399949995999699979998999910000100011000210003100041000510006100071000810009100101001110012100131001410015100161001710018100191002010021100221002310024100251002610027100281002910030100311003210033100341003510036100371003810039100401004110042100431004410045100461004710048100491005010051100521005310054100551005610057100581005910060100611006210063100641006510066100671006810069100701007110072100731007410075100761007710078100791008010081100821008310084100851008610087100881008910090100911009210093100941009510096100971009810099101001010110102101031010410105101061010710108101091011010111101121011310114101151011610117101181011910120101211012210123101241012510126101271012810129101301013110132101331013410135101361013710138101391014010141101421014310144101451014610147101481014910150101511015210153101541015510156101571015810159101601016110162101631016410165101661016710168101691017010171101721017310174101751017610177101781017910180101811018210183101841018510186101871018810189101901019110192101931019410195101961019710198101991020010201102021020310204102051020610207102081020910210102111021210213102141021510216102171021810219102201022
11022210223102241022510226102271022810229102301023110232102331023410235102361023710238102391024010241102421024310244102451024610247102481024910250102511025210253102541025510256102571025810259102601026110262102631026410265102661026710268102691027010271102721027310274102751027610277102781027910280102811028210283102841028510286102871028810289102901029110292102931029410295102961029710298102991030010301103021030310304103051030610307103081030910310103111031210313103141031510316103171031810319103201032110322103231032410325103261032710328103291033010331103321033310334103351033610337103381033910340103411034210343103441034510346103471034810349103501035110352103531035410355103561035710358103591036010361103621036310364103651036610367103681036910370103711037210373103741037510376103771037810379103801038110382103831038410385103861038710388103891039010391103921039310394103951039610397103981039910400104011040210403104041040510406104071040810409104101041110412104131041410415104161041710418104191042010421104221042310424104251042610427104281042910430104311043210433104341043510436104371043810439104401044110442104431044410445104461044710448104491045010451104521045310454104551045610457104581045910460104611046210463104641046510466104671046810469104701047110472104731047410475104761047710478104791048010481104821048310484104851048610487104881048910490104911049210493104941049510496104971049810499105001050110502105031050410505105061050710508105091051010511105121051310514105151051610517105181051910520105211052210523105241052510526105271052810529105301053110532105331053410535105361053710538105391054010541105421054310544105451054610547105481054910550105511055210553105541055510556105571055810559105601056110562105631056410565105661056710568105691057010571105721057310574105751057610577105781057910580105811058210583105841058510586105871058810589105901059110592105931059410595105961059710598105991060010601106021060310604106051060610607106081060910610106111061210613106141061510616106171061810619106201062
11062210623106241062510626106271062810629106301063110632106331063410635106361063710638106391064010641106421064310644106451064610647106481064910650106511065210653106541065510656106571065810659106601066110662106631066410665106661066710668106691067010671106721067310674106751067610677106781067910680106811068210683106841068510686106871068810689106901069110692106931069410695106961069710698106991070010701107021070310704107051070610707107081070910710107111071210713107141071510716107171071810719107201072110722107231072410725107261072710728107291073010731107321073310734107351073610737107381073910740107411074210743107441074510746107471074810749107501075110752107531075410755107561075710758107591076010761107621076310764107651076610767107681076910770107711077210773107741077510776107771077810779107801078110782107831078410785107861078710788107891079010791107921079310794107951079610797107981079910800108011080210803108041080510806108071080810809108101081110812108131081410815108161081710818108191082010821108221082310824108251082610827108281082910830108311083210833108341083510836108371083810839108401084110842108431084410845108461084710848108491085010851108521085310854108551085610857108581085910860108611086210863108641086510866108671086810869108701087110872108731087410875108761087710878108791088010881108821088310884108851088610887108881088910890108911089210893108941089510896108971089810899109001090110902109031090410905109061090710908109091091010911109121091310914109151091610917109181091910920109211092210923109241092510926109271092810929109301093110932109331093410935109361093710938109391094010941109421094310944109451094610947109481094910950109511095210953109541095510956109571095810959109601096110962109631096410965109661096710968109691097010971109721097310974109751097610977109781097910980109811098210983109841098510986109871098810989109901099110992109931099410995109961099710998109991100011001110021100311004110051100611007110081100911010110111101211013110141101511016110171101811019110201102
11102211023110241102511026110271102811029110301103111032110331103411035110361103711038110391104011041110421104311044110451104611047110481104911050110511105211053110541105511056110571105811059110601106111062110631106411065110661106711068110691107011071110721107311074110751107611077110781107911080110811108211083110841108511086110871108811089110901109111092110931109411095110961109711098110991110011101111021110311104111051110611107111081110911110111111111211113111141111511116111171111811119111201112111122111231112411125111261112711128111291113011131111321113311134111351113611137111381113911140111411114211143111441114511146111471114811149111501115111152111531115411155111561115711158111591116011161111621116311164111651116611167111681116911170111711117211173111741117511176111771117811179111801118111182111831118411185111861118711188111891119011191111921119311194111951119611197111981119911200112011120211203112041120511206112071120811209112101121111212112131121411215112161121711218112191122011221112221122311224112251122611227112281122911230112311123211233112341123511236112371123811239112401124111242112431124411245112461124711248112491125011251112521125311254112551125611257112581125911260112611126211263112641126511266112671126811269112701127111272112731127411275112761127711278112791128011281112821128311284112851128611287112881128911290112911129211293112941129511296112971129811299113001130111302113031130411305113061130711308113091131011311113121131311314113151131611317113181131911320113211132211323113241132511326113271132811329113301133111332113331133411335113361133711338113391134011341113421134311344113451134611347113481134911350113511135211353113541135511356113571135811359113601136111362113631136411365113661136711368113691137011371113721137311374113751137611377113781137911380113811138211383113841138511386113871138811389113901139111392113931139411395113961139711398113991140011401114021140311404114051140611407114081140911410114111141211413114141141511416114171141811419114201142
11142211423114241142511426114271142811429114301143111432114331143411435114361143711438114391144011441114421144311444114451144611447114481144911450114511145211453114541145511456114571145811459114601146111462114631146411465114661146711468114691147011471114721147311474114751147611477114781147911480114811148211483114841148511486114871148811489114901149111492114931149411495114961149711498114991150011501115021150311504115051150611507115081150911510115111151211513115141151511516115171151811519115201152111522115231152411525115261152711528115291153011531115321153311534115351153611537115381153911540115411154211543115441154511546115471154811549115501155111552115531155411555115561155711558115591156011561115621156311564115651156611567115681156911570115711157211573115741157511576115771157811579115801158111582115831158411585115861158711588115891159011591115921159311594115951159611597115981159911600116011160211603116041160511606116071160811609116101161111612116131161411615116161161711618116191162011621116221162311624116251162611627116281162911630116311163211633116341163511636116371163811639116401164111642116431164411645116461164711648116491165011651116521165311654116551165611657116581165911660116611166211663116641166511666116671166811669116701167111672116731167411675116761167711678116791168011681116821168311684116851168611687116881168911690116911169211693116941169511696116971169811699117001170111702117031170411705117061170711708117091171011711117121171311714117151171611717117181171911720117211172211723117241172511726117271172811729117301173111732117331173411735117361173711738117391174011741117421174311744117451174611747117481174911750117511175211753117541175511756117571175811759117601176111762117631176411765117661176711768117691177011771117721177311774117751177611777117781177911780117811178211783117841178511786117871178811789117901179111792117931179411795117961179711798117991180011801118021180311804118051180611807118081180911810118111181211813118141181511816118171181811819118201182
11182211823118241182511826118271182811829118301183111832118331183411835118361183711838118391184011841118421184311844118451184611847118481184911850118511185211853118541185511856118571185811859118601186111862118631186411865118661186711868118691187011871118721187311874118751187611877118781187911880118811188211883118841188511886118871188811889118901189111892118931189411895118961189711898118991190011901119021190311904119051190611907119081190911910119111191211913119141191511916119171191811919119201192111922119231192411925119261192711928119291193011931119321193311934119351193611937119381193911940119411194211943119441194511946119471194811949119501195111952119531195411955119561195711958119591196011961119621196311964119651196611967119681196911970119711197211973119741197511976119771197811979119801198111982119831198411985119861198711988119891199011991119921199311994119951199611997119981199912000120011200212003120041200512006120071200812009120101201112012120131201412015120161201712018120191202012021120221202312024120251202612027120281202912030120311203212033120341203512036120371203812039120401204112042120431204412045120461204712048120491205012051120521205312054120551205612057120581205912060120611206212063120641206512066120671206812069120701207112072120731207412075120761207712078120791208012081120821208312084120851208612087120881208912090120911209212093120941209512096120971209812099121001210112102121031210412105121061210712108121091211012111121121211312114121151211612117121181211912120121211212212123121241212512126121271212812129121301213112132121331213412135121361213712138121391214012141121421214312144121451214612147121481214912150121511215212153121541215512156121571215812159121601216112162121631216412165121661216712168121691217012171121721217312174121751217612177121781217912180121811218212183121841218512186121871218812189121901219112192121931219412195121961219712198121991220012201122021220312204122051220612207122081220912210122111221212213122141221512216122171221812219122201222
11222212223122241222512226122271222812229122301223112232122331223412235122361223712238122391224012241122421224312244122451224612247122481224912250122511225212253122541225512256122571225812259122601226112262122631226412265122661226712268122691227012271122721227312274122751227612277122781227912280122811228212283122841228512286122871228812289122901229112292122931229412295122961229712298122991230012301123021230312304123051230612307123081230912310123111231212313123141231512316123171231812319123201232112322123231232412325123261232712328123291233012331123321233312334123351233612337123381233912340123411234212343123441234512346123471234812349123501235112352123531235412355123561235712358123591236012361123621236312364123651236612367123681236912370123711237212373123741237512376123771237812379123801238112382123831238412385123861238712388123891239012391123921239312394123951239612397123981239912400124011240212403124041240512406124071240812409124101241112412124131241412415124161241712418124191242012421124221242312424124251242612427124281242912430124311243212433124341243512436124371243812439124401244112442124431244412445124461244712448124491245012451124521245312454124551245612457124581245912460124611246212463124641246512466124671246812469124701247112472124731247412475124761247712478124791248012481124821248312484124851248612487124881248912490124911249212493124941249512496124971249812499125001250112502125031250412505125061250712508125091251012511125121251312514125151251612517125181251912520125211252212523125241252512526125271252812529125301253112532125331253412535125361253712538125391254012541125421254312544125451254612547125481254912550125511255212553125541255512556125571255812559125601256112562125631256412565125661256712568125691257012571125721257312574125751257612577125781257912580125811258212583125841258512586125871258812589125901259112592125931259412595125961259712598125991260012601126021260312604126051260612607126081260912610126111261212613126141261512616126171261812619126201262
11262212623126241262512626126271262812629126301263112632126331263412635126361263712638126391264012641126421264312644126451264612647126481264912650126511265212653126541265512656126571265812659126601266112662126631266412665126661266712668126691267012671126721267312674126751267612677126781267912680126811268212683126841268512686126871268812689126901269112692126931269412695126961269712698126991270012701127021270312704127051270612707127081270912710127111271212713127141271512716127171271812719127201272112722127231272412725127261272712728127291273012731127321273312734127351273612737127381273912740127411274212743127441274512746127471274812749127501275112752127531275412755127561275712758127591276012761127621276312764127651276612767127681276912770127711277212773127741277512776127771277812779127801278112782127831278412785127861278712788127891279012791127921279312794127951279612797127981279912800128011280212803128041280512806128071280812809128101281112812128131281412815128161281712818128191282012821128221282312824128251282612827128281282912830128311283212833128341283512836128371283812839128401284112842128431284412845128461284712848128491285012851128521285312854128551285612857128581285912860128611286212863128641286512866128671286812869128701287112872128731287412875128761287712878128791288012881128821288312884128851288612887128881288912890128911289212893128941289512896128971289812899129001290112902129031290412905129061290712908129091291012911129121291312914129151291612917129181291912920129211292212923129241292512926129271292812929129301293112932129331293412935129361293712938129391294012941129421294312944129451294612947129481294912950129511295212953129541295512956129571295812959129601296112962129631296412965129661296712968129691297012971129721297312974129751297612977129781297912980129811298212983129841298512986129871298812989129901299112992129931299412995129961299712998129991300013001130021300313004130051300613007130081300913010130111301213013130141301513016130171301813019130201302
11302213023130241302513026130271302813029130301303113032130331303413035130361303713038130391304013041130421304313044130451304613047130481304913050130511305213053130541305513056130571305813059130601306113062130631306413065130661306713068130691307013071130721307313074130751307613077130781307913080130811308213083130841308513086130871308813089130901309113092130931309413095130961309713098130991310013101131021310313104131051310613107131081310913110131111311213113131141311513116131171311813119131201312113122131231312413125131261312713128131291313013131131321313313134131351313613137131381313913140131411314213143131441314513146131471314813149131501315113152131531315413155131561315713158131591316013161131621316313164131651316613167131681316913170131711317213173131741317513176131771317813179131801318113182131831318413185131861318713188131891319013191131921319313194131951319613197131981319913200132011320213203132041320513206132071320813209132101321113212132131321413215132161321713218132191322013221132221322313224132251322613227132281322913230132311323213233132341323513236132371323813239132401324113242132431324413245132461324713248132491325013251132521325313254132551325613257132581325913260132611326213263132641326513266132671326813269132701327113272132731327413275132761327713278132791328013281132821328313284132851328613287132881328913290132911329213293132941329513296132971329813299133001330113302133031330413305133061330713308133091331013311133121331313314133151331613317133181331913320133211332213323133241332513326133271332813329133301333113332133331333413335133361333713338133391334013341133421334313344133451334613347133481334913350133511335213353133541335513356133571335813359133601336113362133631336413365133661336713368133691337013371133721337313374133751337613377133781337913380133811338213383133841338513386133871338813389133901339113392133931339413395133961339713398133991340013401134021340313404134051340613407134081340913410134111341213413134141341513416134171341813419134201342
11342213423134241342513426134271342813429134301343113432134331343413435134361343713438134391344013441134421344313444134451344613447134481344913450134511345213453134541345513456134571345813459134601346113462134631346413465134661346713468134691347013471134721347313474134751347613477134781347913480134811348213483134841348513486134871348813489134901349113492134931349413495134961349713498134991350013501135021350313504135051350613507135081350913510135111351213513135141351513516135171351813519135201352113522135231352413525135261352713528135291353013531135321353313534135351353613537135381353913540135411354213543135441354513546135471354813549135501355113552135531355413555135561355713558135591356013561135621356313564135651356613567135681356913570135711357213573135741357513576135771357813579135801358113582135831358413585135861358713588135891359013591135921359313594135951359613597135981359913600136011360213603136041360513606136071360813609136101361113612136131361413615136161361713618136191362013621136221362313624136251362613627136281362913630136311363213633136341363513636136371363813639136401364113642136431364413645136461364713648136491365013651136521365313654136551365613657136581365913660136611366213663136641366513666136671366813669136701367113672136731367413675136761367713678136791368013681136821368313684136851368613687136881368913690136911369213693136941369513696136971369813699137001370113702137031370413705137061370713708137091371013711137121371313714137151371613717137181371913720137211372213723137241372513726137271372813729137301373113732137331373413735137361373713738137391374013741137421374313744137451374613747137481374913750137511375213753137541375513756137571375813759137601376113762137631376413765137661376713768137691377013771137721377313774137751377613777137781377913780137811378213783137841378513786137871378813789137901379113792137931379413795137961379713798137991380013801138021380313804138051380613807138081380913810138111381213813138141381513816138171381813819138201382
11382213823138241382513826138271382813829138301383113832138331383413835138361383713838138391384013841138421384313844138451384613847138481384913850138511385213853138541385513856138571385813859138601386113862138631386413865138661386713868138691387013871138721387313874138751387613877138781387913880138811388213883138841388513886138871388813889138901389113892138931389413895138961389713898138991390013901139021390313904139051390613907139081390913910139111391213913139141391513916139171391813919139201392113922139231392413925139261392713928139291393013931139321393313934139351393613937139381393913940139411394213943139441394513946139471394813949139501395113952139531395413955139561395713958139591396013961139621396313964139651396613967139681396913970139711397213973139741397513976139771397813979139801398113982139831398413985139861398713988139891399013991139921399313994139951399613997139981399914000140011400214003140041400514006140071400814009140101401114012140131401414015140161401714018140191402014021140221402314024140251402614027140281402914030140311403214033140341403514036140371403814039140401404114042140431404414045140461404714048140491405014051140521405314054140551405614057140581405914060140611406214063140641406514066140671406814069140701407114072140731407414075140761407714078140791408014081140821408314084140851408614087140881408914090140911409214093140941409514096140971409814099141001410114102141031410414105141061410714108141091411014111141121411314114141151411614117141181411914120141211412214123141241412514126141271412814129141301413114132141331413414135141361413714138141391414014141141421414314144141451414614147141481414914150141511415214153141541415514156141571415814159141601416114162141631416414165141661416714168141691417014171141721417314174141751417614177141781417914180141811418214183141841418514186141871418814189141901419114192141931419414195141961419714198141991420014201142021420314204142051420614207142081420914210142111421214213142141421514216142171421814219142201422
11422214223142241422514226142271422814229142301423114232142331423414235142361423714238142391424014241142421424314244142451424614247142481424914250142511425214253142541425514256142571425814259142601426114262142631426414265142661426714268142691427014271142721427314274142751427614277142781427914280142811428214283142841428514286142871428814289142901429114292142931429414295142961429714298142991430014301143021430314304143051430614307143081430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462
11462214623146241462514626146271462814629146301463114632146331463414635146361463714638146391464014641146421464314644146451464614647146481464914650146511465214653146541465514656146571465814659146601466114662146631466414665146661466714668146691467014671146721467314674146751467614677146781467914680146811468214683146841468514686146871468814689146901469114692146931469414695146961469714698146991470014701147021470314704147051470614707147081470914710147111471214713147141471514716147171471814719147201472114722147231472414725147261472714728147291473014731147321473314734147351473614737147381473914740147411474214743147441474514746147471474814749147501475114752147531475414755147561475714758147591476014761147621476314764147651476614767147681476914770147711477214773147741477514776147771477814779147801478114782147831478414785147861478714788147891479014791147921479314794147951479614797147981479914800148011480214803148041480514806148071480814809148101481114812148131481414815148161481714818148191482014821148221482314824148251482614827148281482914830148311483214833148341483514836148371483814839148401484114842148431484414845148461484714848148491485014851148521485314854148551485614857148581485914860148611486214863148641486514866148671486814869148701487114872148731487414875148761487714878148791488014881148821488314884148851488614887148881488914890148911489214893148941489514896148971489814899149001490114902149031490414905149061490714908149091491014911149121491314914149151491614917149181491914920149211492214923149241492514926149271492814929149301493114932149331493414935149361493714938149391494014941149421494314944149451494614947149481494914950149511495214953149541495514956149571495814959149601496114962149631496414965149661496714968149691497014971149721497314974149751497614977149781497914980149811498214983149841498514986149871498814989149901499114992149931499414995149961499714998149991500015001150021500315004150051500615007150081500915010150111501215013150141501515016150171501815019150201502
11502215023150241502515026150271502815029150301503115032150331503415035150361503715038150391504015041150421504315044150451504615047150481504915050150511505215053150541505515056150571505815059150601506115062150631506415065150661506715068150691507015071150721507315074150751507615077150781507915080150811508215083150841508515086150871508815089150901509115092150931509415095150961509715098150991510015101151021510315104151051510615107151081510915110151111511215113151141511515116151171511815119151201512115122151231512415125151261512715128151291513015131151321513315134151351513615137151381513915140151411514215143151441514515146151471514815149151501515115152151531515415155151561515715158151591516015161151621516315164151651516615167151681516915170151711517215173151741517515176151771517815179151801518115182151831518415185151861518715188151891519015191151921519315194151951519615197151981519915200152011520215203152041520515206152071520815209152101521115212152131521415215152161521715218152191522015221152221522315224152251522615227152281522915230152311523215233152341523515236152371523815239152401524115242152431524415245152461524715248152491525015251152521525315254152551525615257152581525915260152611526215263152641526515266152671526815269152701527115272152731527415275152761527715278152791528015281152821528315284152851528615287152881528915290152911529215293152941529515296152971529815299153001530115302153031530415305153061530715308153091531015311153121531315314153151531615317153181531915320153211532215323153241532515326153271532815329153301533115332153331533415335153361533715338153391534015341153421534315344153451534615347153481534915350153511535215353153541535515356153571535815359153601536115362153631536415365153661536715368153691537015371153721537315374153751537615377153781537915380153811538215383153841538515386153871538815389153901539115392153931539415395153961539715398153991540015401154021540315404154051540615407154081540915410154111541215413154141541515416154171541815419154201542
11542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582
11582215823158241582515826158271582815829158301583115832158331583415835158361583715838158391584015841158421584315844158451584615847158481584915850158511585215853158541585515856158571585815859158601586115862158631586415865158661586715868158691587015871158721587315874158751587615877158781587915880158811588215883158841588515886158871588815889158901589115892158931589415895158961589715898158991590015901159021590315904159051590615907159081590915910159111591215913159141591515916159171591815919159201592115922159231592415925159261592715928159291593015931159321593315934159351593615937159381593915940159411594215943159441594515946159471594815949159501595115952159531595415955159561595715958159591596015961159621596315964159651596615967159681596915970159711597215973159741597515976159771597815979159801598115982159831598415985159861598715988159891599015991159921599315994159951599615997159981599916000160011600216003160041600516006160071600816009160101601116012160131601416015160161601716018160191602016021160221602316024160251602616027160281602916030160311603216033160341603516036160371603816039160401604116042160431604416045160461604716048160491605016051160521605316054160551605616057160581605916060160611606216063160641606516066160671606816069160701607116072160731607416075160761607716078160791608016081160821608316084160851608616087160881608916090160911609216093160941609516096160971609816099161001610116102161031610416105161061610716108161091611016111161121611316114161151611616117161181611916120161211612216123161241612516126161271612816129161301613116132161331613416135161361613716138161391614016141161421614316144161451614616147161481614916150161511615216153161541615516156161571615816159161601616116162161631616416165161661616716168161691617016171161721617316174161751617616177161781617916180161811618216183161841618516186161871618816189161901619116192161931619416195161961619716198161991620016201162021620316204162051620616207162081620916210162111621216213162141621516216162171621816219162201622
11622216223162241622516226162271622816229162301623116232162331623416235162361623716238162391624016241162421624316244162451624616247162481624916250162511625216253162541625516256162571625816259162601626116262162631626416265162661626716268162691627016271162721627316274162751627616277162781627916280162811628216283162841628516286162871628816289162901629116292162931629416295162961629716298162991630016301163021630316304163051630616307163081630916310163111631216313163141631516316163171631816319163201632116322163231632416325163261632716328163291633016331163321633316334163351633616337163381633916340163411634216343163441634516346163471634816349163501635116352163531635416355163561635716358163591636016361163621636316364163651636616367163681636916370163711637216373163741637516376163771637816379163801638116382163831638416385163861638716388163891639016391163921639316394163951639616397163981639916400164011640216403164041640516406164071640816409164101641116412164131641416415164161641716418164191642016421164221642316424164251642616427164281642916430164311643216433164341643516436164371643816439164401644116442164431644416445164461644716448164491645016451164521645316454164551645616457164581645916460164611646216463164641646516466164671646816469164701647116472164731647416475164761647716478164791648016481164821648316484164851648616487164881648916490164911649216493164941649516496164971649816499165001650116502165031650416505165061650716508165091651016511165121651316514165151651616517165181651916520165211652216523165241652516526165271652816529165301653116532165331653416535165361653716538165391654016541165421654316544165451654616547165481654916550165511655216553165541655516556165571655816559165601656116562165631656416565165661656716568165691657016571165721657316574165751657616577165781657916580165811658216583165841658516586165871658816589165901659116592165931659416595165961659716598165991660016601166021660316604166051660616607166081660916610166111661216613166141661516616166171661816619166201662
11662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702
11702217023170241702517026170271702817029170301703117032170331703417035170361703717038170391704017041170421704317044170451704617047170481704917050170511705217053170541705517056170571705817059170601706117062170631706417065170661706717068170691707017071170721707317074170751707617077170781707917080170811708217083170841708517086170871708817089170901709117092170931709417095170961709717098170991710017101171021710317104171051710617107171081710917110171111711217113171141711517116171171711817119171201712117122171231712417125171261712717128171291713017131171321713317134171351713617137171381713917140171411714217143171441714517146171471714817149171501715117152171531715417155171561715717158171591716017161171621716317164171651716617167171681716917170171711717217173171741717517176171771717817179171801718117182171831718417185171861718717188171891719017191171921719317194171951719617197171981719917200172011720217203172041720517206172071720817209172101721117212172131721417215172161721717218172191722017221172221722317224172251722617227172281722917230172311723217233172341723517236172371723817239172401724117242172431724417245172461724717248172491725017251172521725317254172551725617257172581725917260172611726217263172641726517266172671726817269172701727117272172731727417275172761727717278172791728017281172821728317284172851728617287172881728917290172911729217293172941729517296172971729817299173001730117302173031730417305173061730717308173091731017311173121731317314173151731617317173181731917320173211732217323173241732517326173271732817329173301733117332173331733417335173361733717338173391734017341173421734317344173451734617347173481734917350173511735217353173541735517356173571735817359173601736117362173631736417365173661736717368173691737017371173721737317374173751737617377173781737917380173811738217383173841738517386173871738817389173901739117392173931739417395173961739717398173991740017401174021740317404174051740617407174081740917410174111741217413174141741517416174171741817419174201742
11742217423174241742517426174271742817429174301743117432174331743417435174361743717438174391744017441174421744317444174451744617447174481744917450174511745217453174541745517456174571745817459174601746117462174631746417465174661746717468174691747017471174721747317474174751747617477174781747917480174811748217483174841748517486174871748817489174901749117492174931749417495174961749717498174991750017501175021750317504175051750617507175081750917510175111751217513175141751517516175171751817519175201752117522175231752417525175261752717528175291753017531175321753317534175351753617537175381753917540175411754217543175441754517546175471754817549175501755117552175531755417555175561755717558175591756017561175621756317564175651756617567175681756917570175711757217573175741757517576175771757817579175801758117582175831758417585175861758717588175891759017591175921759317594175951759617597175981759917600176011760217603176041760517606176071760817609176101761117612176131761417615176161761717618176191762017621176221762317624176251762617627176281762917630176311763217633176341763517636176371763817639176401764117642176431764417645176461764717648176491765017651176521765317654176551765617657176581765917660176611766217663176641766517666176671766817669176701767117672176731767417675176761767717678176791768017681176821768317684176851768617687176881768917690176911769217693176941769517696176971769817699177001770117702177031770417705177061770717708177091771017711177121771317714177151771617717177181771917720177211772217723177241772517726177271772817729177301773117732177331773417735177361773717738177391774017741177421774317744177451774617747177481774917750177511775217753177541775517756177571775817759177601776117762177631776417765177661776717768177691777017771177721777317774177751777617777177781777917780177811778217783177841778517786177871778817789177901779117792177931779417795177961779717798177991780017801178021780317804178051780617807178081780917810178111781217813178141781517816178171781817819178201782
11782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822
11822218223182241822518226182271822818229182301823118232182331823418235182361823718238182391824018241182421824318244182451824618247182481824918250182511825218253182541825518256182571825818259182601826118262182631826418265182661826718268182691827018271182721827318274182751827618277182781827918280182811828218283182841828518286182871828818289182901829118292182931829418295182961829718298182991830018301183021830318304183051830618307183081830918310183111831218313183141831518316183171831818319183201832118322183231832418325183261832718328183291833018331183321833318334183351833618337183381833918340183411834218343183441834518346183471834818349183501835118352183531835418355183561835718358183591836018361183621836318364183651836618367183681836918370183711837218373183741837518376183771837818379183801838118382183831838418385183861838718388183891839018391183921839318394183951839618397183981839918400184011840218403184041840518406184071840818409184101841118412184131841418415184161841718418184191842018421184221842318424184251842618427184281842918430184311843218433184341843518436184371843818439184401844118442184431844418445184461844718448184491845018451184521845318454184551845618457184581845918460184611846218463184641846518466184671846818469184701847118472184731847418475184761847718478184791848018481184821848318484184851848618487184881848918490184911849218493184941849518496184971849818499185001850118502185031850418505185061850718508185091851018511185121851318514185151851618517185181851918520185211852218523185241852518526185271852818529185301853118532185331853418535185361853718538185391854018541185421854318544185451854618547185481854918550185511855218553185541855518556185571855818559185601856118562185631856418565185661856718568185691857018571185721857318574185751857618577185781857918580185811858218583185841858518586185871858818589185901859118592185931859418595185961859718598185991860018601186021860318604186051860618607186081860918610186111861218613186141861518616186171861818619186201862
11862218623186241862518626186271862818629186301863118632186331863418635186361863718638186391864018641186421864318644186451864618647186481864918650186511865218653186541865518656186571865818659186601866118662186631866418665186661866718668186691867018671186721867318674186751867618677186781867918680186811868218683186841868518686186871868818689186901869118692186931869418695186961869718698186991870018701187021870318704187051870618707187081870918710187111871218713187141871518716187171871818719187201872118722187231872418725187261872718728187291873018731187321873318734187351873618737187381873918740187411874218743187441874518746187471874818749187501875118752187531875418755187561875718758187591876018761187621876318764187651876618767187681876918770187711877218773187741877518776187771877818779187801878118782187831878418785187861878718788187891879018791187921879318794187951879618797187981879918800188011880218803188041880518806188071880818809188101881118812188131881418815188161881718818188191882018821188221882318824188251882618827188281882918830188311883218833188341883518836188371883818839188401884118842188431884418845188461884718848188491885018851188521885318854188551885618857188581885918860188611886218863188641886518866188671886818869188701887118872188731887418875188761887718878188791888018881188821888318884188851888618887188881888918890188911889218893188941889518896188971889818899189001890118902189031890418905189061890718908189091891018911189121891318914189151891618917189181891918920189211892218923189241892518926189271892818929189301893118932189331893418935189361893718938189391894018941189421894318944189451894618947189481894918950189511895218953189541895518956189571895818959189601896118962189631896418965189661896718968189691897018971189721897318974189751897618977189781897918980189811898218983189841898518986189871898818989189901899118992189931899418995189961899718998189991900019001190021900319004190051900619007190081900919010190111901219013190141901519016190171901819019190201902
11902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942
11942219423194241942519426194271942819429194301943119432194331943419435194361943719438194391944019441194421944319444194451944619447194481944919450194511945219453194541945519456194571945819459194601946119462194631946419465194661946719468194691947019471194721947319474194751947619477194781947919480194811948219483194841948519486194871948819489194901949119492194931949419495194961949719498194991950019501195021950319504195051950619507195081950919510195111951219513195141951519516195171951819519195201952119522195231952419525195261952719528195291953019531195321953319534195351953619537195381953919540195411954219543195441954519546195471954819549195501955119552195531955419555195561955719558195591956019561195621956319564195651956619567195681956919570195711957219573195741957519576195771957819579195801958119582195831958419585195861958719588195891959019591195921959319594195951959619597195981959919600196011960219603196041960519606196071960819609196101961119612196131961419615196161961719618196191962019621196221962319624196251962619627196281962919630196311963219633196341963519636196371963819639196401964119642196431964419645196461964719648196491965019651196521965319654196551965619657196581965919660196611966219663196641966519666196671966819669196701967119672196731967419675196761967719678196791968019681196821968319684196851968619687196881968919690196911969219693196941969519696196971969819699197001970119702197031970419705197061970719708197091971019711197121971319714197151971619717197181971919720197211972219723197241972519726197271972819729197301973119732197331973419735197361973719738197391974019741197421974319744197451974619747197481974919750197511975219753197541975519756197571975819759197601976119762197631976419765197661976719768197691977019771197721977319774197751977619777197781977919780197811978219783197841978519786197871978819789197901979119792197931979419795197961979719798197991980019801198021980319804198051980619807198081980919810198111981219813198141981519816198171981819819198201982
11982219823198241982519826198271982819829198301983119832198331983419835198361983719838198391984019841198421984319844198451984619847198481984919850198511985219853198541985519856198571985819859198601986119862198631986419865198661986719868198691987019871198721987319874198751987619877198781987919880198811988219883198841988519886198871988819889198901989119892198931989419895198961989719898198991990019901199021990319904199051990619907199081990919910199111991219913199141991519916199171991819919199201992119922199231992419925199261992719928199291993019931199321993319934199351993619937199381993919940199411994219943199441994519946199471994819949199501995119952199531995419955199561995719958199591996019961199621996319964199651996619967199681996919970199711997219973199741997519976199771997819979199801998119982199831998419985199861998719988199891999019991199921999319994199951999619997199981999920000200012000220003200042000520006200072000820009200102001120012200132001420015200162001720018200192002020021200222002320024200252002620027200282002920030200312003220033200342003520036200372003820039200402004120042200432004420045200462004720048200492005020051200522005320054200552005620057200582005920060200612006220063200642006520066200672006820069200702007120072200732007420075200762007720078200792008020081200822008320084200852008620087200882008920090200912009220093200942009520096200972009820099201002010120102201032010420105201062010720108201092011020111201122011320114201152011620117201182011920120201212012220123201242012520126201272012820129201302013120132201332013420135201362013720138201392014020141201422014320144201452014620147201482014920150201512015220153201542015520156201572015820159201602016120162201632016420165201662016720168201692017020171201722017320174201752017620177201782017920180201812018220183201842018520186201872018820189201902019120192201932019420195201962019720198201992020020201202022020320204202052020620207202082020920210202112021220213202142021520216202172021820219202202022
12022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062
12062220623206242062520626206272062820629206302063120632206332063420635206362063720638206392064020641206422064320644206452064620647206482064920650206512065220653206542065520656206572065820659206602066120662206632066420665206662066720668206692067020671206722067320674206752067620677206782067920680206812068220683206842068520686206872068820689206902069120692206932069420695206962069720698206992070020701207022070320704207052070620707207082070920710207112071220713207142071520716207172071820719207202072120722207232072420725207262072720728207292073020731207322073320734207352073620737207382073920740207412074220743207442074520746207472074820749207502075120752207532075420755207562075720758207592076020761207622076320764207652076620767207682076920770207712077220773207742077520776207772077820779207802078120782207832078420785207862078720788207892079020791207922079320794207952079620797207982079920800208012080220803208042080520806208072080820809208102081120812208132081420815208162081720818208192082020821208222082320824208252082620827208282082920830208312083220833208342083520836208372083820839208402084120842208432084420845208462084720848208492085020851208522085320854208552085620857208582085920860208612086220863208642086520866208672086820869208702087120872208732087420875208762087720878208792088020881208822088320884208852088620887208882088920890208912089220893208942089520896208972089820899209002090120902209032090420905209062090720908209092091020911209122091320914209152091620917209182091920920209212092220923209242092520926209272092820929209302093120932209332093420935209362093720938209392094020941209422094320944209452094620947209482094920950209512095220953209542095520956209572095820959209602096120962209632096420965209662096720968209692097020971209722097320974209752097620977209782097920980209812098220983209842098520986209872098820989209902099120992209932099420995209962099720998209992100021001210022100321004210052100621007210082100921010210112101221013210142101521016210172101821019210202102
12102221023210242102521026210272102821029210302103121032210332103421035210362103721038210392104021041210422104321044210452104621047210482104921050210512105221053210542105521056210572105821059210602106121062210632106421065210662106721068210692107021071210722107321074210752107621077210782107921080210812108221083210842108521086210872108821089210902109121092210932109421095210962109721098210992110021101211022110321104211052110621107211082110921110211112111221113211142111521116211172111821119211202112121122211232112421125211262112721128211292113021131211322113321134211352113621137211382113921140211412114221143211442114521146211472114821149211502115121152211532115421155211562115721158211592116021161211622116321164211652116621167211682116921170211712117221173211742117521176211772117821179211802118121182211832118421185211862118721188211892119021191211922119321194211952119621197211982119921200212012120221203212042120521206212072120821209212102121121212212132121421215212162121721218212192122021221212222122321224212252122621227212282122921230212312123221233212342123521236212372123821239212402124121242212432124421245212462124721248212492125021251212522125321254212552125621257212582125921260212612126221263212642126521266212672126821269212702127121272212732127421275212762127721278212792128021281212822128321284212852128621287212882128921290212912129221293212942129521296212972129821299213002130121302213032130421305213062130721308213092131021311213122131321314213152131621317213182131921320213212132221323213242132521326213272132821329213302133121332213332133421335213362133721338213392134021341213422134321344213452134621347213482134921350213512135221353213542135521356213572135821359213602136121362213632136421365213662136721368213692137021371213722137321374213752137621377213782137921380213812138221383213842138521386213872138821389213902139121392213932139421395213962139721398213992140021401214022140321404214052140621407214082140921410214112141221413214142141521416214172141821419214202142
12142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182
12182221823218242182521826218272182821829218302183121832218332183421835218362183721838218392184021841218422184321844218452184621847218482184921850218512185221853218542185521856218572185821859218602186121862218632186421865218662186721868218692187021871218722187321874218752187621877218782187921880218812188221883218842188521886218872188821889218902189121892218932189421895218962189721898218992190021901219022190321904219052190621907219082190921910219112191221913219142191521916219172191821919219202192121922219232192421925219262192721928219292193021931219322193321934219352193621937219382193921940219412194221943219442194521946219472194821949219502195121952219532195421955219562195721958219592196021961219622196321964219652196621967219682196921970219712197221973219742197521976219772197821979219802198121982219832198421985219862198721988219892199021991219922199321994219952199621997219982199922000220012200222003220042200522006220072200822009220102201122012220132201422015220162201722018220192202022021220222202322024220252202622027220282202922030220312203222033220342203522036220372203822039220402204122042220432204422045220462204722048220492205022051220522205322054220552205622057220582205922060220612206222063220642206522066220672206822069220702207122072220732207422075220762207722078220792208022081220822208322084220852208622087220882208922090220912209222093220942209522096220972209822099221002210122102221032210422105221062210722108221092211022111221122211322114221152211622117221182211922120221212212222123221242212522126221272212822129221302213122132221332213422135221362213722138221392214022141221422214322144221452214622147221482214922150221512215222153221542215522156221572215822159221602216122162221632216422165221662216722168221692217022171221722217322174221752217622177221782217922180221812218222183221842218522186221872218822189221902219122192221932219422195221962219722198221992220022201222022220322204222052220622207222082220922210222112221222213222142221522216222172221822219222202222
12222222223222242222522226222272222822229222302223122232222332223422235222362223722238222392224022241222422224322244222452224622247222482224922250222512225222253222542225522256222572225822259222602226122262222632226422265222662226722268222692227022271222722227322274222752227622277222782227922280222812228222283222842228522286222872228822289222902229122292222932229422295222962229722298222992230022301223022230322304223052230622307223082230922310223112231222313223142231522316223172231822319223202232122322223232232422325223262232722328223292233022331223322233322334223352233622337223382233922340223412234222343223442234522346223472234822349223502235122352223532235422355223562235722358223592236022361223622236322364223652236622367223682236922370223712237222373223742237522376223772237822379223802238122382223832238422385223862238722388223892239022391223922239322394223952239622397223982239922400224012240222403224042240522406224072240822409224102241122412224132241422415224162241722418224192242022421224222242322424224252242622427224282242922430224312243222433224342243522436224372243822439224402244122442224432244422445224462244722448224492245022451224522245322454224552245622457224582245922460224612246222463224642246522466224672246822469224702247122472224732247422475224762247722478224792248022481224822248322484224852248622487224882248922490224912249222493224942249522496224972249822499225002250122502225032250422505225062250722508225092251022511225122251322514225152251622517225182251922520225212252222523225242252522526225272252822529225302253122532225332253422535225362253722538225392254022541225422254322544225452254622547225482254922550225512255222553225542255522556225572255822559225602256122562225632256422565225662256722568225692257022571225722257322574225752257622577225782257922580225812258222583225842258522586225872258822589225902259122592225932259422595225962259722598225992260022601226022260322604226052260622607226082260922610226112261222613226142261522616226172261822619226202262
12262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302
12302223023230242302523026230272302823029230302303123032230332303423035230362303723038230392304023041230422304323044230452304623047230482304923050230512305223053230542305523056230572305823059230602306123062230632306423065230662306723068230692307023071230722307323074230752307623077230782307923080230812308223083230842308523086230872308823089230902309123092230932309423095230962309723098230992310023101231022310323104231052310623107231082310923110231112311223113231142311523116231172311823119231202312123122231232312423125231262312723128231292313023131231322313323134231352313623137231382313923140231412314223143231442314523146231472314823149231502315123152231532315423155231562315723158231592316023161231622316323164231652316623167231682316923170231712317223173231742317523176231772317823179231802318123182231832318423185231862318723188231892319023191231922319323194231952319623197231982319923200232012320223203232042320523206232072320823209232102321123212232132321423215232162321723218232192322023221232222322323224232252322623227232282322923230232312323223233232342323523236232372323823239232402324123242232432324423245232462324723248232492325023251232522325323254232552325623257232582325923260232612326223263232642326523266232672326823269232702327123272232732327423275232762327723278232792328023281232822328323284232852328623287232882328923290232912329223293232942329523296232972329823299233002330123302233032330423305233062330723308233092331023311233122331323314233152331623317233182331923320233212332223323233242332523326233272332823329233302333123332233332333423335233362333723338233392334023341233422334323344233452334623347233482334923350233512335223353233542335523356233572335823359233602336123362233632336423365233662336723368233692337023371233722337323374233752337623377233782337923380233812338223383233842338523386233872338823389233902339123392233932339423395233962339723398233992340023401234022340323404234052340623407234082340923410234112341223413234142341523416234172341823419234202342
12342223423234242342523426234272342823429234302343123432234332343423435234362343723438234392344023441234422344323444234452344623447234482344923450234512345223453234542345523456234572345823459234602346123462234632346423465234662346723468234692347023471234722347323474234752347623477234782347923480234812348223483234842348523486234872348823489234902349123492234932349423495234962349723498234992350023501235022350323504235052350623507235082350923510235112351223513235142351523516235172351823519235202352123522235232352423525235262352723528235292353023531235322353323534235352353623537235382353923540235412354223543235442354523546235472354823549235502355123552235532355423555235562355723558235592356023561235622356323564235652356623567235682356923570235712357223573235742357523576235772357823579235802358123582235832358423585235862358723588235892359023591235922359323594235952359623597235982359923600236012360223603236042360523606236072360823609236102361123612236132361423615236162361723618236192362023621236222362323624236252362623627236282362923630236312363223633236342363523636236372363823639236402364123642236432364423645236462364723648236492365023651236522365323654236552365623657236582365923660236612366223663236642366523666236672366823669236702367123672236732367423675236762367723678236792368023681236822368323684236852368623687236882368923690236912369223693236942369523696236972369823699237002370123702237032370423705237062370723708237092371023711237122371323714237152371623717237182371923720237212372223723237242372523726237272372823729237302373123732237332373423735237362373723738237392374023741237422374323744237452374623747237482374923750237512375223753237542375523756237572375823759237602376123762237632376423765237662376723768237692377023771237722377323774237752377623777237782377923780237812378223783237842378523786237872378823789237902379123792237932379423795237962379723798237992380023801238022380323804238052380623807238082380923810238112381223813238142381523816238172381823819238202382
12382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422
12422224223242242422524226242272422824229242302423124232242332423424235242362423724238242392424024241242422424324244242452424624247242482424924250242512425224253242542425524256242572425824259242602426124262242632426424265242662426724268242692427024271242722427324274242752427624277242782427924280242812428224283242842428524286242872428824289242902429124292242932429424295242962429724298242992430024301243022430324304243052430624307243082430924310243112431224313243142431524316243172431824319243202432124322243232432424325243262432724328243292433024331243322433324334243352433624337243382433924340243412434224343243442434524346243472434824349243502435124352243532435424355243562435724358243592436024361243622436324364243652436624367243682436924370243712437224373243742437524376243772437824379243802438124382243832438424385243862438724388243892439024391243922439324394243952439624397243982439924400244012440224403244042440524406244072440824409244102441124412244132441424415244162441724418244192442024421244222442324424244252442624427244282442924430244312443224433244342443524436244372443824439244402444124442244432444424445244462444724448244492445024451244522445324454244552445624457244582445924460244612446224463244642446524466244672446824469244702447124472244732447424475244762447724478244792448024481244822448324484244852448624487244882448924490244912449224493244942449524496244972449824499245002450124502245032450424505245062450724508245092451024511245122451324514245152451624517245182451924520245212452224523245242452524526245272452824529245302453124532245332453424535245362453724538245392454024541245422454324544245452454624547245482454924550245512455224553245542455524556245572455824559245602456124562245632456424565245662456724568245692457024571245722457324574245752457624577245782457924580245812458224583245842458524586245872458824589245902459124592245932459424595245962459724598245992460024601246022460324604246052460624607246082460924610246112461224613246142461524616246172461824619246202462
12462224623246242462524626246272462824629246302463124632246332463424635246362463724638246392464024641246422464324644246452464624647246482464924650246512465224653246542465524656246572465824659246602466124662246632466424665246662466724668246692467024671246722467324674246752467624677246782467924680246812468224683246842468524686246872468824689246902469124692246932469424695246962469724698246992470024701247022470324704247052470624707247082470924710247112471224713247142471524716247172471824719247202472124722247232472424725247262472724728247292473024731247322473324734247352473624737247382473924740247412474224743247442474524746247472474824749247502475124752247532475424755247562475724758247592476024761247622476324764247652476624767247682476924770247712477224773247742477524776247772477824779247802478124782247832478424785247862478724788247892479024791247922479324794247952479624797247982479924800248012480224803248042480524806248072480824809248102481124812248132481424815248162481724818248192482024821248222482324824248252482624827248282482924830248312483224833248342483524836248372483824839248402484124842248432484424845248462484724848248492485024851248522485324854248552485624857248582485924860248612486224863248642486524866248672486824869248702487124872248732487424875248762487724878248792488024881248822488324884248852488624887248882488924890248912489224893248942489524896248972489824899249002490124902249032490424905249062490724908249092491024911249122491324914249152491624917249182491924920249212492224923249242492524926249272492824929249302493124932249332493424935249362493724938249392494024941249422494324944249452494624947249482494924950249512495224953249542495524956249572495824959249602496124962249632496424965249662496724968249692497024971249722497324974249752497624977249782497924980249812498224983249842498524986249872498824989249902499124992249932499424995249962499724998249992500025001250022500325004250052500625007250082500925010250112501225013250142501525016250172501825019250202502
12502225023250242502525026250272502825029250302503125032250332503425035250362503725038250392504025041250422504325044250452504625047250482504925050250512505225053250542505525056250572505825059250602506125062250632506425065250662506725068250692507025071250722507325074250752507625077250782507925080250812508225083250842508525086250872508825089250902509125092250932509425095250962509725098250992510025101251022510325104251052510625107251082510925110251112511225113251142511525116251172511825119251202512125122251232512425125251262512725128251292513025131251322513325134251352513625137251382513925140251412514225143251442514525146251472514825149251502515125152251532515425155251562515725158251592516025161251622516325164251652516625167251682516925170251712517225173251742517525176251772517825179251802518125182251832518425185251862518725188251892519025191251922519325194251952519625197251982519925200252012520225203252042520525206252072520825209252102521125212252132521425215252162521725218252192522025221252222522325224252252522625227252282522925230252312523225233252342523525236252372523825239252402524125242252432524425245252462524725248252492525025251252522525325254252552525625257252582525925260252612526225263252642526525266252672526825269252702527125272252732527425275252762527725278252792528025281252822528325284252852528625287252882528925290252912529225293252942529525296252972529825299253002530125302253032530425305253062530725308253092531025311253122531325314253152531625317253182531925320253212532225323253242532525326253272532825329253302533125332253332533425335253362533725338253392534025341253422534325344253452534625347253482534925350253512535225353253542535525356253572535825359253602536125362253632536425365253662536725368253692537025371253722537325374253752537625377253782537925380253812538225383253842538525386253872538825389253902539125392253932539425395253962539725398253992540025401254022540325404254052540625407254082540925410254112541225413254142541525416254172541825419254202542
12542225423254242542525426254272542825429254302543125432254332543425435254362543725438254392544025441254422544325444254452544625447254482544925450254512545225453254542545525456254572545825459254602546125462254632546425465254662546725468254692547025471254722547325474254752547625477254782547925480254812548225483254842548525486254872548825489254902549125492254932549425495254962549725498254992550025501255022550325504255052550625507255082550925510255112551225513255142551525516255172551825519255202552125522255232552425525255262552725528255292553025531255322553325534255352553625537255382553925540255412554225543255442554525546255472554825549255502555125552255532555425555255562555725558255592556025561255622556325564255652556625567255682556925570255712557225573255742557525576255772557825579255802558125582255832558425585255862558725588255892559025591255922559325594255952559625597255982559925600256012560225603256042560525606256072560825609256102561125612256132561425615256162561725618256192562025621256222562325624256252562625627256282562925630256312563225633256342563525636256372563825639256402564125642256432564425645256462564725648256492565025651256522565325654256552565625657256582565925660256612566225663256642566525666256672566825669256702567125672256732567425675256762567725678256792568025681256822568325684256852568625687256882568925690256912569225693256942569525696256972569825699257002570125702257032570425705257062570725708257092571025711257122571325714257152571625717257182571925720257212572225723257242572525726257272572825729257302573125732257332573425735257362573725738257392574025741257422574325744257452574625747257482574925750257512575225753257542575525756257572575825759257602576125762257632576425765257662576725768257692577025771257722577325774257752577625777257782577925780257812578225783257842578525786257872578825789257902579125792257932579425795257962579725798257992580025801258022580325804258052580625807258082580925810258112581225813258142581525816258172581825819258202582
12582225823258242582525826258272582825829258302583125832258332583425835258362583725838258392584025841258422584325844258452584625847258482584925850258512585225853258542585525856258572585825859258602586125862258632586425865258662586725868258692587025871258722587325874258752587625877258782587925880258812588225883258842588525886258872588825889258902589125892258932589425895258962589725898258992590025901259022590325904259052590625907259082590925910259112591225913259142591525916259172591825919259202592125922259232592425925259262592725928259292593025931259322593325934259352593625937259382593925940259412594225943259442594525946259472594825949259502595125952259532595425955259562595725958259592596025961259622596325964259652596625967259682596925970259712597225973259742597525976259772597825979259802598125982259832598425985259862598725988259892599025991259922599325994259952599625997259982599926000260012600226003260042600526006260072600826009260102601126012260132601426015260162601726018260192602026021260222602326024260252602626027260282602926030260312603226033260342603526036260372603826039260402604126042260432604426045260462604726048260492605026051260522605326054260552605626057260582605926060260612606226063260642606526066260672606826069260702607126072260732607426075260762607726078260792608026081260822608326084260852608626087260882608926090260912609226093260942609526096260972609826099261002610126102261032610426105261062610726108261092611026111261122611326114261152611626117261182611926120261212612226123261242612526126261272612826129261302613126132261332613426135261362613726138261392614026141261422614326144261452614626147261482614926150261512615226153261542615526156261572615826159261602616126162261632616426165261662616726168261692617026171261722617326174261752617626177261782617926180261812618226183261842618526186261872618826189261902619126192261932619426195261962619726198261992620026201262022620326204262052620626207262082620926210262112621226213262142621526216262172621826219262202622
12622226223262242622526226262272622826229262302623126232262332623426235262362623726238262392624026241262422624326244262452624626247262482624926250262512625226253262542625526256262572625826259262602626126262262632626426265262662626726268262692627026271262722627326274262752627626277262782627926280262812628226283262842628526286262872628826289262902629126292262932629426295262962629726298262992630026301263022630326304263052630626307263082630926310263112631226313263142631526316263172631826319263202632126322263232632426325263262632726328263292633026331263322633326334263352633626337263382633926340263412634226343263442634526346263472634826349263502635126352263532635426355263562635726358263592636026361263622636326364263652636626367263682636926370263712637226373263742637526376263772637826379263802638126382263832638426385263862638726388263892639026391263922639326394263952639626397263982639926400264012640226403264042640526406264072640826409264102641126412264132641426415264162641726418264192642026421264222642326424264252642626427264282642926430264312643226433264342643526436264372643826439264402644126442264432644426445264462644726448264492645026451264522645326454264552645626457264582645926460264612646226463264642646526466264672646826469264702647126472264732647426475264762647726478264792648026481264822648326484264852648626487264882648926490264912649226493264942649526496264972649826499265002650126502265032650426505265062650726508265092651026511265122651326514265152651626517265182651926520265212652226523265242652526526265272652826529265302653126532265332653426535265362653726538265392654026541265422654326544265452654626547265482654926550265512655226553265542655526556265572655826559265602656126562265632656426565265662656726568265692657026571265722657326574265752657626577265782657926580265812658226583265842658526586265872658826589265902659126592265932659426595265962659726598265992660026601266022660326604266052660626607266082660926610266112661226613266142661526616266172661826619266202662
12662226623266242662526626266272662826629266302663126632266332663426635266362663726638266392664026641266422664326644266452664626647266482664926650266512665226653266542665526656266572665826659266602666126662266632666426665266662666726668266692667026671266722667326674266752667626677266782667926680266812668226683266842668526686266872668826689266902669126692266932669426695266962669726698266992670026701267022670326704267052670626707267082670926710267112671226713267142671526716267172671826719267202672126722267232672426725267262672726728267292673026731267322673326734267352673626737267382673926740267412674226743267442674526746267472674826749267502675126752267532675426755267562675726758267592676026761267622676326764267652676626767267682676926770267712677226773267742677526776267772677826779267802678126782267832678426785267862678726788267892679026791267922679326794267952679626797267982679926800268012680226803268042680526806268072680826809268102681126812268132681426815268162681726818268192682026821268222682326824268252682626827268282682926830268312683226833268342683526836268372683826839268402684126842268432684426845268462684726848268492685026851268522685326854268552685626857268582685926860268612686226863268642686526866268672686826869268702687126872268732687426875268762687726878268792688026881268822688326884268852688626887268882688926890268912689226893268942689526896268972689826899269002690126902269032690426905269062690726908269092691026911269122691326914269152691626917269182691926920269212692226923269242692526926269272692826929269302693126932269332693426935269362693726938269392694026941269422694326944269452694626947269482694926950269512695226953269542695526956269572695826959269602696126962269632696426965269662696726968269692697026971269722697326974269752697626977269782697926980269812698226983269842698526986269872698826989269902699126992269932699426995269962699726998269992700027001270022700327004270052700627007270082700927010270112701227013270142701527016270172701827019270202702
12702227023270242702527026270272702827029270302703127032270332703427035270362703727038270392704027041270422704327044270452704627047270482704927050270512705227053270542705527056270572705827059270602706127062270632706427065270662706727068270692707027071270722707327074270752707627077270782707927080270812708227083270842708527086270872708827089270902709127092270932709427095270962709727098270992710027101271022710327104271052710627107271082710927110271112711227113271142711527116271172711827119271202712127122271232712427125271262712727128271292713027131271322713327134271352713627137271382713927140271412714227143271442714527146271472714827149271502715127152271532715427155271562715727158271592716027161271622716327164271652716627167271682716927170271712717227173271742717527176271772717827179271802718127182271832718427185271862718727188271892719027191271922719327194271952719627197271982719927200272012720227203272042720527206272072720827209272102721127212272132721427215272162721727218272192722027221272222722327224272252722627227272282722927230272312723227233272342723527236272372723827239272402724127242272432724427245272462724727248272492725027251272522725327254272552725627257272582725927260272612726227263272642726527266272672726827269272702727127272272732727427275272762727727278272792728027281272822728327284272852728627287272882728927290272912729227293272942729527296272972729827299273002730127302273032730427305273062730727308273092731027311273122731327314273152731627317273182731927320273212732227323273242732527326273272732827329273302733127332273332733427335273362733727338273392734027341273422734327344273452734627347273482734927350273512735227353273542735527356273572735827359273602736127362273632736427365273662736727368273692737027371273722737327374273752737627377273782737927380273812738227383273842738527386273872738827389273902739127392273932739427395273962739727398273992740027401274022740327404274052740627407274082740927410274112741227413274142741527416274172741827419274202742
12742227423274242742527426274272742827429274302743127432274332743427435274362743727438274392744027441274422744327444274452744627447274482744927450274512745227453274542745527456274572745827459274602746127462274632746427465274662746727468274692747027471274722747327474274752747627477274782747927480274812748227483274842748527486274872748827489274902749127492274932749427495274962749727498274992750027501275022750327504275052750627507275082750927510275112751227513275142751527516275172751827519275202752127522275232752427525275262752727528275292753027531275322753327534275352753627537275382753927540275412754227543275442754527546275472754827549275502755127552275532755427555275562755727558275592756027561275622756327564275652756627567275682756927570275712757227573275742757527576275772757827579275802758127582275832758427585275862758727588275892759027591275922759327594275952759627597275982759927600276012760227603276042760527606276072760827609276102761127612276132761427615276162761727618276192762027621276222762327624276252762627627276282762927630276312763227633276342763527636276372763827639276402764127642276432764427645276462764727648276492765027651276522765327654276552765627657276582765927660276612766227663276642766527666276672766827669276702767127672276732767427675276762767727678276792768027681276822768327684276852768627687276882768927690276912769227693276942769527696276972769827699277002770127702277032770427705277062770727708277092771027711277122771327714277152771627717277182771927720277212772227723277242772527726277272772827729277302773127732277332773427735277362773727738277392774027741277422774327744277452774627747277482774927750277512775227753277542775527756277572775827759277602776127762277632776427765277662776727768277692777027771277722777327774277752777627777277782777927780277812778227783277842778527786277872778827789277902779127792277932779427795277962779727798277992780027801278022780327804278052780627807278082780927810278112781227813278142781527816278172781827819278202782
12782227823278242782527826278272782827829278302783127832278332783427835278362783727838278392784027841278422784327844278452784627847278482784927850278512785227853278542785527856278572785827859278602786127862278632786427865278662786727868278692787027871278722787327874278752787627877278782787927880278812788227883278842788527886278872788827889278902789127892278932789427895278962789727898278992790027901279022790327904279052790627907279082790927910279112791227913279142791527916279172791827919279202792127922279232792427925279262792727928279292793027931279322793327934279352793627937279382793927940279412794227943279442794527946279472794827949279502795127952279532795427955279562795727958279592796027961279622796327964279652796627967279682796927970279712797227973279742797527976279772797827979279802798127982279832798427985279862798727988279892799027991279922799327994279952799627997279982799928000280012800228003280042800528006280072800828009280102801128012280132801428015280162801728018280192802028021280222802328024280252802628027280282802928030280312803228033280342803528036280372803828039280402804128042280432804428045280462804728048280492805028051280522805328054280552805628057280582805928060280612806228063280642806528066280672806828069280702807128072280732807428075280762807728078280792808028081280822808328084280852808628087280882808928090280912809228093280942809528096280972809828099281002810128102281032810428105281062810728108281092811028111281122811328114281152811628117281182811928120281212812228123281242812528126281272812828129281302813128132281332813428135281362813728138281392814028141281422814328144281452814628147281482814928150281512815228153281542815528156281572815828159281602816128162281632816428165281662816728168281692817028171281722817328174281752817628177281782817928180281812818228183281842818528186281872818828189281902819128192281932819428195281962819728198281992820028201282022820328204282052820628207282082820928210282112821228213282142821528216282172821828219282202822
12822228223282242822528226282272822828229282302823128232282332823428235282362823728238282392824028241282422824328244282452824628247282482824928250282512825228253282542825528256282572825828259282602826128262282632826428265282662826728268282692827028271282722827328274282752827628277282782827928280282812828228283282842828528286282872828828289282902829128292282932829428295282962829728298282992830028301283022830328304283052830628307283082830928310283112831228313283142831528316283172831828319283202832128322283232832428325283262832728328283292833028331283322833328334283352833628337283382833928340283412834228343283442834528346283472834828349283502835128352283532835428355283562835728358283592836028361283622836328364283652836628367283682836928370283712837228373283742837528376283772837828379283802838128382283832838428385283862838728388283892839028391283922839328394283952839628397283982839928400284012840228403284042840528406284072840828409284102841128412284132841428415284162841728418284192842028421284222842328424284252842628427284282842928430284312843228433284342843528436284372843828439284402844128442284432844428445284462844728448284492845028451284522845328454284552845628457284582845928460284612846228463284642846528466284672846828469284702847128472284732847428475284762847728478284792848028481284822848328484284852848628487284882848928490284912849228493284942849528496284972849828499285002850128502285032850428505285062850728508285092851028511285122851328514285152851628517285182851928520285212852228523285242852528526285272852828529285302853128532285332853428535285362853728538285392854028541285422854328544285452854628547285482854928550285512855228553285542855528556285572855828559285602856128562285632856428565285662856728568285692857028571285722857328574285752857628577285782857928580285812858228583285842858528586285872858828589285902859128592285932859428595285962859728598285992860028601286022860328604286052860628607286082860928610286112861228613286142861528616286172861828619286202862
12862228623286242862528626286272862828629286302863128632286332863428635286362863728638286392864028641286422864328644286452864628647286482864928650286512865228653286542865528656286572865828659286602866128662286632866428665286662866728668286692867028671286722867328674286752867628677286782867928680286812868228683286842868528686286872868828689286902869128692286932869428695286962869728698286992870028701287022870328704287052870628707287082870928710287112871228713287142871528716287172871828719287202872128722287232872428725287262872728728287292873028731287322873328734287352873628737287382873928740287412874228743287442874528746287472874828749287502875128752287532875428755287562875728758287592876028761287622876328764287652876628767287682876928770287712877228773287742877528776287772877828779287802878128782287832878428785287862878728788287892879028791287922879328794287952879628797287982879928800288012880228803288042880528806288072880828809288102881128812288132881428815288162881728818288192882028821288222882328824288252882628827288282882928830288312883228833288342883528836288372883828839288402884128842288432884428845288462884728848288492885028851288522885328854288552885628857288582885928860288612886228863288642886528866288672886828869288702887128872288732887428875288762887728878288792888028881288822888328884288852888628887288882888928890288912889228893288942889528896288972889828899289002890128902289032890428905289062890728908289092891028911289122891328914289152891628917289182891928920289212892228923289242892528926289272892828929289302893128932289332893428935289362893728938289392894028941289422894328944289452894628947289482894928950289512895228953289542895528956289572895828959289602896128962289632896428965289662896728968289692897028971289722897328974289752897628977289782897928980289812898228983289842898528986289872898828989289902899128992289932899428995289962899728998289992900029001290022900329004290052900629007290082900929010290112901229013290142901529016290172901829019290202902
12902229023290242902529026290272902829029290302903129032290332903429035290362903729038290392904029041290422904329044290452904629047290482904929050290512905229053290542905529056290572905829059290602906129062290632906429065290662906729068290692907029071290722907329074 |
- //-------------------------------------------------------------------------------------------------------
- // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
- // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
- //-------------------------------------------------------------------------------------------------------
- #include "Backend.h"
- #ifdef ENABLE_SCRIPT_DEBUGGING
- #include "Debug/DebuggingFlags.h"
- #include "Debug/DiagProbe.h"
- #include "Debug/DebugManager.h"
- #endif
- // Parser includes
- #include "RegexCommon.h"
- #include "RegexPattern.h"
- #include "ExternalLowerer.h"
- #include "Types/DynamicObjectPropertyEnumerator.h"
- #include "Types/JavascriptStaticEnumerator.h"
- #include "Library/ForInObjectEnumerator.h"
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::Lower
- ///
- /// Lowerer's main entrypoint. Lowers this function.
- ///
- ///----------------------------------------------------------------------------
- // Phase driver for lowering: set up a lowering-scoped arena and bit vectors,
- // initialize the machine-dependent lowerer, lower every instruction in the
- // function via LowerRange, then run post-lowering fixups and clear the
- // pointers into this call's stack-allocated state before returning.
- void
- Lowerer::Lower()
- {
- // Byte code offsets no longer need to be maintained once lowering begins.
- this->m_func->StopMaintainByteCodeOffset();
- // All allocations made during lowering come from this stack-scoped arena;
- // m_alloc is reset to nullptr at the end of this function, so nothing may
- // retain arena pointers past this call.
- NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-Lower"), this->m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
- this->m_alloc = &localAlloc;
- BVSparse<JitArenaAllocator> localInitializedTempSym(&localAlloc);
- this->initializedTempSym = &localInitializedTempSym;
- BVSparse<JitArenaAllocator> localAddToLiveOnBackEdgeSyms(&localAlloc);
- this->addToLiveOnBackEdgeSyms = &localAddToLiveOnBackEdgeSyms;
- Assert(this->m_func->GetCloneMap() == nullptr);
- // Give the machine-dependent lowerer a back pointer to this (MI) lowerer.
- m_lowererMD.Init(this);
- bool defaultDoFastPath = this->m_func->DoFastPaths();
- bool loopFastPath = this->m_func->DoLoopFastPaths();
- // Pre-allocate stack symbols/slots that lowering of individual instructions
- // will rely on (stack nested functions, stack closures, for-in enumerators).
- if (m_func->HasAnyStackNestedFunc())
- {
- EnsureStackFunctionListStackSym();
- }
- if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
- {
- AllocStackClosure();
- }
- AllocStackForInObjectEnumeratorArray();
- if (m_func->IsJitInDebugMode())
- {
- // Initialize metadata of local var slots.
- // Too late to wait until Register Allocator, as we need the offset when lowering bailout for debugger.
- int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
- if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
- {
- // MOV [EBP + m_func->GetHasLocalVarChangedOffset()], 0
- StackSym* sym = StackSym::New(TyInt8, m_func);
- sym->m_offset = hasLocalVarChangedOffset;
- sym->m_allocated = true;
- IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
- Lowerer::InsertMove(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
- #ifdef DBG
- // Pre-fill all local slots with a pattern. This will help identify non-initialized/garbage var values.
- // Note that in the beginning of the function in bytecode we should initialize all locals to undefined.
- uint32 localSlotCount = m_func->GetJITFunctionBody()->GetEndNonTempLocalIndex() - m_func->GetJITFunctionBody()->GetFirstNonTempLocalIndex();
- for (uint i = 0; i < localSlotCount; ++i)
- {
- int offset = m_func->GetLocalVarSlotOffset(i);
- IRType opnd1Type;
- #if defined(TARGET_32)
- // 32-bit targets fill each slot with the 4-byte debug pattern; 64-bit
- // targets use the 8-byte pattern below.
- opnd1Type = TyInt32;
- opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern4, opnd1Type, m_func);
- #else
- opnd1Type = TyInt64;
- opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern8, opnd1Type, m_func);
- #endif
- sym = StackSym::New(opnd1Type, m_func);
- sym->m_offset = offset;
- sym->m_allocated = true;
- opnd1 = IR::SymOpnd::New(sym, opnd1Type, m_func);
- Lowerer::InsertMove(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
- }
- #endif
- }
- // NOTE(review): asserts that debug-mode JIT never coexists with stack nested
- // functions — presumably enforced earlier in the pipeline; confirm.
- Assert(!m_func->HasAnyStackNestedFunc());
- }
- // Lower every instruction in the function, head to tail.
- this->LowerRange(m_func->m_headInstr, m_func->m_tailInstr, defaultDoFastPath, loopFastPath);
- #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
- // TODO: (leish)(swb) implement for arm
- #if defined(_M_IX86) || defined(_M_AMD64)
- if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(VerifyBarrierBit))
- {
- // find out all write barrier setting instr, call Recycler::WBSetBit for verification purpose
- // should do this in LowererMD::GenerateWriteBarrier, however, can't insert call instruction there
- FOREACH_INSTR_EDITING(instr, instrNext, m_func->m_headInstr)
- if (instr->m_src1 && instr->m_src1->IsAddrOpnd())
- {
- IR::AddrOpnd* addrOpnd = instr->m_src1->AsAddrOpnd();
- if (addrOpnd->GetAddrOpndKind() == IR::AddrOpndKindWriteBarrierCardTable)
- {
- // Depends on the exact LEA/MOV/SHR instruction sequence emitted by the
- // write-barrier lowering immediately before this instr; the asserts
- // below pin that shape.
- auto& leaInstr = instr->m_prev->m_prev->m_prev;
- auto& movInstr = instr->m_prev->m_prev;
- auto& shrInstr = instr->m_prev;
- Assert(leaInstr->m_opcode == Js::OpCode::LEA);
- Assert(movInstr->m_opcode == Js::OpCode::MOV);
- Assert(shrInstr->m_opcode == Js::OpCode::SHR);
- m_lowererMD.LoadHelperArgument(movInstr, leaInstr->m_dst);
- IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
- movInstr->InsertBefore(instrCall);
- m_lowererMD.ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
- }
- }
- NEXT_INSTR_EDITING
- }
- #endif
- #endif
- this->m_func->ClearCloneMap();
- if (m_func->HasAnyStackNestedFunc())
- {
- EnsureZeroLastStackFunctionNext();
- }
- if (!m_func->IsSimpleJit())
- {
- #if 0 // TODO michhol oop jit, reenable assert
- Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
- Assert(entryPointInfo->GetJitTransferData() != nullptr && !entryPointInfo->GetJitTransferData()->GetIsReady());
- #endif
- }
- // Clear the pointers into the stack-allocated arena/bit vectors declared at
- // the top of this function so nothing dangles after this call returns.
- this->initializedTempSym = nullptr;
- this->m_alloc = nullptr;
- this->m_func->DisableConstandAddressLoadHoist();
- }
- void
- Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFastPath, bool defaultDoLoopFastPath)
- {
- bool noMathFastPath;
- bool noFieldFastPath;
- bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();
- noFieldFastPath = !defaultDoFastPath;
- noMathFastPath = !defaultDoFastPath;
- #if DBG_DUMP
- char16 * globOptInstrString = nullptr;
- #endif
- FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrEnd, instrStart)
- {
- // Try to peep this`
- instr = this->PreLowerPeepInstr(instr, &instrPrev);
- #if DBG
- IR::Instr * verifyLegalizeInstrNext = instr->m_next;
- m_currentInstrOpCode = instr->m_opcode;
- #endif
- // If we have debugger bailout as part of real instr (not separate BailForDebugger instr),
- // extract/split out BailOutForDebugger into separate instr, if needed.
- // The instr can have just debugger bailout, or debugger bailout + other shared bailout.
- // Note that by the time we get here, we should not have aux-only bailout (in globopt we promote it to normal bailout).
- if (m_func->IsJitInDebugMode() && instr->HasBailOutInfo() &&
- (((instr->GetBailOutKind() & IR::BailOutForDebuggerBits) && instr->m_opcode != Js::OpCode::BailForDebugger) ||
- instr->HasAuxBailOut()))
- {
- instr = this->SplitBailForDebugger(instr); // Change instr, as returned is the one we need to lower next.
- instrPrev = instr->m_prev; // Change just in case if instr got changed.
- }
- #if DBG_DUMP
- if (!instr->IsLowered() && !instr->IsLabelInstr()
- && (CONFIG_FLAG(ForcePostLowerGlobOptInstrString) ||
- PHASE_DUMP(Js::LowererPhase, m_func) ||
- PHASE_DUMP(Js::LinearScanPhase, m_func) ||
- PHASE_DUMP(Js::RegAllocPhase, m_func) ||
- PHASE_DUMP(Js::PeepsPhase, m_func) ||
- PHASE_DUMP(Js::LayoutPhase, m_func) ||
- PHASE_DUMP(Js::EmitterPhase, m_func) ||
- PHASE_DUMP(Js::EncoderPhase, m_func) ||
- PHASE_DUMP(Js::BackEndPhase, m_func)))
- {
- if(instr->m_next && instr->m_next->m_opcode != Js::OpCode::StatementBoundary && !instr->m_next->IsLabelInstr())
- {
- instr->m_next->globOptInstrString = globOptInstrString;
- }
- globOptInstrString = instr->DumpString();
- }
- #endif
- if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsMultiBranch() && instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
- {
- Loop * loop = instr->AsBranchInstr()->GetTarget()->GetLoop();
- if (this->outerMostLoopLabel == nullptr && !loop->isProcessed)
- {
- while (loop && loop->GetLoopTopInstr()) // some loops are optimized away so that they are not loops anymore.
- // They do, however, stay in the loop graph but don't have loop top labels assigned to them
- {
- this->outerMostLoopLabel = loop->GetLoopTopInstr();
- Assert(this->outerMostLoopLabel->m_isLoopTop);
- // landing pad must fall through to the loop
- Assert(this->outerMostLoopLabel->m_prev->HasFallThrough());
- loop = loop->parent;
- }
- this->initializedTempSym->ClearAll();
- }
- noFieldFastPath = !defaultDoLoopFastPath;
- noMathFastPath = !defaultDoLoopFastPath;
- }
- #ifdef INLINE_CACHE_STATS
- if(PHASE_STATS1(Js::PolymorphicInlineCachePhase))
- {
- // Always use the slow path, so we can track property accesses
- noFieldFastPath = true;
- }
- #endif
- #if DBG
- if (instr->HasBailOutInfo())
- {
- IR::BailOutKind bailoutKind = instr->GetBailOutKind();
- if (BailOutInfo::IsBailOutOnImplicitCalls(bailoutKind))
- {
- this->helperCallCheckState = (HelperCallCheckState)(this->helperCallCheckState | HelperCallCheckState_ImplicitCallsBailout);
- }
- if ((bailoutKind & IR::BailOutOnArrayAccessHelperCall) != 0 &&
- instr->m_opcode != Js::OpCode::Memcopy &&
- instr->m_opcode != Js::OpCode::Memset)
- {
- this->helperCallCheckState = (HelperCallCheckState)(this->helperCallCheckState | HelperCallCheckState_NoHelperCalls);
- }
- }
- #endif
- switch (instr->m_opcode)
- {
- case Js::OpCode::LdHandlerScope:
- this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdHandlerScope);
- break;
- case Js::OpCode::InitSetFld:
- instrPrev = this->LowerStFld(instr, IR::HelperOP_InitSetter, IR::HelperOP_InitSetter, false);
- break;
- case Js::OpCode::InitGetFld:
- instrPrev = this->LowerStFld(instr, IR::HelperOP_InitGetter, IR::HelperOP_InitGetter, false);
- break;
- case Js::OpCode::InitProto:
- instrPrev = this->LowerStFld(instr, IR::HelperOP_InitProto, IR::HelperOP_InitProto, false);
- break;
- case Js::OpCode::LdArgCnt:
- this->LoadArgumentCount(instr);
- break;
- case Js::OpCode::LdStackArgPtr:
- this->LoadStackArgPtr(instr);
- break;
- case Js::OpCode::LdHeapArguments:
- case Js::OpCode::LdLetHeapArguments:
- instrPrev = m_lowererMD.LoadHeapArguments(instr);
- break;
- case Js::OpCode::LdHeapArgsCached:
- case Js::OpCode::LdLetHeapArgsCached:
- m_lowererMD.LoadHeapArgsCached(instr);
- break;
- case Js::OpCode::InvalCachedScope:
- this->LowerBinaryHelper(instr, IR::HelperOP_InvalidateCachedScope);
- break;
- case Js::OpCode::InitCachedScope:
- if (instr->m_func->GetJITFunctionBody()->GetDoScopeObjectCreation() || !instr->m_func->IsStackArgsEnabled())
- {
- instrPrev = this->LowerInitCachedScope(instr);
- }
- else
- {
- instr->ReplaceSrc1(IR::AddrOpnd::NewNull(instr->m_func));
- instr->m_opcode = Js::OpCode::Ld_A;
- instrPrev = instr;
- if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
- {
- Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
- Output::Flush();
- }
- }
- break;
- case Js::OpCode::NewScopeObject:
- {
- Func * currFunc = instr->m_func;
- if (currFunc->GetJITFunctionBody()->GetDoScopeObjectCreation() || !currFunc->IsStackArgsEnabled())
- {
- //Call Helper that creates scope object and does type transition for the formals
- if (currFunc->IsStackArgsEnabled() && currFunc->GetJITFunctionBody()->GetInParamsCount() != 1)
- {
- // s3 = formals are let decls
- this->m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(currFunc->GetHasNonSimpleParams() ? TRUE : FALSE, TyUint8, currFunc));
- // s2 = current function.
- IR::Opnd * paramOpnd = LoadFunctionBodyOpnd(instr);
- this->m_lowererMD.LoadHelperArgument(instr, paramOpnd);
- m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObjectWithFormals);
- }
- else
- {
- m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObject);
- }
- }
- else
- {
- instr->SetSrc1(IR::AddrOpnd::NewNull(instr->m_func));
- instr->m_opcode = Js::OpCode::Ld_A;
- instrPrev = instr;
- if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
- {
- Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), currFunc->GetJITFunctionBody()->GetDisplayName(), currFunc->GetFunctionNumber());
- Output::Flush();
- }
- }
- break;
- }
- case Js::OpCode::NewStackScopeSlots:
- this->LowerNewScopeSlots(instr, m_func->DoStackScopeSlots());
- break;
- case Js::OpCode::NewScopeSlots:
- this->LowerNewScopeSlots(instr, false);
- break;
- case Js::OpCode::InitLocalClosure:
- // Real initialization of the stack pointers happens on entry to the function, so this instruction
- // (which exists to provide a def in the IR) can go away.
- instr->Remove();
- break;
- case Js::OpCode::NewScopeSlotsWithoutPropIds:
- this->LowerBinaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlotsWithoutPropIds);
- break;
- case Js::OpCode::NewBlockScope:
- m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewBlockScope);
- break;
- case Js::OpCode::NewPseudoScope:
- m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewPseudoScope);
- break;
- case Js::OpCode::CloneInnerScopeSlots:
- this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneInnerScopeSlots);
- break;
- case Js::OpCode::CloneBlockScope:
- this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneBlockScope);
- break;
- case Js::OpCode::GetCachedFunc:
- this->LowerGetCachedFunc(instr);
- break;
- case Js::OpCode::BrFncCachedScopeEq:
- case Js::OpCode::BrFncCachedScopeNeq:
- this->LowerBrFncCachedScopeEq(instr);
- break;
- case Js::OpCode::CommitScope:
- this->LowerCommitScope(instr);
- break;
- case Js::OpCode::LdFldForTypeOf:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf,
- IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf);
- break;
- case Js::OpCode::LdFld:
- case Js::OpCode::LdFldForCallApplyTarget:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic,
- IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic);
- break;
- case Js::OpCode::LdSuperFld:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr,
- IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr);
- break;
- case Js::OpCode::LdRootFld:
- instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic,
- IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic);
- break;
- case Js::OpCode::LdRootFldForTypeOf:
- instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf,
- IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf);
- break;
- case Js::OpCode::LdMethodFldPolyInlineMiss:
- instrPrev = LowerLdFld(instr, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic, true, nullptr, true);
- break;
- case Js::OpCode::LdMethodFld:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic,
- IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic);
- break;
- case Js::OpCode::LdRootMethodFld:
- instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic,
- IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic);
- break;
- case Js::OpCode::ScopedLdMethodFld:
- // "Scoped" in ScopedLdMethodFld is a bit of a misnomer because it doesn't look through a scope chain.
- // Instead the op is to allow for either a LdRootMethodFld or LdMethodFld depending on whether the
- // object is the root object or not.
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic,
- IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic);
- break;
- case Js::OpCode::LdMethodFromFlags:
- {
- Assert(instr->HasBailOutInfo());
- bool success = GenerateFastLdMethodFromFlags(instr);
- AssertMsg(success, "Not expected to generate helper block here");
- break;
- }
- case Js::OpCode::CheckFixedFld:
- AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) || !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed prop(Data|Method) phase disabled?");
- this->GenerateCheckFixedFld(instr);
- break;
- case Js::OpCode::CheckPropertyGuardAndLoadType:
- instrPrev = this->GeneratePropertyGuardCheckBailoutAndLoadType(instr);
- break;
- case Js::OpCode::CheckObjType:
- this->GenerateCheckObjType(instr);
- break;
- case Js::OpCode::AdjustObjType:
- case Js::OpCode::AdjustObjTypeReloadAuxSlotPtr:
- this->LowerAdjustObjType(instr);
- break;
- case Js::OpCode::DeleteFld:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, false);
- break;
- case Js::OpCode::DeleteRootFld:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, false);
- break;
- case Js::OpCode::DeleteFldStrict:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, true);
- break;
- case Js::OpCode::DeleteRootFldStrict:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, true);
- break;
- case Js::OpCode::ScopedLdFldForTypeOf:
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedLdFld(instr);
- }
- instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyForTypeOfScoped, true);
- break;
- case Js::OpCode::ScopedLdFld:
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedLdFld(instr);
- }
- instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyScoped, true);
- break;
- case Js::OpCode::ScopedLdInst:
- instrPrev = this->LowerScopedLdInst(instr, IR::HelperOp_GetInstanceScoped);
- break;
- case Js::OpCode::ScopedDeleteFld:
- instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, false);
- break;
- case Js::OpCode::ScopedDeleteFldStrict:
- instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, true);
- break;
- case Js::OpCode::NewScFunc:
- instrPrev = this->LowerNewScFunc(instr);
- break;
- case Js::OpCode::NewScFuncHomeObj:
- instrPrev = this->LowerNewScFuncHomeObj(instr);
- break;
- case Js::OpCode::NewScGenFunc:
- instrPrev = this->LowerNewScGenFunc(instr);
- break;
- case Js::OpCode::NewScGenFuncHomeObj:
- instrPrev = this->LowerNewScGenFuncHomeObj(instr);
- break;
- case Js::OpCode::StFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_None);
- break;
- case Js::OpCode::StSuperFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutValueWithThisPtr, IR::HelperOp_PatchPutValueWithThisPtrPolymorphic, true, isStrictMode ? Js::PropertyOperation_StrictMode : Js::PropertyOperation_None);
- break;
- case Js::OpCode::StRootFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_Root);
- break;
- case Js::OpCode::StFldStrict:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_StrictMode);
- break;
- case Js::OpCode::StRootFldStrict:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_StrictModeRoot);
- break;
- case Js::OpCode::InitFld:
- case Js::OpCode::InitRootFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic,
- IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic, false, Js::PropertyOperation_None);
- break;
- case Js::OpCode::ScopedInitFunc:
- instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_InitFuncScoped, false);
- break;
- case Js::OpCode::ScopedStFld:
- case Js::OpCode::ScopedStFldStrict:
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedStFld(instr);
- }
- instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_PatchSetPropertyScoped, true, true,
- instr->m_opcode == Js::OpCode::ScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode);
- break;
- case Js::OpCode::ConsoleScopedStFld:
- case Js::OpCode::ConsoleScopedStFldStrict:
- {
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedStFld(instr);
- }
- Js::PropertyOperationFlags flags = static_cast<Js::PropertyOperationFlags>((instr->m_opcode == Js::OpCode::ConsoleScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode) | Js::PropertyOperation_AllowUndeclInConsoleScope);
- instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_ConsolePatchSetPropertyScoped, true, true, flags);
- break;
- }
- case Js::OpCode::LdStr:
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::CloneStr:
- {
- GenerateGetImmutableOrScriptUnreferencedString(instr->GetSrc1()->AsRegOpnd(), instr, IR::HelperOp_CompoundStringCloneForAppending, false);
- instr->Remove();
- break;
- }
- case Js::OpCode::NewScObjArray:
- instrPrev = this->LowerNewScObjArray(instr);
- break;
- case Js::OpCode::NewScObject:
- case Js::OpCode::NewScObjectSpread:
- case Js::OpCode::NewScObjArraySpread:
- instrPrev = this->LowerNewScObject(instr, true, true);
- break;
- case Js::OpCode::NewScObjectNoCtor:
- instrPrev = this->LowerNewScObject(instr, false, true);
- break;
- case Js::OpCode::NewScObjectNoCtorFull:
- instrPrev = this->LowerNewScObject(instr, false, true, true);
- break;
- case Js::OpCode::GetNewScObject:
- instrPrev = this->LowerGetNewScObject(instr);
- break;
- case Js::OpCode::UpdateNewScObjectCache:
- instrPrev = instr->m_prev;
- this->LowerUpdateNewScObjectCache(instr, instr->GetSrc2(), instr->GetSrc1(), true /* isCtorFunction */);
- instr->Remove();
- break;
- case Js::OpCode::NewScObjectSimple:
- this->LowerNewScObjectSimple(instr);
- break;
- case Js::OpCode::NewScObjectLiteral:
- this->LowerNewScObjectLiteral(instr);
- break;
- case Js::OpCode::LdPropIds:
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::StArrSegItem_A:
- instrPrev = this->LowerArraySegmentVars(instr);
- break;
- case Js::OpCode::InlineMathAcos:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Acos);
- break;
- case Js::OpCode::InlineMathAsin:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Asin);
- break;
- case Js::OpCode::InlineMathAtan:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan);
- break;
- case Js::OpCode::InlineMathAtan2:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan2);
- break;
- case Js::OpCode::InlineMathCos:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Cos);
- break;
- case Js::OpCode::InlineMathExp:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Exp);
- break;
- case Js::OpCode::InlineMathLog:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Log);
- break;
- case Js::OpCode::InlineMathPow:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
- break;
- case Js::OpCode::InlineMathSin:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Sin);
- break;
- case Js::OpCode::InlineMathSqrt:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathTan:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Tan);
- break;
- case Js::OpCode::InlineMathFloor:
- #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
- if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
- {
- m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_FloorFlt, IR::HelperDirectMath_FloorDb);
- break;
- }
- #endif
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathCeil:
- #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
- if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
- {
- m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_CeilFlt, IR::HelperDirectMath_CeilDb);
- break;
- }
- #endif
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathRound:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathAbs:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathImul:
- GenerateFastInlineMathImul(instr);
- break;
- case Js::OpCode::Ctz:
- GenerateCtz(instr);
- break;
- case Js::OpCode::PopCnt:
- GeneratePopCnt(instr);
- break;
- case Js::OpCode::InlineMathClz:
- GenerateFastInlineMathClz(instr);
- break;
- case Js::OpCode::InlineMathFround:
- GenerateFastInlineMathFround(instr);
- break;
- case Js::OpCode::Reinterpret_Prim:
- LowerReinterpretPrimitive(instr);
- break;
- case Js::OpCode::InlineMathMin:
- case Js::OpCode::InlineMathMax:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathRandom:
- this->GenerateFastInlineBuiltInMathRandom(instr);
- break;
- #ifdef ENABLE_DOM_FAST_PATH
- case Js::OpCode::DOMFastPathGetter:
- this->LowerFastInlineDOMFastPathGetter(instr);
- break;
- #endif
- case Js::OpCode::InlineArrayPush:
- this->GenerateFastInlineArrayPush(instr);
- break;
- case Js::OpCode::InlineArrayPop:
- this->GenerateFastInlineArrayPop(instr);
- break;
- //Now retrieve the function object from the ArgOut_A_InlineSpecialized instruction opcode to push it on the stack after all the other arguments have been pushed.
- //The lowering of the direct call to helper is handled by GenerateDirectCall (architecture specific).
- case Js::OpCode::CallDirect:
- {
- IR::Opnd * src1 = instr->GetSrc1();
- Assert(src1->IsHelperCallOpnd());
- switch (src1->AsHelperCallOpnd()->m_fnHelper)
- {
- case IR::JnHelperMethod::HelperString_Split:
- case IR::JnHelperMethod::HelperString_Match:
- GenerateFastInlineStringSplitMatch(instr);
- break;
- case IR::JnHelperMethod::HelperRegExp_Exec:
- GenerateFastInlineRegExpExec(instr);
- break;
- case IR::JnHelperMethod::HelperGlobalObject_ParseInt:
- GenerateFastInlineGlobalObjectParseInt(instr);
- break;
- case IR::JnHelperMethod::HelperString_FromCharCode:
- GenerateFastInlineStringFromCharCode(instr);
- break;
- case IR::JnHelperMethod::HelperString_FromCodePoint:
- GenerateFastInlineStringFromCodePoint(instr);
- break;
- case IR::JnHelperMethod::HelperString_CharAt:
- GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharAt);
- break;
- case IR::JnHelperMethod::HelperString_CharCodeAt:
- GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharCodeAt);
- break;
- case IR::JnHelperMethod::HelperString_Replace:
- GenerateFastInlineStringReplace(instr);
- break;
- case IR::JnHelperMethod::HelperObject_HasOwnProperty:
- this->GenerateFastInlineHasOwnProperty(instr);
- break;
- case IR::JnHelperMethod::HelperArray_IsArray:
- this->GenerateFastInlineIsArray(instr);
- break;
- }
- instrPrev = LowerCallDirect(instr);
- break;
- }
- case Js::OpCode::CallIDynamic:
- {
- Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
- instrPrev = this->LowerCallIDynamic(instr, (ushort)flags);
- break;
- }
- case Js::OpCode::CallIDynamicSpread:
- {
- Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
- instrPrev = this->LowerCallIDynamicSpread(instr, (ushort)flags);
- break;
- }
- case Js::OpCode::CallI:
- case Js::OpCode::CallINew:
- case Js::OpCode::CallIFixed:
- case Js::OpCode::CallINewTargetNew:
- {
- Js::CallFlags flags = Js::CallFlags_None;
- if (instr->isCtorCall)
- {
- flags = Js::CallFlags_New;
- }
- else
- {
- if (instr->m_opcode == Js::OpCode::CallINew)
- {
- flags = Js::CallFlags_New;
- }
- else if (instr->m_opcode == Js::OpCode::CallINewTargetNew)
- {
- flags = (Js::CallFlags) (Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget);
- }
- if (instr->GetDst())
- {
- flags = (Js::CallFlags) (flags | Js::CallFlags_Value);
- }
- else
- {
- flags = (Js::CallFlags) (flags | Js::CallFlags_NotUsed);
- }
- }
- if (!PHASE_OFF(Js::CallFastPathPhase, this->m_func) && !noMathFastPath)
- {
- // We shouldn't have turned this instruction into a fixed method call if we're calling one of the
- // built-ins we still inline in the lowerer.
- Assert(instr->m_opcode != Js::OpCode::CallIFixed || !Func::IsBuiltInInlinedInLowerer(instr->GetSrc1()));
- // Disable InlineBuiltInLibraryCall as it does not work well with 2nd chance reg alloc
- // and may invalidate live on back edge data by introducing refs across loops. See Winblue Bug: 577641
- //// Callee may still be a library built-in; if so, generate it inline.
- //if (this->InlineBuiltInLibraryCall(instr))
- //{
- // m_lowererMD.LowerCallI(instr, (ushort)flags, true /*isHelper*/);
- //}
- //else
- //{
- m_lowererMD.LowerCallI(instr, (ushort)flags);
- //}
- }
- else
- {
- m_lowererMD.LowerCallI(instr, (ushort)flags);
- }
- break;
- }
- case Js::OpCode::AsmJsCallI:
- instrPrev = m_lowererMD.LowerAsmJsCallI(instr);
- break;
- case Js::OpCode::AsmJsCallE:
- instrPrev = m_lowererMD.LowerAsmJsCallE(instr);
- break;
- case Js::OpCode::CallIEval:
- {
- Js::CallFlags flags = (Js::CallFlags)(Js::CallFlags_ExtraArg | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
- if (IsSpreadCall(instr))
- {
- instrPrev = LowerSpreadCall(instr, flags);
- }
- else
- {
- m_lowererMD.LowerCallI(instr, (ushort)flags);
- }
- #ifdef PERF_HINT
- if (PHASE_TRACE1(Js::PerfHintPhase))
- {
- WritePerfHint(PerfHints::CallsEval, this->m_func, instr->GetByteCodeOffset());
- }
- #endif
- break;
- }
- case Js::OpCode::CallHelper:
- instrPrev = m_lowererMD.LowerCallHelper(instr);
- break;
- case Js::OpCode::Ret:
- if (instr->m_next->m_opcode != Js::OpCode::FunctionExit)
- {
- // If this RET isn't at the end of the function, insert a branch to
- // the epilog.
- IR::Instr *exitPrev = m_func->m_exitInstr->m_prev;
- if (!exitPrev->IsLabelInstr())
- {
- exitPrev = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- m_func->m_exitInstr->InsertBefore(exitPrev);
- }
- IR::BranchInstr *exitBr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode,
- exitPrev->AsLabelInstr(), m_func);
- instr->InsertAfter(exitBr);
- }
- m_lowererMD.LowerRet(instr);
- break;
- case Js::OpCode::LdArgumentsFromFrame:
- this->LoadArgumentsFromFrame(instr);
- break;
- case Js::OpCode::LdC_A_I4:
- {
- IR::Opnd *src1 = instr->UnlinkSrc1();
- AssertMsg(src1->IsIntConstOpnd(), "Source of LdC_A_I4 should be an IntConst...");
- instrPrev = this->LowerLoadVar(instr,
- IR::AddrOpnd::NewFromNumber(static_cast<int32>(src1->AsIntConstOpnd()->GetValue()), this->m_func));
- src1->Free(this->m_func);
- break;
- }
- case Js::OpCode::LdC_A_R8:
- {
- IR::Opnd *src1 = instr->UnlinkSrc1();
- AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_A_R8 should be a FloatConst...");
- instrPrev = this->LowerLoadVar(instr, src1->AsFloatConstOpnd()->GetAddrOpnd(this->m_func));
- src1->Free(this->m_func);
- break;
- }
- case Js::OpCode::LdC_F8_R8:
- {
- IR::Opnd *src1 = instr->UnlinkSrc1();
- AssertMsg(src1->IsFloatConstOpnd() || src1->IsFloat32ConstOpnd(), "Source of LdC_F8_R8 should be a FloatConst...");
- if (src1->IsFloatConstOpnd())
- {
- instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloatConstOpnd()->m_value, instr);
- }
- else
- {
- instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloat32ConstOpnd()->m_value, instr);
- }
- src1->Free(this->m_func);
- instr->Remove();
- break;
- }
- case Js::OpCode::NewRegEx:
- instrPrev = this->LowerNewRegEx(instr);
- break;
- case Js::OpCode::Conv_Obj:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_ConvObject);
- break;
- case Js::OpCode::NewUnscopablesWrapperObject:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_NewUnscopablesWrapperObject);
- break;
- case Js::OpCode::LdCustomSpreadIteratorList:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_ToSpreadedFunctionArgument);
- break;
- case Js::OpCode::Conv_Numeric:
- case Js::OpCode::Conv_Num:
- this->LowerConvNum(instr, noMathFastPath);
- break;
- case Js::OpCode::Incr_Num_A:
- case Js::OpCode::Incr_A:
- if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Increment);
- }
- else
- {
- instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
- m_lowererMD.GenerateFastAdd(instr);
- instr->FreeSrc2();
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Increment));
- }
- break;
- case Js::OpCode::Decr_Num_A:
- case Js::OpCode::Decr_A:
- if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Decrement);
- }
- else
- {
- instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
- m_lowererMD.GenerateFastSub(instr);
- instr->FreeSrc2();
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Decrement));
- }
- break;
- case Js::OpCode::Neg_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Negate);
- }
- else if (m_lowererMD.GenerateFastNeg(instr))
- {
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Negate));
- }
- break;
- case Js::OpCode::Not_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Not);
- }
- else if (m_lowererMD.GenerateFastNot(instr))
- {
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Not));
- }
- break;
- case Js::OpCode::BrEq_I4:
- case Js::OpCode::BrNeq_I4:
- case Js::OpCode::BrGt_I4:
- case Js::OpCode::BrGe_I4:
- case Js::OpCode::BrLt_I4:
- case Js::OpCode::BrLe_I4:
- case Js::OpCode::BrUnGt_I4:
- case Js::OpCode::BrUnGe_I4:
- case Js::OpCode::BrUnLt_I4:
- case Js::OpCode::BrUnLe_I4:
- {
- // See calls to MarkOneFltTmpSym under BrSrEq. This is to handle the case
- // where a branch is type-specialized and uses the result of a float pref op,
- // which must then be saved to var at the def.
- StackSym *sym = instr->GetSrc1()->GetStackSym();
- if (sym)
- {
- sym = sym->GetVarEquivSym(nullptr);
- }
- sym = instr->GetSrc2()->GetStackSym();
- if (sym)
- {
- sym = sym->GetVarEquivSym(nullptr);
- }
- }
- // FALLTHROUGH
- case Js::OpCode::Neg_I4:
- case Js::OpCode::Not_I4:
- case Js::OpCode::Add_I4:
- case Js::OpCode::Sub_I4:
- case Js::OpCode::Mul_I4:
- case Js::OpCode::RemU_I4:
- case Js::OpCode::Rem_I4:
- case Js::OpCode::Or_I4:
- case Js::OpCode::Xor_I4:
- case Js::OpCode::And_I4:
- case Js::OpCode::Shl_I4:
- case Js::OpCode::Shr_I4:
- case Js::OpCode::ShrU_I4:
- case Js::OpCode::Rol_I4:
- case Js::OpCode::Ror_I4:
- case Js::OpCode::BrTrue_I4:
- case Js::OpCode::BrFalse_I4:
- #ifdef _M_IX86
- if (
- instr->GetDst() && instr->GetDst()->IsInt64() ||
- instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
- instr->GetSrc2() && instr->GetSrc2()->IsInt64()
- )
- {
- m_lowererMD.EmitInt64Instr(instr);
- break;
- }
- #endif
- if (instr->HasBailOutInfo())
- {
- const auto bailOutKind = instr->GetBailOutKind();
- if (bailOutKind & IR::BailOutOnResultConditions ||
- bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
- {
- const auto nonBailOutInstr = SplitBailOnResultCondition(instr);
- IR::LabelInstr *bailOutLabel, *skipBailOutLabel;
- LowerBailOnResultCondition(instr, &bailOutLabel, &skipBailOutLabel);
- LowerInstrWithBailOnResultCondition(nonBailOutInstr, bailOutKind, bailOutLabel, skipBailOutLabel);
- }
- else if (bailOutKind == IR::BailOnModByPowerOf2)
- {
- Assert(instr->m_opcode == Js::OpCode::Rem_I4);
- bool fastPath = GenerateSimplifiedInt4Rem(instr);
- Assert(fastPath);
- instr->FreeSrc1();
- instr->FreeSrc2();
- this->GenerateBailOut(instr);
- }
- }
- else
- {
- if (instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4)
- {
- // fast path
- this->GenerateSimplifiedInt4Rem(instr);
- // slow path
- this->LowerRemI4(instr);
- }
- #if defined(_M_IX86) || defined(_M_X64)
- else if (instr->m_opcode == Js::OpCode::Mul_I4)
- {
- if (!LowererMD::GenerateSimplifiedInt4Mul(instr))
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- }
- #endif
- else
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- }
- break;
- case Js::OpCode::TrapIfMinIntOverNegOne:
- LowerTrapIfMinIntOverNegOne(instr);
- break;
- case Js::OpCode::TrapIfTruncOverflow:
- LowererMD::ChangeToAssign(instr);
- break;
- case Js::OpCode::TrapIfZero:
- LowerTrapIfZero(instr);
- break;
- case Js::OpCode::TrapIfUnalignedAccess:
- instrPrev = LowerTrapIfUnalignedAccess(instr);
- break;
- case Js::OpCode::DivU_I4:
- case Js::OpCode::Div_I4:
- this->LowerDivI4(instr);
- break;
- case Js::OpCode::Typeof:
- m_lowererMD.LowerTypeof(instr);
- break;
- case Js::OpCode::TypeofElem:
- this->LowerLdElemI(instr, IR::HelperOp_TypeofElem, false);
- break;
- case Js::OpCode::LdLen_A:
- {
- bool fastPath = !noMathFastPath;
- if (!fastPath && instr->HasBailOutInfo())
- {
- // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
- // bailouts require the fast path.
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if (bailOutKind & IR::BailOutKindBits)
- {
- fastPath = true;
- }
- else
- {
- const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
- fastPath =
- bailOutKindMinusBits &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
- }
- }
- bool instrIsInHelperBlock = false;
- if (!fastPath)
- {
- LowerLdLen(instr, false);
- }
- else if (GenerateFastLdLen(instr, &instrIsInHelperBlock))
- {
- Assert(
- !instr->HasBailOutInfo() ||
- (instr->GetBailOutKind() & ~IR::BailOutKindBits) != IR::BailOutOnIrregularLength);
- LowerLdLen(instr, instrIsInHelperBlock);
- }
- break;
- }
- case Js::OpCode::LdThis:
- {
- if (noFieldFastPath || !GenerateLdThisCheck(instr))
- {
- IR::JnHelperMethod meth;
- if (instr->IsJitProfilingInstr())
- {
- Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
- m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
- meth = IR::HelperSimpleProfiledLdThis;
- this->LowerBinaryHelper(instr, meth);
- }
- else
- {
- meth = IR::HelperLdThisNoFastPath;
- this->LowerBinaryHelperMem(instr, meth);
- }
- }
- else
- {
- this->LowerBinaryHelperMem(instr, IR::HelperLdThis);
- }
- break;
- }
- case Js::OpCode::LdNativeCodeData:
- Assert(m_func->IsOOPJIT());
- instrPrev = LowerLdNativeCodeData(instr);
- break;
- case Js::OpCode::StrictLdThis:
- if (noFieldFastPath)
- {
- IR::JnHelperMethod meth;
- if (instr->IsJitProfilingInstr())
- {
- Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
- m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
- meth = IR::HelperSimpleProfiledStrictLdThis;
- this->LowerUnaryHelper(instr, meth);
- }
- else
- {
- meth = IR::HelperStrictLdThis;
- this->LowerUnaryHelperMem(instr, meth);
- }
- }
- else
- {
- this->GenerateLdThisStrict(instr);
- instr->Remove();
- }
- break;
- case Js::OpCode::CheckThis:
- GenerateLdThisCheck(instr);
- instr->FreeSrc1();
- this->GenerateBailOut(instr);
- break;
- case Js::OpCode::StrictCheckThis:
- this->GenerateLdThisStrict(instr);
- instr->FreeSrc1();
- this->GenerateBailOut(instr);
- break;
- case Js::OpCode::NewScArray:
- instrPrev = this->LowerNewScArray(instr);
- break;
- case Js::OpCode::NewScArrayWithMissingValues:
- this->LowerUnaryHelperMem(instr, IR::HelperScrArr_OP_NewScArrayWithMissingValues);
- break;
- case Js::OpCode::NewScIntArray:
- instrPrev = this->LowerNewScIntArray(instr);
- break;
- case Js::OpCode::NewScFltArray:
- instrPrev = this->LowerNewScFltArray(instr);
- break;
- case Js::OpCode::InitForInEnumerator:
- this->LowerInitForInEnumerator(instr);
- break;
- case Js::OpCode::Add_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- // we don't want to mix float32 and float64
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Add);
- }
- else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
- {
- }
- else
- {
- m_lowererMD.GenerateFastAdd(instr);
- this->LowerBinaryHelperMemWithTemp3(instr, IR_HELPER_OP_FULL_OR_INPLACE(Add), IR::HelperOp_AddLeftDead);
- }
- break;
- case Js::OpCode::Div_A:
- {
- if (instr->IsJitProfilingInstr()) {
- LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledDivide);
- }
- else if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else
- {
- if (!PHASE_OFF(Js::MathFastPathPhase, this->m_func) && !noMathFastPath)
- {
- IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
- if (src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address))
- {
- int32 value = Js::TaggedInt::ToInt32(src2->m_address);
- if (Math::IsPow2(value))
- {
- m_lowererMD.GenerateFastDivByPow2(instr);
- }
- }
- }
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Divide));
- }
- break;
- }
- case Js::OpCode::Expo_A:
- {
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
- }
- else
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Exponentiation));
- }
- break;
- }
- case Js::OpCode::Mul_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Multiply);
- }
- else if (m_lowererMD.GenerateFastMul(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Multiply));
- }
- break;
- case Js::OpCode::Rem_A:
- if (instr->GetDst()->IsFloat64())
- {
- this->LowerRemR8(instr);
- }
- else if (instr->IsJitProfilingInstr())
- {
- this->LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledRemainder);
- }
- else
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Modulus));
- }
- break;
- case Js::OpCode::Sub_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Subtract);
- }
- else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
- {
- }
- else
- {
- m_lowererMD.GenerateFastSub(instr);
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Subtract));
- }
- break;
- case Js::OpCode::And_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_And);
- }
- else if (m_lowererMD.GenerateFastAnd(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(And));
- }
- break;
- case Js::OpCode::Or_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Or);
- }
- else if (m_lowererMD.GenerateFastOr(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Or));
- }
- break;
- case Js::OpCode::Xor_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastXor(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Xor));
- }
- break;
- case Js::OpCode::Shl_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftLeft(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftLeft);
- }
- break;
- case Js::OpCode::Shr_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRight);
- }
- break;
- case Js::OpCode::ShrU_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRightU);
- }
- break;
- case Js::OpCode::CmEq_A:
- {
- instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmEq_A);
- break;
- }
- case Js::OpCode::CmNeq_A:
- {
- instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmNeq_A);
- break;
- }
- case Js::OpCode::CmSrEq_A:
- instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrEq_A);
- break;
- case Js::OpCode::CmSrNeq_A:
- instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrNeq_A);
- break;
- case Js::OpCode::CmGt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGt_A);
- }
- break;
- case Js::OpCode::CmGe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGe_A);
- }
- break;
- case Js::OpCode::CmLt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLt_A);
- }
- break;
- case Js::OpCode::CmLe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLe_A);
- }
- break;
- case Js::OpCode::CmEq_I4:
- case Js::OpCode::CmNeq_I4:
- case Js::OpCode::CmGe_I4:
- case Js::OpCode::CmGt_I4:
- case Js::OpCode::CmLe_I4:
- case Js::OpCode::CmLt_I4:
- case Js::OpCode::CmUnGe_I4:
- case Js::OpCode::CmUnGt_I4:
- case Js::OpCode::CmUnLe_I4:
- case Js::OpCode::CmUnLt_I4:
- this->m_lowererMD.GenerateFastCmXxI4(instr);
- break;
- case Js::OpCode::Conv_Bool:
- instrPrev = this->m_lowererMD.GenerateConvBool(instr);
- break;
- case Js::OpCode::IsInst:
- this->GenerateFastIsInst(instr);
- instrPrev = this->LowerIsInst(instr, IR::HelperScrObj_OP_IsInst);
- break;
- case Js::OpCode::IsIn:
- this->GenerateFastArrayIsIn(instr);
- this->GenerateFastObjectIsIn(instr);
- this->LowerBinaryHelperMem(instr, IR::HelperOp_IsIn);
- break;
- case Js::OpCode::LdArrViewElem:
- instrPrev = LowerLdArrViewElem(instr);
- break;
- case Js::OpCode::StAtomicWasm:
- instrPrev = LowerStAtomicsWasm(instr);
- break;
- case Js::OpCode::StArrViewElem:
- instrPrev = LowerStArrViewElem(instr);
- break;
- case Js::OpCode::LdAtomicWasm:
- instrPrev = LowerLdAtomicsWasm(instr);
- break;
- case Js::OpCode::LdArrViewElemWasm:
- instrPrev = LowerLdArrViewElemWasm(instr);
- break;
- case Js::OpCode::Memset:
- case Js::OpCode::Memcopy:
- {
- instrPrev = LowerMemOp(instr);
- break;
- }
- case Js::OpCode::ArrayDetachedCheck:
- instrPrev = LowerArrayDetachedCheck(instr);
- break;
- case Js::OpCode::StElemI_A:
- case Js::OpCode::StElemI_A_Strict:
- {
- // Note: under debugger (Fast F12) don't let GenerateFastStElemI which calls into ToNumber_Helper
- // which takes double, and currently our helper wrapper doesn't support double.
- bool fastPath = !noMathFastPath && !m_func->IsJitInDebugMode();
- if (!fastPath && instr->HasBailOutInfo())
- {
- // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
- // bailouts require the fast path.
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- const IR::BailOutKind bailOutKindBits = bailOutKind & IR::BailOutKindBits;
- if (bailOutKindBits & ~(IR::BailOutOnMissingValue | IR::BailOutConvertedNativeArray))
- {
- fastPath = true;
- }
- else
- {
- const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
- fastPath =
- bailOutKindMinusBits &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
- }
- }
- IR::Opnd * opnd = instr->GetDst();
- IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
- ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
- if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
- {
- baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
- }
- bool instrIsInHelperBlock = false;
- if (!fastPath)
- {
- this->LowerStElemI(
- instr,
- instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
- false);
- }
- else if (GenerateFastStElemI(instr, &instrIsInHelperBlock))
- {
- #if DBG
- if (instr->HasBailOutInfo())
- {
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- Assert(
- (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
- !(
- bailOutKind &
- (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
- ));
- }
- #endif
- this->LowerStElemI(
- instr,
- instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
- instrIsInHelperBlock);
- }
- break;
- }
- case Js::OpCode::LdElemI_A:
- case Js::OpCode::LdMethodElem:
- {
- bool fastPath =
- !noMathFastPath &&
- (
- instr->m_opcode != Js::OpCode::LdMethodElem ||
- instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyObject()
- );
- if (!fastPath && instr->HasBailOutInfo())
- {
- // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
- // bailouts require the fast path.
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if (bailOutKind & IR::BailOutKindBits)
- {
- fastPath = true;
- }
- else
- {
- const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
- fastPath =
- bailOutKindMinusBits &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
- }
- }
- IR::Opnd * opnd = instr->GetSrc1();
- IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
- ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
- if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
- {
- baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
- }
- bool instrIsInHelperBlock = false;
- if (!fastPath)
- {
- this->LowerLdElemI(
- instr,
- instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
- false);
- }
- else if (GenerateFastLdElemI(instr, &instrIsInHelperBlock))
- {
- #if DBG
- if (instr->HasBailOutInfo())
- {
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- Assert(
- (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
- !(
- bailOutKind &
- (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
- ));
- }
- #endif
- this->LowerLdElemI(
- instr,
- instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
- instrIsInHelperBlock);
- }
- break;
- }
- case Js::OpCode::InitSetElemI:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemSetter);
- break;
- case Js::OpCode::InitGetElemI:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemGetter);
- break;
- case Js::OpCode::InitComputedProperty:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitComputedProperty);
- break;
- case Js::OpCode::Delete_A:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Delete);
- break;
- case Js::OpCode::DeleteElemI_A:
- this->LowerDeleteElemI(instr, false);
- break;
- case Js::OpCode::DeleteElemIStrict_A:
- this->LowerDeleteElemI(instr, true);
- break;
- case Js::OpCode::BytecodeArgOutCapture:
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::UnwrapWithObj:
- this->LowerUnaryHelper(instr, IR::HelperOp_UnwrapWithObj);
- break;
- #ifdef ENABLE_WASM
- case Js::OpCode::CheckWasmSignature:
- this->LowerCheckWasmSignature(instr);
- break;
- case Js::OpCode::LdWasmFunc:
- instrPrev = this->LowerLdWasmFunc(instr);
- break;
- case Js::OpCode::GrowWasmMemory:
- instrPrev = this->LowerGrowWasmMemory(instr);
- break;
- #endif
- case Js::OpCode::Ld_I4:
- LowererMD::ChangeToAssign(instr);
- break;
- case Js::OpCode::LdAsmJsFunc:
- if (instr->GetSrc1()->IsIndirOpnd())
- {
- IR::IndirOpnd* indir = instr->GetSrc1()->AsIndirOpnd();
- byte scale = m_lowererMD.GetDefaultIndirScale();
- if (!indir->GetIndexOpnd())
- {
- // If we have a constant offset, we need to apply the scale now
- int32 offset;
- if (Int32Math::Shl(1, scale, &offset) || Int32Math::Mul(offset, indir->GetOffset(), &offset))
- {
- // The constant is too big to offset this array. Throw out of range.
- // Todo:: throw a better error message for this scenario
- GenerateRuntimeError(instr, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
- }
- indir->SetOffset(offset);
- }
- else
- {
- indir->SetScale(scale);
- }
- }
- //fallthrough
- case Js::OpCode::Ld_A:
- case Js::OpCode::InitConst:
- if (instr->IsJitProfilingInstr() && instr->AsJitProfilingInstr()->isBeginSwitch) {
- LowerProfiledBeginSwitch(instr->AsJitProfilingInstr());
- break;
- }
- m_lowererMD.ChangeToAssign(instr);
- if (instr->HasBailOutInfo())
- {
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if (bailOutKind == IR::BailOutExpectingString)
- {
- this->LowerBailOnNotString(instr);
- }
- else
- {
- // Should not reach here as there are only 1 BailOutKind (BailOutExpectingString) currently associated with the Load Instr
- Assert(false);
- }
- }
- break;
- case Js::OpCode::LdIndir:
- Assert(instr->GetDst());
- Assert(instr->GetDst()->IsRegOpnd());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->IsIndirOpnd());
- Assert(!instr->GetSrc2());
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::FromVar:
- Assert(instr->GetSrc1()->GetType() == TyVar);
- if (instr->GetDst()->GetType() == TyInt32)
- {
- if (m_lowererMD.EmitLoadInt32(instr, !(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnNotPrimitive))))
- {
- // Bail out instead of calling a helper
- Assert(instr->GetBailOutKind() == IR::BailOutIntOnly || instr->GetBailOutKind() == IR::BailOutExpectingInteger);
- Assert(!instr->GetSrc1()->GetValueType().IsInt()); // when we know it's an int, it should not have bailout info, to avoid generating a bailout path that will never be taken
- instr->UnlinkSrc1();
- instr->UnlinkDst();
- GenerateBailOut(instr);
- }
- }
- else if (instr->GetDst()->IsFloat())
- {
- if (m_func->GetJITFunctionBody()->IsAsmJsMode())
- {
- m_lowererMD.EmitLoadFloat(instr->GetDst(), instr->GetSrc1(), instr);
- instr->Remove();
- }
- else
- {
- m_lowererMD.EmitLoadFloatFromNumber(instr->GetDst(), instr->GetSrc1(), instr);
- }
- }
- else if (instr->GetDst()->IsInt64())
- {
- Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
- GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
- instr->ReplaceSrc1(IR::Int64ConstOpnd::New(0, TyInt64, m_func));
- LowererMD::ChangeToAssign(instr);
- }
- #ifdef ENABLE_WASM_SIMD
- else if (instr->GetDst()->IsSimd128())
- {
- Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
- GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
- instr->ReplaceSrc1(IR::Simd128ConstOpnd::New({ 0,0,0,0 }, instr->GetDst()->GetType(), m_func));
- LowererMD::ChangeToAssign(instr);
- }
- #endif
- else
- {
- Assert(UNREACHED);
- }
- break;
- case Js::OpCode::ArgOut_A:
- // I don't know if this can happen in asm.js mode, but if it can, we might want to handle differently
- Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
- // fall-through
- case Js::OpCode::ArgOut_A_Inline:
- case Js::OpCode::ArgOut_A_Dynamic:
- {
- // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
- // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
- // Change the ArgOut into a store to the stack for bailouts
- instr->FreeSrc2();
- StackSym *argSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
- argSym->m_offset = this->m_func->StackAllocate(sizeof(Js::Var));
- argSym->m_allocated = true;
- argSym->m_isOrphanedArg = true;
- this->m_lowererMD.ChangeToAssign(instr);
- }
- break;
- case Js::OpCode::LoweredStartCall:
- case Js::OpCode::StartCall:
- // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
- // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
- // We'll just delete these StartCalls during peeps.
- break;
- case Js::OpCode::ToVar:
- Assert(instr->GetDst()->GetType() == TyVar);
- if (instr->GetSrc1()->GetType() == TyInt32)
- {
- m_lowererMD.EmitLoadVar(instr);
- }
- else if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsRegOpnd());
- IR::RegOpnd* float64Opnd = instr->GetSrc1()->AsRegOpnd();
- if (float64Opnd->IsFloat32())
- {
- IR::RegOpnd* float64ConvOpnd = IR::RegOpnd::New(TyFloat64, m_func);
- m_lowererMD.EmitFloat32ToFloat64(float64ConvOpnd, float64Opnd, instr);
- float64Opnd = float64ConvOpnd;
- }
- m_lowererMD.SaveDoubleToVar(
- instr->GetDst()->AsRegOpnd(),
- float64Opnd, instr, instr);
- instr->Remove();
- }
- else if (instr->GetSrc1()->IsInt64() || instr->GetSrc1()->IsSimd128())
- {
- Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
- GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
- instr->ReplaceSrc1(IR::IntConstOpnd::New(0, TyMachReg, m_func));
- LowererMD::ChangeToAssign(instr);
- }
- else
- {
- Assert(UNREACHED);
- }
- break;
- case Js::OpCode::Conv_Prim_Sat:
- {
- GenerateTruncWithCheck<true /* Saturate */>(instr);
- break;
- }
- case Js::OpCode::Conv_Prim:
- {
- if (IR::Instr::FindSingleDefInstr(Js::OpCode::TrapIfTruncOverflow, instr->GetSrc1()))
- {
- GenerateTruncWithCheck<false /* Saturate */>(instr);
- break;
- }
- if (instr->GetDst()->IsFloat())
- {
- if (instr->GetSrc1()->IsIntConstOpnd())
- {
- LoadFloatFromNonReg(instr->UnlinkSrc1(), instr->UnlinkDst(), instr);
- }
- else if (instr->GetSrc1()->IsInt32())
- {
- m_lowererMD.EmitIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else if (instr->GetSrc1()->IsUInt32())
- {
- m_lowererMD.EmitUIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else if (instr->GetSrc1()->IsInt64())
- {
- m_lowererMD.EmitInt64toFloat(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else
- {
- Assert(instr->GetDst()->IsFloat64());
- Assert(instr->GetSrc1()->IsFloat32());
- m_lowererMD.EmitFloat32ToFloat64(instr->GetDst(), instr->GetSrc1(), instr);
- }
- }
- else if (instr->GetDst()->IsInt64())
- {
- if (instr->GetSrc1()->IsInt32())
- {
- m_lowererMD.EmitIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else if (instr->GetSrc1()->IsUInt32())
- {
- m_lowererMD.EmitUIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else if (instr->GetSrc1()->IsInt64() && instr->GetSrc2())
- {
- m_lowererMD.EmitSignExtend(instr);
- }
- else
- {
- Assert(0);
- }
- }
- else
- {
- Assert(instr->GetDst()->IsInt32());
- if (instr->GetSrc1()->IsInt64())
- {
- m_lowererMD.EmitLongToInt(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else if ((instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32()) && instr->GetSrc2())
- {
- m_lowererMD.EmitSignExtend(instr);
- }
- else
- {
- Assert(instr->GetSrc1()->IsFloat());
- m_lowererMD.EmitFloatToInt(instr->GetDst(), instr->GetSrc1(), instr);
- }
- }
- instr->Remove();
- break;
- }
- case Js::OpCode::FunctionExit:
- LowerFunctionExit(instr);
- // The rest of Epilog generation happens after reg allocation
- break;
- case Js::OpCode::FunctionEntry:
- LowerFunctionEntry(instr);
- // The rest of Prolog generation happens after reg allocation
- break;
- case Js::OpCode::ArgIn_Rest:
- case Js::OpCode::ArgIn_A:
- if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
- {
- instrPrev = LowerArgInAsmJs(instr);
- }
- else
- {
- instrPrev = LowerArgIn(instr);
- }
- break;
- case Js::OpCode::Label:
- if (instr->AsLabelInstr()->m_isLoopTop)
- {
- if (this->outerMostLoopLabel == instr)
- {
- noFieldFastPath = !defaultDoFastPath;
- noMathFastPath = !defaultDoFastPath;
- this->outerMostLoopLabel = nullptr;
- instr->AsLabelInstr()->GetLoop()->isProcessed = true;
- }
- this->m_func->MarkConstantAddressSyms(instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms);
- instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms->Or(this->addToLiveOnBackEdgeSyms);
- }
- break;
- case Js::OpCode::Br:
- instr->m_opcode = LowererMD::MDUncondBranchOpcode;
- break;
- case Js::OpCode::BrFncEqApply:
- LowerBrFncApply(instr, IR::HelperOp_OP_BrFncEqApply);
- break;
- case Js::OpCode::BrFncNeqApply:
- LowerBrFncApply(instr, IR::HelperOp_OP_BrFncNeqApply);
- break;
- case Js::OpCode::BrHasSideEffects:
- case Js::OpCode::BrNotHasSideEffects:
- m_lowererMD.GenerateFastBrS(instr->AsBranchInstr());
- break;
- case Js::OpCode::BrFalse_A:
- case Js::OpCode::BrTrue_A:
- if (instr->GetSrc1()->IsFloat())
- {
- GenerateFastBrBool(instr->AsBranchInstr());
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) ||
- noMathFastPath ||
- GenerateFastBrBool(instr->AsBranchInstr()))
- {
- this->LowerBrBMem(instr, IR::HelperConv_ToBoolean);
- }
- break;
- case Js::OpCode::BrOnObject_A:
- if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBrOnObject(instr, IR::HelperOp_IsObject);
- }
- else
- {
- GenerateFastBrOnObject(instr);
- }
- break;
- case Js::OpCode::BrOnBaseConstructorKind:
- this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsBaseConstructorKind);
- break;
- case Js::OpCode::BrOnClassConstructor:
- this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsClassConstructor);
- break;
- case Js::OpCode::BrAddr_A:
- case Js::OpCode::BrNotAddr_A:
- case Js::OpCode::BrNotNull_A:
- m_lowererMD.LowerCondBranch(instr);
- break;
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrNotNeq_A:
- instrPrev = LowerEqualityBranch(instr, IR::HelperOp_Equal);
- break;
- case Js::OpCode::BrGe_A:
- case Js::OpCode::BrNotGe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrGt_A:
- case Js::OpCode::BrNotGt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_Greater, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_Greater, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrLt_A:
- case Js::OpCode::BrNotLt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_Less, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_Less, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrLe_A:
- case Js::OpCode::BrNotLe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_LessEqual, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_LessEqual, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrNeq_A:
- case Js::OpCode::BrNotEq_A:
- instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotEqual);
- break;
- case Js::OpCode::MultiBr:
- {
- IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();
- switch (multiBranchInstr->m_kind)
- {
- case IR::MultiBranchInstr::StrDictionary:
- this->GenerateSwitchStringLookup(instr);
- break;
- case IR::MultiBranchInstr::SingleCharStrJumpTable:
- this->GenerateSingleCharStrJumpTableLookup(instr);
- m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
- break;
- case IR::MultiBranchInstr::IntJumpTable:
- this->LowerMultiBr(instr);
- m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
- break;
- default:
- Assert(false);
- }
- break;
- }
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- instrPrev = LowerEqualityBranch(instr, IR::HelperOp_StrictEqual);
- break;
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotStrictEqual);
- break;
- case Js::OpCode::BrOnEmpty:
- case Js::OpCode::BrOnNotEmpty:
- if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func))
- {
- this->GenerateFastBrBReturn(instr);
- this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, true);
- }
- else
- {
- this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, false);
- }
- break;
- case Js::OpCode::BrOnHasProperty:
- case Js::OpCode::BrOnNoProperty:
- this->LowerBrProperty(instr, IR::HelperOp_HasProperty);
- break;
- case Js::OpCode::BrOnException:
- Assert(!this->m_func->DoGlobOpt());
- instr->Remove();
- break;
- case Js::OpCode::BrOnNoException:
- instr->m_opcode = LowererMD::MDUncondBranchOpcode;
- break;
- case Js::OpCode::StSlot:
- {
- PropertySym *propertySym = instr->GetDst()->AsSymOpnd()->m_sym->AsPropertySym();
- instrPrev = AddSlotArrayCheck(propertySym, instr);
- this->LowerStSlot(instr);
- break;
- }
- case Js::OpCode::StSlotChkUndecl:
- {
- PropertySym *propertySym = instr->GetDst()->AsSymOpnd()->m_sym->AsPropertySym();
- instrPrev = AddSlotArrayCheck(propertySym, instr);
- this->LowerStSlotChkUndecl(instr);
- break;
- }
- case Js::OpCode::ProfiledLoopStart:
- {
- Assert(m_func->DoSimpleJitDynamicProfile());
- Assert(instr->IsJitProfilingInstr());
- // Check for the helper instr from IRBuilding (it won't be there if there are no LoopEnds due to an infinite loop)
- auto prev = instr->m_prev;
- if (prev->IsJitProfilingInstr() && prev->AsJitProfilingInstr()->isLoopHelper)
- {
- auto saveOpnd = prev->UnlinkDst();
- instrPrev = prev->m_prev;
- prev->Remove();
- const auto starFlag = GetImplicitCallFlagsOpnd();
- IR::AutoReuseOpnd a(starFlag, m_func);
- this->InsertMove(saveOpnd, starFlag, instr);
- this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), instr);
- }
- else
- {
- #if DBG
- // Double check that we indeed do not have a LoopEnd that is part of the same loop for the rest of the function
- auto cur = instr;
- auto loopNumber = instr->AsJitProfilingInstr()->loopNumber;
- while (cur)
- {
- Assert(cur->m_opcode != Js::OpCode::ProfiledLoopEnd || cur->IsJitProfilingInstr() && cur->AsJitProfilingInstr()->loopNumber != loopNumber);
- cur = cur->m_next;
- }
- #endif
- }
- // If we turned off fulljit, there's no reason to do this.
- if (PHASE_OFF(Js::FullJitPhase, m_func))
- {
- instr->Remove();
- }
- else
- {
- Assert(instr->GetDst());
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleGetScheduledEntryPoint, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
- this->m_lowererMD.LowerCall(instr, 0);
- }
- break;
- }
- case Js::OpCode::ProfiledLoopBodyStart:
- {
- Assert(m_func->DoSimpleJitDynamicProfile());
- const auto loopNum = instr->AsJitProfilingInstr()->loopNumber;
- Assert(loopNum < m_func->GetJITFunctionBody()->GetLoopCount());
- auto entryPointOpnd = instr->UnlinkSrc1();
- auto dobailout = instr->UnlinkDst();
- const auto dobailoutType = TyUint8;
- Assert(dobailout->GetType() == TyUint8 && sizeof(decltype(Js::SimpleJitHelpers::IsLoopCodeGenDone(nullptr))) == 1);
- m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(0, TyUint32, m_func)); // zero indicates that we do not want to add flags back in
- m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(loopNum, TyUint32, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
- m_lowererMD.LowerCall(instr, 0);
- // Outline of JITed code:
- //
- // LoopStart:
- // entryPoint = GetScheduledEntryPoint(framePtr, loopNum)
- // LoopBodyStart:
- // uint8 dobailout;
- // if (entryPoint) {
- // dobailout = IsLoopCodeGenDone(entryPoint)
- // } else {
- // dobailout = ++interpretCount >= threshold
- // }
- // // already exists from IRBuilding:
- // if (dobailout) {
- // Bailout
- // }
- if (PHASE_OFF(Js::FullJitPhase, m_func) || !m_func->GetJITFunctionBody()->DoJITLoopBody())
- {
- // If we're not doing fulljit, we've turned off JitLoopBodies, or if we don't have loop headers allocated (the function has a Try, etc)
- // just move false to dobailout
- this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), instr->m_next);
- }
- else if (m_func->GetWorkItem()->GetJITTimeInfo()->ForceJITLoopBody())
- {
- // If we're forcing jit loop bodies, move true to dobailout
- this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), instr->m_next);
- }
- else
- {
- // Put in the labels
- auto entryPointIsNull = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- auto checkDoBailout = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- instr->InsertAfter(checkDoBailout);
- instr->InsertAfter(entryPointIsNull);
- this->InsertCompareBranch(entryPointOpnd, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindDynamicMisc, m_func), Js::OpCode::BrEq_A, false, entryPointIsNull, instr->m_next);
- // If the entry point is not null
- auto isCodeGenDone = IR::Instr::New(Js::OpCode::Call, dobailout, IR::HelperCallOpnd::New(IR::HelperSimpleIsLoopCodeGenDone, m_func), m_func);
- entryPointIsNull->InsertBefore(isCodeGenDone);
- m_lowererMD.LoadHelperArgument(isCodeGenDone, entryPointOpnd);
- m_lowererMD.LowerCall(isCodeGenDone, 0);
- this->InsertBranch(LowererMD::MDUncondBranchOpcode, true, checkDoBailout, entryPointIsNull);
- const auto type = TyUint32;
- auto countReg = IR::RegOpnd::New(type, m_func);
- auto countAddr = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderAddr(loopNum) + Js::LoopHeader::GetOffsetOfInterpretCount(), type, m_func);
- IR::AutoReuseOpnd a(countReg, m_func), b(countAddr, m_func);
- this->InsertAdd(false, countReg, countAddr, IR::IntConstOpnd::New(1, type, m_func, true), checkDoBailout);
- this->InsertMove(countAddr, countReg, checkDoBailout);
- this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), checkDoBailout);
- this->InsertCompareBranch(countReg, IR::IntConstOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderData(loopNum)->interpretCount, type, m_func), Js::OpCode::BrLt_A, checkDoBailout, checkDoBailout);
- this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), checkDoBailout);
- // fallthrough
- // Label checkDoBailout (inserted above)
- }
- }
- break;
- case Js::OpCode::ProfiledLoopEnd:
- {
- Assert(m_func->DoSimpleJitDynamicProfile());
- // This is set up in IRBuilding
- Assert(instr->GetSrc1());
- IR::Opnd* savedFlags = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, savedFlags);
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
- m_lowererMD.LowerCall(instr, 0);
- }
- break;
- case Js::OpCode::InitLoopBodyCount:
- Assert(this->m_func->IsLoopBody());
- instr->SetSrc1(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
- this->m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::StLoopBodyCount:
- Assert(this->m_func->IsLoopBody());
- this->LowerStLoopBodyCount(instr);
- break;
- case Js::OpCode::IncrLoopBodyCount:
- {
- Assert(this->m_func->IsLoopBody());
- instr->m_opcode = Js::OpCode::Add_I4;
- instr->SetSrc2(IR::IntConstOpnd::New(1, TyUint32, this->m_func));
- this->m_lowererMD.EmitInt4Instr(instr);
- // Update the jittedLoopIterations field on the entryPointInfo
- IR::MemRefOpnd *iterationsAddressOpnd = IR::MemRefOpnd::New(this->m_func->GetJittedLoopIterationsSinceLastBailoutAddress(), TyUint32, this->m_func);
- InsertMove(iterationsAddressOpnd, instr->GetDst(), instr);
- break;
- }
- #if !FLOATVAR
- case Js::OpCode::StSlotBoxTemp:
- this->LowerStSlotBoxTemp(instr);
- break;
- #endif
- case Js::OpCode::LdSlot:
- {
- PropertySym *propertySym = instr->GetSrc1()->AsSymOpnd()->m_sym->AsPropertySym();
- instrPrev = AddSlotArrayCheck(propertySym, instr);
- }
- case Js::OpCode::LdSlotArr:
- {
- Js::ProfileId profileId;
- IR::Instr *profileBeforeInstr;
- if (instr->IsJitProfilingInstr())
- {
- profileId = instr->AsJitProfilingInstr()->profileId;
- Assert(profileId != Js::Constants::NoProfileId);
- profileBeforeInstr = instr->m_next;
- }
- else
- {
- profileId = Js::Constants::NoProfileId;
- profileBeforeInstr = nullptr;
- }
- this->LowerLdSlot(instr);
- if (profileId != Js::Constants::NoProfileId)
- {
- LowerProfileLdSlot(instr->GetDst(), instr->m_func, profileId, profileBeforeInstr);
- }
- break;
- }
- case Js::OpCode::ChkUndecl:
- instrPrev = this->LowerChkUndecl(instr);
- break;
- case Js::OpCode::LdArrHead:
- this->LowerLdArrHead(instr);
- break;
- case Js::OpCode::StElemC:
- case Js::OpCode::StArrSegElemC:
- this->LowerStElemC(instr);
- break;
- case Js::OpCode::LdEnv:
- instrPrev = this->LowerLdEnv(instr);
- break;
- case Js::OpCode::LdAsmJsEnv:
- instrPrev = this->LowerLdAsmJsEnv(instr);
- break;
- case Js::OpCode::LdElemUndef:
- this->LowerLdElemUndef(instr);
- break;
- case Js::OpCode::LdElemUndefScoped:
- this->LowerElementUndefinedScopedMem(instr, IR::HelperOp_LdElemUndefScoped);
- break;
- case Js::OpCode::EnsureNoRootFld:
- this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootProperty);
- break;
- case Js::OpCode::EnsureNoRootRedeclFld:
- this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootRedeclProperty);
- break;
- case Js::OpCode::EnsureCanDeclGloFunc:
- this->LowerElementUndefined(instr, IR::HelperOp_EnsureCanDeclGloFunc);
- break;
- case Js::OpCode::ScopedEnsureNoRedeclFld:
- this->LowerElementUndefinedScoped(instr, IR::HelperOp_EnsureNoRedeclPropertyScoped);
- break;
- case Js::OpCode::LdFuncExpr:
- // src = function Expression
- LoadFuncExpression(instr);
- this->GenerateGetCurrentFunctionObject(instr);
- break;
- case Js::OpCode::LdNewTarget:
- this->GenerateLoadNewTarget(instr);
- break;
- case Js::OpCode::ChkNewCallFlag:
- this->GenerateCheckForCallFlagNew(instr);
- break;
- case Js::OpCode::StFuncExpr:
- // object.propid = src
- LowerStFld(instr, IR::HelperOp_StFunctionExpression, IR::HelperOp_StFunctionExpression, false);
- break;
- case Js::OpCode::InitLetFld:
- case Js::OpCode::InitRootLetFld:
- LowerStFld(instr, IR::HelperOp_InitLetFld, IR::HelperOp_InitLetFld, false);
- break;
- case Js::OpCode::InitConstFld:
- case Js::OpCode::InitRootConstFld:
- LowerStFld(instr, IR::HelperOp_InitConstFld, IR::HelperOp_InitConstFld, false);
- break;
- case Js::OpCode::InitUndeclRootLetFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootLetFld);
- break;
- case Js::OpCode::InitUndeclRootConstFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootConstFld);
- break;
- case Js::OpCode::InitUndeclConsoleLetFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleLetFld);
- break;
- case Js::OpCode::InitUndeclConsoleConstFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleConstFld);
- break;
- case Js::OpCode::InitClassMember:
- LowerStFld(instr, IR::HelperOp_InitClassMember, IR::HelperOp_InitClassMember, false);
- break;
- case Js::OpCode::InitClassMemberComputedName:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberComputedName);
- break;
- case Js::OpCode::InitClassMemberGetComputedName:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberGetComputedName);
- break;
- case Js::OpCode::InitClassMemberSetComputedName:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberSetComputedName);
- break;
- case Js::OpCode::InitClassMemberGet:
- instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberGet, IR::HelperOp_InitClassMemberGet, false);
- break;
- case Js::OpCode::InitClassMemberSet:
- instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberSet, IR::HelperOp_InitClassMemberSet, false);
- break;
- case Js::OpCode::NewStackFrameDisplay:
- this->LowerLdFrameDisplay(instr, m_func->DoStackFrameDisplay());
- break;
- case Js::OpCode::LdFrameDisplay:
- this->LowerLdFrameDisplay(instr, false);
- break;
- case Js::OpCode::LdInnerFrameDisplay:
- this->LowerLdInnerFrameDisplay(instr);
- break;
- case Js::OpCode::Throw:
- case Js::OpCode::InlineThrow:
- case Js::OpCode::EHThrow:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Throw);
- break;
- case Js::OpCode::TryCatch:
- instrPrev = this->LowerTry(instr, true /*try-catch*/);
- break;
- case Js::OpCode::TryFinally:
- instrPrev = this->LowerTry(instr, false /*try-finally*/);
- break;
- case Js::OpCode::Catch:
- instrPrev = this->LowerCatch(instr);
- break;
- case Js::OpCode::Finally:
- instr->Remove();
- break;
- case Js::OpCode::LeaveNull:
- if (this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout))
- {
- instr->Remove();
- }
- else
- {
- instrPrev = m_lowererMD.LowerLeaveNull(instr);
- }
- break;
- case Js::OpCode::Leave:
- if (this->m_func->HasTry() && this->m_func->DoOptimizeTry())
- {
- // Required in Register Allocator to mark region boundaries
- break;
- }
- instrPrev = this->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), false /*fromFinalLower*/, instr->AsBranchInstr()->m_isOrphanedLeave);
- break;
- case Js::OpCode::BailOnException:
- instrPrev = this->LowerBailOnException(instr);
- break;
- case Js::OpCode::BailOnEarlyExit:
- instrPrev = this->LowerBailOnEarlyExit(instr);
- break;
- case Js::OpCode::RuntimeTypeError:
- case Js::OpCode::InlineRuntimeTypeError:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeTypeError);
- break;
- case Js::OpCode::RuntimeReferenceError:
- case Js::OpCode::InlineRuntimeReferenceError:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
- break;
- case Js::OpCode::Break:
- // Inline breakpoint: for now do nothing.
- break;
- case Js::OpCode::Nop:
- // This may need support for debugging the JIT, but for now just remove the instruction.
- instr->Remove();
- break;
- case Js::OpCode::Unused:
- // Currently Unused is used with ScopedLdInst to keep the second dst alive, but we don't need to lower it.
- instr->Remove();
- break;
- case Js::OpCode::StatementBoundary:
- // This instruction is merely to help convey source info through the IR
- // and eventually generate the nativeOffset maps.
- #if DBG_DUMP && DBG
- // If we have a JITStatementBreakpoint, then we should break on this statement
- {
- uint32 statementIndex = instr->AsPragmaInstr()->m_statementIndex;
- if (Js::Configuration::Global.flags.StatementDebugBreak.Contains(instr->m_func->GetSourceContextId(), instr->m_func->GetLocalFunctionId(), statementIndex))
- {
- IR::Instr* tempinstr = instr;
- Assert(tempinstr != nullptr);
- // go past any labels, and then add a debug breakpoint
- while (tempinstr->m_next != nullptr && tempinstr->m_next->m_opcode == Js::OpCode::Label)
- {
- tempinstr = tempinstr->m_next;
- }
- this->m_lowererMD.GenerateDebugBreak(tempinstr);
- }
- }
- #endif
- break;
- case Js::OpCode::BailOnNotPolymorphicInlinee:
- instrPrev = LowerBailOnNotPolymorphicInlinee(instr);
- break;
- case Js::OpCode::BailOnNoSimdTypeSpec:
- case Js::OpCode::BailOnNoProfile:
- this->GenerateBailOut(instr, nullptr, nullptr);
- break;
- case Js::OpCode::BailOnNotSpreadable:
- instrPrev = this->LowerBailOnNotSpreadable(instr);
- break;
- case Js::OpCode::BailOnNotStackArgs:
- instrPrev = this->LowerBailOnNotStackArgs(instr);
- break;
- case Js::OpCode::BailOnEqual:
- case Js::OpCode::BailOnNotEqual:
- instrPrev = this->LowerBailOnEqualOrNotEqual(instr);
- break;
- case Js::OpCode::BailOnNegative:
- LowerBailOnNegative(instr);
- break;
- #ifdef ENABLE_SCRIPT_DEBUGGING
- case Js::OpCode::BailForDebugger:
- instrPrev = this->LowerBailForDebugger(instr);
- break;
- #endif
- case Js::OpCode::BailOnNotObject:
- instrPrev = this->LowerBailOnNotObject(instr);
- break;
- case Js::OpCode::CheckIsFuncObj:
- instrPrev = this->LowerCheckIsFuncObj(instr);
- break;
- case Js::OpCode::CheckFuncInfo:
- instrPrev = this->LowerCheckIsFuncObj(instr, true);
- break;
- case Js::OpCode::BailOnNotBuiltIn:
- instrPrev = this->LowerBailOnNotBuiltIn(instr);
- break;
- case Js::OpCode::BailOnNotArray:
- {
- IR::Instr *bailOnNotArray = nullptr, *bailOnMissingValue = nullptr;
- SplitBailOnNotArray(instr, &bailOnNotArray, &bailOnMissingValue);
- IR::RegOpnd *const arrayOpnd = LowerBailOnNotArray(bailOnNotArray);
- if (bailOnMissingValue)
- {
- LowerBailOnMissingValue(bailOnMissingValue, arrayOpnd);
- }
- break;
- }
- case Js::OpCode::BoundCheck:
- case Js::OpCode::UnsignedBoundCheck:
- LowerBoundCheck(instr);
- break;
- case Js::OpCode::BailTarget:
- instrPrev = this->LowerBailTarget(instr);
- break;
- case Js::OpCode::InlineeStart:
- this->LowerInlineeStart(instr);
- break;
- case Js::OpCode::EndCallForPolymorphicInlinee:
- instr->Remove();
- break;
- case Js::OpCode::InlineeEnd:
- this->LowerInlineeEnd(instr);
- break;
- case Js::OpCode::InlineBuiltInEnd:
- case Js::OpCode::InlineNonTrackingBuiltInEnd:
- this->LowerInlineBuiltIn(instr);
- break;
- case Js::OpCode::ExtendArg_A:
- if (instr->GetSrc1()->IsRegOpnd())
- {
- IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
- this->addToLiveOnBackEdgeSyms->Clear(src1->m_sym->m_id);
- }
- instr->Remove();
- break;
- case Js::OpCode::InlineBuiltInStart:
- case Js::OpCode::BytecodeArgOutUse:
- case Js::OpCode::ArgOut_A_InlineBuiltIn:
- instr->Remove();
- break;
- case Js::OpCode::DeadBrEqual:
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Equal);
- break;
- case Js::OpCode::DeadBrSrEqual:
- this->LowerBinaryHelperMem(instr, IR::HelperOp_StrictEqual);
- break;
- case Js::OpCode::DeadBrRelational:
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Greater);
- break;
- case Js::OpCode::DeadBrOnHasProperty:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_HasProperty);
- break;
- case Js::OpCode::DeletedNonHelperBranch:
- break;
- case Js::OpCode::InitClass:
- instrPrev = this->LowerInitClass(instr);
- break;
- case Js::OpCode::NewConcatStrMulti:
- this->LowerNewConcatStrMulti(instr);
- break;
- case Js::OpCode::NewConcatStrMultiBE:
- this->LowerNewConcatStrMultiBE(instr);
- break;
- case Js::OpCode::SetConcatStrMultiItem:
- this->LowerSetConcatStrMultiItem(instr);
- break;
- case Js::OpCode::SetConcatStrMultiItemBE:
- Assert(instr->GetSrc1()->IsRegOpnd());
- this->addToLiveOnBackEdgeSyms->Clear(instr->GetSrc1()->GetStackSym()->m_id);
- // code corresponding to it should already have been generated while lowering NewConcatStrMultiBE
- instr->Remove();
- break;
- case Js::OpCode::Conv_Str:
- this->LowerConvStr(instr);
- break;
- case Js::OpCode::Coerce_Str:
- this->LowerCoerseStr(instr);
- break;
- case Js::OpCode::Coerce_StrOrRegex:
- this->LowerCoerseStrOrRegex(instr);
- break;
- case Js::OpCode::Coerce_Regex:
- this->LowerCoerseRegex(instr);
- break;
- case Js::OpCode::Conv_PrimStr:
- this->LowerConvPrimStr(instr);
- break;
- case Js::OpCode::ClearAttributes:
- this->LowerBinaryHelper(instr, IR::HelperOP_ClearAttributes);
- break;
- case Js::OpCode::SpreadArrayLiteral:
- this->LowerSpreadArrayLiteral(instr);
- break;
- case Js::OpCode::CallIExtended:
- {
- // Currently, the only use for CallIExtended is a call that uses spread.
- Assert(IsSpreadCall(instr));
- instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_None);
- break;
- }
- case Js::OpCode::CallIExtendedNew:
- {
- // Currently, the only use for CallIExtended is a call that uses spread.
- Assert(IsSpreadCall(instr));
- instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_New);
- break;
- }
- case Js::OpCode::CallIExtendedNewTargetNew:
- {
- // Currently, the only use for CallIExtended is a call that uses spread.
- Assert(IsSpreadCall(instr));
- instrPrev = this->LowerSpreadCall(instr, (Js::CallFlags)(Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget));
- break;
- }
- case Js::OpCode::LdSpreadIndices:
- instr->Remove();
- break;
- case Js::OpCode::LdHomeObj:
- this->GenerateLdHomeObj(instr);
- break;
- case Js::OpCode::LdHomeObjProto:
- this->GenerateLdHomeObjProto(instr);
- break;
- case Js::OpCode::LdFuncObj:
- this->GenerateLdFuncObj(instr);
- break;
- case Js::OpCode::LdFuncObjProto:
- this->GenerateLdFuncObjProto(instr);
- break;
- case Js::OpCode::ImportCall:
- {
- IR::Opnd *src1Opnd = instr->UnlinkSrc1();
- IR::Opnd *functionObjOpnd = nullptr;
- m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
- LoadScriptContext(instr);
- m_lowererMD.LoadHelperArgument(instr, src1Opnd);
- m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperImportCall);
- break;
- }
- case Js::OpCode::SetComputedNameVar:
- {
- IR::Opnd *src2Opnd = instr->UnlinkSrc2();
- IR::Opnd *src1Opnd = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src2Opnd);
- m_lowererMD.LoadHelperArgument(instr, src1Opnd);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetComputedNameVar);
- break;
- }
- case Js::OpCode::InlineeMetaArg:
- {
- m_lowererMD.ChangeToAssign(instr);
- break;
- }
- case Js::OpCode::Yield:
- {
- instr->FreeSrc1(); // Source is not actually used by the backend other than to calculate lifetime
- IR::Opnd* dstOpnd = instr->UnlinkDst();
- // prm2 is the ResumeYieldData pointer per calling convention established in JavascriptGenerator::CallGenerator
- // This is the value the bytecode expects to be in the dst register of the Yield opcode after resumption.
- // Load it here after the bail-in.
- StackSym *resumeYieldDataSym = StackSym::NewImplicitParamSym(4, m_func);
- m_func->SetArgOffset(resumeYieldDataSym, (LowererMD::GetFormalParamOffset() + 1) * MachPtr);
- IR::SymOpnd * resumeYieldDataOpnd = IR::SymOpnd::New(resumeYieldDataSym, TyMachPtr, m_func);
- AssertMsg(instr->m_next->IsLabelInstr(), "Expect the resume label to immediately follow Yield instruction");
- InsertMove(dstOpnd, resumeYieldDataOpnd, instr->m_next->m_next);
- GenerateBailOut(instr);
- break;
- }
- case Js::OpCode::ResumeYield:
- case Js::OpCode::ResumeYieldStar:
- {
- IR::Opnd *srcOpnd1 = instr->UnlinkSrc1();
- IR::Opnd *srcOpnd2 = instr->m_opcode == Js::OpCode::ResumeYieldStar ? instr->UnlinkSrc2() : IR::AddrOpnd::NewNull(m_func);
- m_lowererMD.LoadHelperArgument(instr, srcOpnd2);
- m_lowererMD.LoadHelperArgument(instr, srcOpnd1);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperResumeYield);
- break;
- }
- case Js::OpCode::GeneratorResumeJumpTable:
- {
- // Lowered in LowerPrologEpilog so that the jumps introduced are not considered to be part of the flow for the RegAlloc phase.
- // Introduce a BailOutNoSave label if there were yield points that were elided due to optimizations. They could still be hit
- // if an active generator object had been paused at such a yield point when the function body was JITed. So safe guard such a
- // case by having the native code simply jump back to the interpreter for such yield points.
- IR::LabelInstr *bailOutNoSaveLabel = nullptr;
- m_func->MapUntilYieldOffsetResumeLabels([this, &bailOutNoSaveLabel](int, const YieldOffsetResumeLabel& yorl)
- {
- if (yorl.Second() == nullptr)
- {
- if (bailOutNoSaveLabel == nullptr)
- {
- bailOutNoSaveLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- }
- return true;
- }
- return false;
- });
- // Insert the bailoutnosave label somewhere along with a call to BailOutNoSave helper
- if (bailOutNoSaveLabel != nullptr)
- {
- IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
- IR::LabelInstr * exitTargetInstr;
- if (exitPrevInstr->IsLabelInstr())
- {
- exitTargetInstr = exitPrevInstr->AsLabelInstr();
- exitPrevInstr = exitPrevInstr->m_prev;
- }
- else
- {
- exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- exitPrevInstr->InsertAfter(exitTargetInstr);
- }
- bailOutNoSaveLabel->m_hasNonBranchRef = true;
- bailOutNoSaveLabel->isOpHelper = true;
- IR::Instr* bailOutCall = IR::Instr::New(Js::OpCode::Call, m_func);
- exitPrevInstr->InsertAfter(bailOutCall);
- exitPrevInstr->InsertAfter(bailOutNoSaveLabel);
- exitPrevInstr->InsertAfter(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, m_func));
- IR::RegOpnd * frameRegOpnd = IR::RegOpnd::New(nullptr, LowererMD::GetRegFramePointer(), TyMachPtr, m_func);
- m_lowererMD.LoadHelperArgument(bailOutCall, frameRegOpnd);
- m_lowererMD.ChangeToHelperCall(bailOutCall, IR::HelperNoSaveRegistersBailOutForElidedYield);
- m_func->m_bailOutNoSaveLabel = bailOutNoSaveLabel;
- }
- break;
- }
- case Js::OpCode::FrameDisplayCheck:
- instrPrev = this->LowerFrameDisplayCheck(instr);
- break;
- case Js::OpCode::SlotArrayCheck:
- instrPrev = this->LowerSlotArrayCheck(instr);
- break;
- #if DBG
- case Js::OpCode::CheckLowerIntBound:
- instrPrev = this->LowerCheckLowerIntBound(instr);
- break;
- case Js::OpCode::CheckUpperIntBound:
- instrPrev = this->LowerCheckUpperIntBound(instr);
- break;
- #endif
- #ifdef ENABLE_WASM
- case Js::OpCode::Copysign_A:
- m_lowererMD.GenerateCopysign(instr);
- break;
- case Js::OpCode::Trunc_A:
- if (!AutoSystemInfo::Data.SSE4_1Available())
- {
- m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_TruncFlt, IR::HelperDirectMath_TruncDb);
- break;
- }
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::Nearest_A:
- if (!AutoSystemInfo::Data.SSE4_1Available())
- {
- m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_NearestFlt, IR::HelperDirectMath_NearestDb);
- break;
- }
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::ThrowRuntimeError:
- GenerateThrow(instr->UnlinkSrc1(), instr);
- instr->Remove();
- break;
- #endif //ENABLE_WASM
- case Js::OpCode::SpeculatedLoadFence:
- {
- AssertOrFailFast(instr->m_kind == IR::InstrKindByteCodeUses);
- #ifdef _M_ARM
- AssertOrFailFastMsg(false, "We shouldn't perform this hoisting on ARM");
- #else
- IR::ByteCodeUsesInstr* bcuInstr = static_cast<IR::ByteCodeUsesInstr*>(instr);
- // Most of the time we're not going to be able to remove any masking in a loop, and
- // this instruction can be removed.
- if (bcuInstr->GetByteCodeUpwardExposedUsed() != nullptr && !bcuInstr->GetByteCodeUpwardExposedUsed()->IsEmpty())
- {
- // The generated code is:
- //
- // cmp rax, rax
- // for each symbol to mask:
- // reg(sym) = cmovne reg(sym), reg(sym)
- IR::RegOpnd* temp = IR::RegOpnd::New(TyUint8, instr->m_func);
- InsertMove(temp, IR::IntConstOpnd::New(0, TyUint8, instr->m_func), instr);
- IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
- cmp->SetSrc1(temp);
- cmp->SetSrc2(temp);
- instr->InsertBefore(cmp);
- m_lowererMD.Legalize(cmp);
- FOREACH_BITSET_IN_SPARSEBV(symid, bcuInstr->GetByteCodeUpwardExposedUsed())
- {
- StackSym* thisSym = instr->m_func->m_symTable->Find(symid)->AsStackSym();
- IR::RegOpnd* thisSymReg = IR::RegOpnd::New(thisSym, thisSym->GetType(), instr->m_func);
- Js::OpCode specBlockOp = thisSymReg->IsFloat() ? LowererMD::MDSpecBlockFNEOpcode : LowererMD::MDSpecBlockNEOpcode;
- IR::Instr* cmov = IR::Instr::New(specBlockOp, thisSymReg, thisSymReg, thisSymReg, instr->m_func);
- instr->InsertBefore(cmov);
- m_lowererMD.Legalize(cmov);
- } NEXT_BITSET_IN_SPARSEBV;
- }
- #endif
- instr->Remove();
- break;
- }
- case Js::OpCode::SpreadObjectLiteral:
- this->LowerBinaryHelperMem(instr, IR::HelperSpreadObjectLiteral);
- break;
- case Js::OpCode::Restify:
- instrPrev = this->LowerRestify(instr);
- break;
- case Js::OpCode::NewPropIdArrForCompProps:
- this->LowerUnaryHelperMem(instr, IR::HelperNewPropIdArrForCompProps);
- break;
- case Js::OpCode::StPropIdArrFromVar:
- instrPrev = this->LowerStPropIdArrFromVar(instr);
- break;
- default:
- #ifdef ENABLE_WASM_SIMD
- if (IsSimd128Opcode(instr->m_opcode))
- {
- instrPrev = m_lowererMD.Simd128Instruction(instr);
- break;
- }
- #endif
- AssertMsg(instr->IsLowered(), "Unknown opcode");
- if(!instr->IsLowered())
- {
- Fatal();
- }
- break;
- }
- #if DBG
- LegalizeVerifyRange(instrPrev ? instrPrev->m_next : instrStart,
- verifyLegalizeInstrNext ? verifyLegalizeInstrNext->m_prev : nullptr);
- this->helperCallCheckState = HelperCallCheckState_None;
- #endif
- } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
- Assert(this->outerMostLoopLabel == nullptr);
- }
- IR::Opnd *
- Lowerer::LoadFunctionInfoOpnd(IR::Instr * instr)
- {
- return IR::AddrOpnd::New(instr->m_func->GetWorkItem()->GetJITTimeInfo()->GetFunctionInfoAddr(), IR::AddrOpndKindDynamicFunctionInfo, instr->m_func);
- }
- IR::Instr *
- Lowerer::LoadFunctionBody(IR::Instr * instr)
- {
- return m_lowererMD.LoadHelperArgument(instr, LoadFunctionBodyOpnd(instr));
- }
- IR::Instr *
- Lowerer::LoadScriptContext(IR::Instr * instr)
- {
- return m_lowererMD.LoadHelperArgument(instr, LoadScriptContextOpnd(instr));
- }
- IR::Opnd *
- Lowerer::LoadFunctionBodyOpnd(IR::Instr * instr)
- {
- return IR::AddrOpnd::New(instr->m_func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, instr->m_func);
- }
- IR::Opnd *
- Lowerer::LoadScriptContextOpnd(IR::Instr * instr)
- {
- return IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetAddr(), IR::AddrOpndKindDynamicScriptContext, this->m_func);
- }
- IR::Opnd *
- Lowerer::LoadScriptContextValueOpnd(IR::Instr * instr, ScriptContextValue valueType)
- {
- ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
- switch (valueType)
- {
- case ScriptContextValue::ScriptContextNumberAllocator:
- return IR::AddrOpnd::New(scriptContextInfo->GetNumberAllocatorAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
- case ScriptContextValue::ScriptContextRecycler:
- return IR::AddrOpnd::New(scriptContextInfo->GetRecyclerAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
- default:
- Assert(false);
- return nullptr;
- }
- }
// Returns an operand addressing a well-known JavascriptLibrary value for the
// instruction's script context. Most results are AddrOpnds marked dontEncode
// (true) because the addresses are stable library singletons; the one MemRef
// case (ValueAbsDoubleCst) is a double-sized memory reference used for
// bit-masking the sign out of doubles. Asserts and returns nullptr for an
// unknown valueType.
IR::Opnd *
Lowerer::LoadLibraryValueOpnd(IR::Instr * instr, LibraryValue valueType)
{
    ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
    switch (valueType)
    {
    // Singleton Var values (dontEncode = true).
    case LibraryValue::ValueEmptyString:
        return IR::AddrOpnd::New(scriptContextInfo->GetEmptyStringAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndeclBlockVar:
        return IR::AddrOpnd::New(scriptContextInfo->GetUndeclBlockVarAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndefined:
        return IR::AddrOpnd::New(scriptContextInfo->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNull:
        return IR::AddrOpnd::New(scriptContextInfo->GetNullAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueTrue:
        return IR::AddrOpnd::New(scriptContextInfo->GetTrueAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueFalse:
        return IR::AddrOpnd::New(scriptContextInfo->GetFalseAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNegativeZero:
        return IR::AddrOpnd::New(scriptContextInfo->GetNegativeZeroAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);

    // Static (immutable) types (dontEncode = true).
    case LibraryValue::ValueNumberTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    case LibraryValue::ValueStringTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetStringTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    case LibraryValue::ValueSymbolTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetSymbolTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);

    // Dynamic object/array types (default encoding).
    case LibraryValue::ValueObjectType:
        return IR::AddrOpnd::New(scriptContextInfo->GetObjectTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueObjectHeaderInlinedType:
        return IR::AddrOpnd::New(scriptContextInfo->GetObjectHeaderInlinedTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueRegexType:
        return IR::AddrOpnd::New(scriptContextInfo->GetRegexTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueArrayConstructor:
        return IR::AddrOpnd::New(scriptContextInfo->GetArrayConstructorAddr(), IR::AddrOpndKindDynamicVar, instr->m_func);
    case LibraryValue::ValueJavascriptArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeIntArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetNativeIntArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeFloatArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetNativeFloatArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);

    // Miscellaneous values sourced from the thread context or script context.
    case LibraryValue::ValueConstructorCacheDefaultInstance:
        return IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetConstructorCacheDefaultInstanceAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
    case LibraryValue::ValueAbsDoubleCst:
        return IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsDoubleCstAddr(), TyMachDouble, instr->m_func, IR::AddrOpndKindDynamicDoubleRef);
    case LibraryValue::ValueCharStringCache:
        return IR::AddrOpnd::New(scriptContextInfo->GetCharStringCacheAddr(), IR::AddrOpndKindDynamicCharStringCache, instr->m_func);

    default:
        Assert(UNREACHED);
        return nullptr;
    }
}
- IR::Opnd *
- Lowerer::LoadVTableValueOpnd(IR::Instr * instr, VTableValue vtableType)
- {
- return IR::AddrOpnd::New((Js::Var)instr->m_func->GetScriptContextInfo()->GetVTableAddress(vtableType), IR::AddrOpndKindDynamicVtable, this->m_func);
- }
- IR::Opnd *
- Lowerer::LoadOptimizationOverridesValueOpnd(IR::Instr *instr, OptimizationOverridesValue valueType)
- {
- switch (valueType)
- {
- case OptimizationOverridesValue::OptimizationOverridesSideEffects:
- return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetSideEffectsAddr(), TyInt32, instr->m_func);
- case OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable:
- return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
- case OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable:
- return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
- case OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable:
- return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetFloatArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
- default:
- Assert(UNREACHED);
- return nullptr;
- }
- }
- IR::Opnd *
- Lowerer::LoadNumberAllocatorValueOpnd(IR::Instr *instr, NumberAllocatorValue valueType)
- {
- ScriptContextInfo *scriptContext = instr->m_func->GetScriptContextInfo();
- bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
- switch (valueType)
- {
- case NumberAllocatorValue::NumberAllocatorEndAddress:
- return IR::MemRefOpnd::New(((char *)scriptContext->GetNumberAllocatorAddr()) + Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, instr->m_func);
- case NumberAllocatorValue::NumberAllocatorFreeObjectList:
- return IR::MemRefOpnd::New(
- ((char *)scriptContext->GetNumberAllocatorAddr()) +
- (allowNativeCodeBumpAllocation ? Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset() : Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset()),
- TyMachPtr, instr->m_func);
- default:
- Assert(false);
- return nullptr;
- }
- }
- IR::Opnd *
- Lowerer::LoadIsInstInlineCacheOpnd(IR::Instr * instr, uint inlineCacheIndex)
- {
- intptr_t inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(inlineCacheIndex);
- return IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
- }
- IR::Opnd *
- Lowerer::LoadRuntimeInlineCacheOpnd(IR::Instr * instr, IR::PropertySymOpnd * propertySymOpnd, bool isHelper)
- {
- Assert(propertySymOpnd->m_runtimeInlineCache != 0);
- IR::Opnd * inlineCacheOpnd = nullptr;
- if (instr->m_func->GetJITFunctionBody()->HasInlineCachesOnFunctionObject() && !instr->m_func->IsInlinee())
- {
- inlineCacheOpnd = this->GetInlineCacheFromFuncObjectForRuntimeUse(instr, propertySymOpnd, isHelper);
- }
- else
- {
- intptr_t inlineCache = propertySymOpnd->m_runtimeInlineCache;
- inlineCacheOpnd = IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func, /* dontEncode */ true);
- }
- return inlineCacheOpnd;
- }
- bool
- Lowerer::TryGenerateFastCmSrXx(IR::Instr * instr)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- if (srcReg2 && IsConstRegOpnd(srcReg2))
- {
- return m_lowererMD.GenerateFastCmSrXxConst(instr);
- }
- else if (srcReg1 && IsConstRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- return m_lowererMD.GenerateFastCmSrXxConst(instr);
- }
- return false;
- }
- // Generate fast path for StrictEquals when one of the sources are undefined, null, boolean
- bool
- Lowerer::TryGenerateFastBrSrXx(IR::Instr * instr, IR::RegOpnd * srcReg1, IR::RegOpnd * srcReg2, IR::Instr ** pInstrPrev, bool noMathFastPath)
- {
- bool isEqual = !instr->IsNeq();
- if (srcReg2 && IsConstRegOpnd(srcReg2))
- {
- this->GenerateFastBrConst(instr->AsBranchInstr(), GetConstRegOpnd(srcReg2, instr), isEqual);
- instr->Remove();
- return true;
- }
- else if (srcReg1 && IsConstRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- this->GenerateFastBrConst(instr->AsBranchInstr(), GetConstRegOpnd(srcReg1, instr), isEqual);
- instr->Remove();
- return true;
- }
- return false;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::GenerateFastBrConst
- ///
- ///----------------------------------------------------------------------------
- IR::BranchInstr *
- Lowerer::GenerateFastBrConst(IR::BranchInstr *branchInstr, IR::Opnd * constOpnd, bool isEqual)
- {
- Assert(constOpnd->IsAddrOpnd() || constOpnd->IsIntConstOpnd());
- //
- // Given:
- // BrSrXx_A $L1, s1, s2
- // where s2 is either 'null', 'undefined', 'true' or 'false'
- //
- // Generate:
- //
- // CMP s1, s2
- // JEQ/JNE $L1
- //
- Assert(IsConstRegOpnd(branchInstr->GetSrc2()->AsRegOpnd()));
- IR::RegOpnd *opnd = GetRegOpnd(branchInstr->GetSrc1(), branchInstr, m_func, TyVar);
- IR::BranchInstr *newBranch;
- newBranch = InsertCompareBranch(opnd, constOpnd, isEqual ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, branchInstr->GetTarget(), branchInstr);
- return newBranch;
- }
- bool
- Lowerer::TryGenerateFastBrEq(IR::Instr * instr)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- bool isConst = false;
- if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- isConst = true;
- }
- // Fast path for == null or == undefined
- // if (src == null || src == undefined)
- if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
- {
- IR::BranchInstr *newBranch;
- newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
- true);
- this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
- true);
- instr->Remove();
- return true;
- }
- return false;
- }
- bool
- Lowerer::TryGenerateFastBrNeq(IR::Instr * instr)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- bool isConst = false;
- if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- isConst = true;
- }
- // Fast path for != null or != undefined
- // if (src != null && src != undefined)
- //
- // That is:
- // if (src == NULL) goto labelEq
- // if (src != undef) goto target
- // labelEq:
- if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
- {
- IR::LabelInstr *labelEq = instr->GetOrCreateContinueLabel();
- IR::BranchInstr *newBranch;
- newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
- true);
- newBranch->AsBranchInstr()->SetTarget(labelEq);
- this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
- false);
- instr->Remove();
- return true;
- }
- return false;
- }
// Emits inline code to allocate and minimally initialize a Js::DynamicObject.
//   newObjInstr     - instruction before which the allocation code is inserted
//   inlineSlotCount - number of inline slots included in the header allocation
//   slotCount       - total slot capacity; any excess over inlineSlotCount is
//                     allocated as a separate aux-slot array
//   newObjDst       - register that receives the new object
//   typeSrc         - operand holding the object's type pointer
// Initializes the vtable, type, aux-slots pointer, and object-array pointer;
// 'isZeroed' lets the mem-init helpers skip stores into known-zero memory.
void
Lowerer::GenerateDynamicObjectAlloc(IR::Instr * newObjInstr, uint inlineSlotCount, uint slotCount, IR::RegOpnd * newObjDst, IR::Opnd * typeSrc)
{
    // Header allocation covers the DynamicObject plus its inline slots.
    size_t headerAllocSize = sizeof(Js::DynamicObject) + inlineSlotCount * sizeof(Js::Var);
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(newObjInstr, newObjDst, IR::HelperAllocMemForScObject, headerAllocSize, &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableDynamicObject), this->outerMostLoopLabel, false);
    }
    else
    {
        // MOV [newObjDst + offset(vtable)], DynamicObject::vtable
        GenerateMemInit(newObjDst, 0, LoadVTableValueOpnd(newObjInstr, VTableValue::VtableDynamicObject), newObjInstr, isZeroed);
    }
    // MOV [newObjDst + offset(type)], newObjectType
    GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfType(), typeSrc, newObjInstr, isZeroed);
    // CALL JavascriptOperators::AllocMemForVarArray((slotCount - inlineSlotCount) * sizeof(Js::Var))
    if (slotCount > inlineSlotCount)
    {
        // Slots beyond the inline capacity live in a separately allocated
        // var array pointed to by the auxSlots field.
        size_t auxSlotsAllocSize = (slotCount - inlineSlotCount) * sizeof(Js::Var);
        IR::RegOpnd* auxSlots = IR::RegOpnd::New(TyMachPtr, m_func);
        GenerateRecyclerAllocAligned(IR::HelperAllocMemForVarArray, auxSlotsAllocSize, auxSlots, newObjInstr);
        GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), auxSlots, newObjInstr, isZeroed);
        // NOTE(review): the following store writes auxSlots into the same
        // auxSlots field as the GenerateMemInit just above — looks like one
        // of the two writes is redundant; confirm before removing either.
        IR::IndirOpnd* newObjAuxSlots = IR::IndirOpnd::New(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachPtr, m_func);
        this->InsertMove(newObjAuxSlots, auxSlots, newObjInstr);
    }
    else
    {
        // No aux slots: null the pointer.
        GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), newObjInstr, isZeroed);
    }
    // Fresh objects have no object array.
    GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfObjectArray(), newObjInstr, isZeroed);
}
- void
- Lowerer::LowerNewScObjectSimple(IR::Instr * instr)
- {
- GenerateDynamicObjectAlloc(
- instr,
- 0,
- 0,
- instr->UnlinkDst()->AsRegOpnd(),
- LoadLibraryValueOpnd(
- instr,
- Js::FunctionBody::DoObjectHeaderInliningForEmptyObjects()
- ? LibraryValue::ValueObjectHeaderInlinedType
- : LibraryValue::ValueObjectType));
- instr->Remove();
- }
// Lowers NewScObjectLiteral: inline-allocates the object for an object
// literal. When the literal's type is already created and shared, the type
// pointer is a JIT-time constant; otherwise code is emitted to load the type
// at run time, verify it is present and shared, and fall back to the
// HelperEnsureObjectLiteralType slow path when it is not. The original
// instruction is removed after the allocation code is emitted.
void
Lowerer::LowerNewScObjectLiteral(IR::Instr *newObjInstr)
{
    Func * func = m_func;
    // src2 = literal type id; resolves to the address of the cached type slot.
    IR::IntConstOpnd * literalObjectIdOpnd = newObjInstr->UnlinkSrc2()->AsIntConstOpnd();
    intptr_t literalTypeRef = newObjInstr->m_func->GetJITFunctionBody()->GetObjectLiteralTypeRef(literalObjectIdOpnd->AsUint32());
    IR::LabelInstr * helperLabel = nullptr;
    IR::LabelInstr * allocLabel = nullptr;
    IR::Opnd * literalTypeRefOpnd;
    IR::Opnd * literalTypeOpnd;
    IR::Opnd * propertyArrayOpnd;
    // src1 = aux-data id of the literal's property-id array.
    IR::IntConstOpnd * propertyArrayIdOpnd = newObjInstr->UnlinkSrc1()->AsIntConstOpnd();
    const Js::PropertyIdArray * propIds = newObjInstr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());
    intptr_t propArrayAddr = newObjInstr->m_func->GetJITFunctionBody()->GetAuxDataAddr(propertyArrayIdOpnd->AsUint32());
    // Slot capacities computed at JIT time from the property-id array.
    uint inlineSlotCapacity = Js::JavascriptOperators::GetLiteralInlineSlotCapacity(propIds);
    uint slotCapacity = Js::JavascriptOperators::GetLiteralSlotCapacity(propIds);
    IR::RegOpnd * dstOpnd;
    literalTypeRefOpnd = IR::AddrOpnd::New(literalTypeRef, IR::AddrOpndKindDynamicMisc, this->m_func);
    propertyArrayOpnd = IR::AddrOpnd::New(propArrayAddr, IR::AddrOpndKindDynamicMisc, this->m_func);
    //#if 0 TODO: OOP JIT, obj literal types
    // should pass in isShared bit through RPC, enable for in-proc jit to see perf impact
    // For OOP JIT (or with the missing-opts flag off) the cached type cannot
    // be inspected at JIT time, so literalType stays null and the runtime
    // check path below is used.
    Js::DynamicType * literalType = func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts) ? nullptr : *(Js::DynamicType **)literalTypeRef;
    if (literalType == nullptr || !literalType->GetIsShared())
    {
        // Runtime check path: load the cached type, bail to the helper if it
        // is null or not shared.
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        allocLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        literalTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(literalTypeOpnd, IR::MemRefOpnd::New(literalTypeRef, TyMachPtr, func), newObjInstr);
        InsertTestBranch(literalTypeOpnd, literalTypeOpnd,
            Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        InsertTestBranch(IR::IndirOpnd::New(literalTypeOpnd->AsRegOpnd(), Js::DynamicType::GetOffsetOfIsShared(), TyInt8, func),
            IR::IntConstOpnd::New(1, TyInt8, func, true), Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        // Dst is unlinked later, when the instruction is removed.
        dstOpnd = newObjInstr->GetDst()->AsRegOpnd();
    }
    else
    {
        // The shared type is known at JIT time; embed its address directly.
        literalTypeOpnd = IR::AddrOpnd::New(literalType, IR::AddrOpndKindDynamicType, func);
        dstOpnd = newObjInstr->UnlinkDst()->AsRegOpnd();
        Assert(inlineSlotCapacity == literalType->GetTypeHandler()->GetInlineSlotCapacity());
        Assert(slotCapacity == (uint)literalType->GetTypeHandler()->GetSlotCapacity());
    }
    if (helperLabel)
    {
        InsertBranch(Js::OpCode::Br, allocLabel, newObjInstr);
        // Slow path to ensure the type is there
        newObjInstr->InsertBefore(helperLabel);
        IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperEnsureObjectLiteralType, func);
        m_lowererMD.LoadHelperArgument(newObjInstr, literalTypeRefOpnd);
        m_lowererMD.LoadHelperArgument(newObjInstr, propertyArrayOpnd);
        LoadScriptContext(newObjInstr);
        IR::Instr * ensureTypeInstr = IR::Instr::New(Js::OpCode::Call, literalTypeOpnd, opndHelper, func);
        newObjInstr->InsertBefore(ensureTypeInstr);
        m_lowererMD.LowerCall(ensureTypeInstr, 0);
        newObjInstr->InsertBefore(allocLabel);
    }
    else
    {
        Assert(allocLabel == nullptr);
    }
    // For the next call:
    //      inlineSlotCapacity == Number of slots to allocate beyond the DynamicObject header
    //      slotCapacity - inlineSlotCapacity == Number of aux slots to allocate
    if(Js::FunctionBody::DoObjectHeaderInliningForObjectLiteral(propIds))
    {
        // Header-inlined literals store their first slots inside the object
        // header itself, so subtract the header-inlinable capacity.
        Assert(inlineSlotCapacity >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
        Assert(inlineSlotCapacity == slotCapacity);
        slotCapacity = inlineSlotCapacity -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
    }
    GenerateDynamicObjectAlloc(
        newObjInstr,
        inlineSlotCapacity,
        slotCapacity,
        dstOpnd,
        literalTypeOpnd);
    newObjInstr->Remove();
}
- IR::Instr*
- Lowerer::LowerProfiledNewScArray(IR::JitProfilingInstr* arrInstr)
- {
- IR::Instr *instrPrev = arrInstr->m_prev;
- /*
- JavascriptArray *ProfilingHelpers::ProfiledNewScArray(
- const uint length,
- FunctionBody *const functionBody,
- const ProfileId profileId)
- */
- m_lowererMD.LoadHelperArgument(arrInstr, IR::Opnd::CreateProfileIdOpnd(arrInstr->profileId, m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, CreateFunctionBodyOpnd(arrInstr->m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, arrInstr->UnlinkSrc1());
- arrInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScArray, m_func));
- m_lowererMD.LowerCall(arrInstr, 0);
- return instrPrev;
- }
// Lowers NewScArray (array literal of a known length). JIT-profiling
// instructions are delegated to LowerProfiledNewScArray. For profiled
// instructions an inline fast path is attempted first; if the profile says
// the result is likely a native array, the helper call is switched to the
// profiled variant with two extra arguments (array call-site info and the
// function-body weak ref). Returns the instruction preceding the lowered
// sequence.
IR::Instr *
Lowerer::LowerNewScArray(IR::Instr *arrInstr)
{
    if (arrInstr->IsJitProfilingInstr())
    {
        return LowerProfiledNewScArray(arrInstr->AsJitProfilingInstr());
    }

    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArray;

    if (arrInstr->IsProfiledInstr() && arrInstr->m_func->HasProfileInfo())
    {
        intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        Assert(weakFuncRef);

        Js::ProfileId profileId = static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
        Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        // src1 is the constant literal length.
        Assert(arrInstr->GetSrc1()->IsConstOpnd());
        // Emit the inline allocation fast path; falls through to the helper
        // call below when the runtime checks fail.
        GenerateProfiledNewScArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef, arrInstr->GetSrc1()->AsIntConstOpnd()->AsUint32());

        if (arrInstr->GetDst() && arrInstr->GetDst()->GetValueType().IsLikelyNativeArray())
        {
            // Profiled helper takes two extra leading arguments.
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScArray;
        }
    }

    LoadScriptContext(arrInstr);

    IR::Opnd *src1Opnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, src1Opnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);

    return instrPrev;
}
- template <typename ArrayType>
- BOOL Lowerer::IsSmallObject(uint32 length)
- {
- if (ArrayType::HasInlineHeadSegment(length))
- return true;
- uint32 alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(length);
- size_t allocSize = sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) + alignedHeadSegmentSize * sizeof(typename ArrayType::TElement);
- return HeapInfo::IsSmallObject(HeapInfo::GetAlignedSizeNoCheck(allocSize));
- }
// Emits an inline allocation fast path for a profiled NewScArray of a known
// constant length, specialized by the profiled element kind (native int,
// native float, or Var). Missing-item fills cover the slots between the
// requested length and the allocated (aligned) head-segment size. Emits a
// branch over the subsequent helper call on success, and routes runtime
// profile mismatches to helperLabel. Returns false (emitting nothing) when
// the fast path is disabled or the array would not be a small heap object.
bool
Lowerer::GenerateProfiledNewScArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return false;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // 'size' is updated by the alloc helper to the aligned head-segment size.
    uint32 size = length;
    bool isZeroed;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    // Fill loop starts at the requested length and runs to the aligned size.
    uint32 i = length;

    if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeIntArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeIntArray>(length))
        {
            return false;
        }
        // Bail to the helper if the call site is no longer a native int array.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);

        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Fill the unrequested tail of the head segment with missing items.
        for (; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeFloatArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeFloatArray>(length))
        {
            return false;
        }
        // Bail to the helper unless the call site is float (and not int).
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);

        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);

        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        for (; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(double),
                GetMissingItemOpndForAssignment(TyFloat64, m_func),
                instr, isZeroed);
        }
    }
    else
    {
        // Var array: no call-site-kind test is needed.
        if (!IsSmallObject<Js::JavascriptArray>(length))
        {
            return false;
        }
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        for (; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                GetMissingItemOpndForAssignment(TyVar, m_func),
                instr, isZeroed);
        }
    }

    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);

    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    return true;
}
- void
- Lowerer::GenerateArrayInfoIsNativeIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
- {
- Func * func = this->m_func;
- InsertTestBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
- IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
- }
- void
- Lowerer::GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
- {
- Func * func = this->m_func;
- InsertCompareBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
- IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
- }
// Maps an array type to the JIT helper that allocates the recycler memory for
// an instance of that type. Only the three specializations below exist; the
// primary template is intentionally left undefined so any other instantiation
// fails to link.
template <typename ArrayType>
static IR::JnHelperMethod GetArrayAllocMemHelper();
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptArray>()
{
    return IR::HelperAllocMemForJavascriptArray;
}
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeIntArray>()
{
    return IR::HelperAllocMemForJavascriptNativeIntArray;
}
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeFloatArray>()
{
    return IR::HelperAllocMemForJavascriptNativeFloatArray;
}
// Allocates the object + head segment for an array literal (e.g. [1, 2, 3]).
// Thin wrapper over GenerateArrayAllocHelper with the literal-specific flags:
// not the Array-constructor form, and never the no-argument form.
// See GenerateArrayAllocHelper for the meaning of psize / pIsHeadSegmentZeroed.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayLiteralsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, false /* isArrayObjCtor */, false /* isNoArgs */);
}
// Allocates the object + head segment for an Array-constructor call
// (new Array(...)). Thin wrapper over GenerateArrayAllocHelper with the
// ctor-specific flag set; isNoArgs distinguishes `new Array()` from the
// argument forms. See GenerateArrayAllocHelper for psize / pIsHeadSegmentZeroed.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayObjectsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isNoArgs)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, true /* isArrayObjCtor */, isNoArgs);
}
// Emits the inline allocation and header initialization of a new array of
// ArrayType, mirroring the runtime's JavascriptArray::NewLiteral layout.
//
// On input *psize is the requested element count; on return it is overwritten
// with the aligned head-segment capacity actually allocated, so the caller
// knows how many element slots exist (e.g. to fill with missing values).
// *pIsHeadSegmentZeroed reports whether the head segment memory is known to be
// zero-initialized, letting callers elide redundant stores.
// Returns the register holding the head segment pointer; the array object
// itself lands in instr's dst.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAllocHelper(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isArrayObjCtor, bool isNoArgs)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    // Generate code as in JavascriptArray::NewLiteral
    uint32 count = *psize;
    uint alignedHeadSegmentSize;
    size_t arrayAllocSize;
    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    bool isHeadSegmentZeroed = false;
    if (ArrayType::HasInlineHeadSegment(count))
    {
        // Head segment lives inline after the object: one allocation covers
        // both, and the segment pointer is just an LEA off the object.
        if (isArrayObjCtor)
        {
            uint32 allocCount = isNoArgs ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSizeForArrayObjects<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        else
        {
            uint32 allocCount = count == 0 ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSize<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        // Note that it is possible for the returned alignedHeadSegmentSize to be greater than INLINE_CHUNK_SIZE because
        // of rounding the *entire* object, including the head segment, to the nearest aligned size. In that case, ensure
        // that this size is still not larger than INLINE_CHUNK_SIZE size because the head segment is still inlined. This
        // keeps consistency with the definition of HasInlineHeadSegment and maintained in the assert below.
        uint inlineChunkSize = Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE;
        alignedHeadSegmentSize = min(alignedHeadSegmentSize, inlineChunkSize);
        Assert(ArrayType::HasInlineHeadSegment(alignedHeadSegmentSize));
        leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
            IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
        isHeadSegmentZeroed = true;
    }
    else
    {
        // Need to allocate the head segment first so that if it throws,
        // we doesn't have the memory assigned to dstOpnd yet
        // Even if the instruction is marked as dstIsTempObject, we still should not allocate
        // that big of a chunk on the stack.
        alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(count);
        GenerateRecyclerAlloc(
            IR::HelperAllocMemForSparseArraySegmentBase,
            sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) +
            alignedHeadSegmentSize * sizeof(typename ArrayType::TElement),
            headOpnd,
            instr);
        arrayAllocSize = sizeof(ArrayType);
    }
    // Report the true head-segment capacity back to the caller.
    *psize = alignedHeadSegmentSize;
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd,
        GetArrayAllocMemHelper<ArrayType>(), arrayAllocSize, &tempObjectSymOpnd);
    // The head segment is only zeroed if both it is inline and the allocation
    // above produced zeroed memory.
    isHeadSegmentZeroed = isHeadSegmentZeroed & isZeroed;
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            this->LoadVTableValueOpnd(this->outerMostLoopLabel, ArrayType::VtableHelper()),
            this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, isZeroed);
    // Emit the flags and call site index together
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif
    // The same at this:
    // GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint16)Js::DynamicObjectFlags::InitialArrayValue, instr, isZeroed);
    // GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayCallSiteIndex(), arrayCallSiteIndex, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint)Js::DynamicObjectFlags::InitialArrayValue | ((uint)arrayCallSiteIndex << 16), instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), count, instr, isZeroed);
    // The LEA computing the inline head-segment address must come after the
    // object allocation (it reads dstOpnd), so insert it only now.
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        ChangeToLea(leaHeadInstr);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, isZeroed);
    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), isArrayObjCtor ? 0 : count, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), alignedHeadSegmentSize, instr, isHeadSegmentZeroed);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, isHeadSegmentZeroed);
    *pIsHeadSegmentZeroed = isHeadSegmentZeroed;
    return headOpnd;
}
// Emits allocation + initialization of an array whose length is only known at
// run time (arrayLenOpnd). The allocation size is picked at run time by
// comparing the length against the precomputed allocation buckets of
// ArrayType, then calling the type's alloc helper. All fields are written
// unconditionally (isZeroed == true in the MemInit calls) because helper
// allocation gives zeroed memory.
// Returns the register holding the head segment pointer (inline after the
// object); the array object itself lands in instr's dst.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAlloc(IR::Instr *instr, IR::Opnd * arrayLenOpnd, Js::ArrayCallSiteInfo * arrayInfo)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    IR::Opnd * arraySizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::Opnd * alignedArrayAllocSizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::LabelInstr * doneCalculatingAllocSize = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * skipToNextBucket = nullptr;
    uint8 bucketsCount = ArrayType::AllocationBucketsCount;
    Js::JavascriptArray::EnsureCalculationOfAllocationBuckets<ArrayType>();
    // Emit a compare-ladder over the allocation buckets: the first bucket whose
    // upper bound is >= the requested length supplies the element count and the
    // byte size; the last bucket is the unconditional fallback.
    for (uint8 i = 0;i < bucketsCount;i++)
    {
        uint elementsCountToInitialize = ArrayType::allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex];
        uint allocationSize = ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationSizeIndex];
        // Ensure we already have allocation size calculated and within range
        Assert(elementsCountToInitialize > 0 && elementsCountToInitialize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex]);
        Assert(allocationSize > 0 && allocationSize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex]);
        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertCompare(arrayLenOpnd, IR::IntConstOpnd::New((uint16)ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            skipToNextBucket = IR::LabelInstr::New(Js::OpCode::Label, func);
            Lowerer::InsertBranch(Js::OpCode::BrGt_A, skipToNextBucket, instr);
        }
        // MOV $arrayAlignedSize, <const1>
        // MOV $arrayAllocSize, <const2>
        Lowerer::InsertMove(arraySizeOpnd, IR::IntConstOpnd::New((uint16)elementsCountToInitialize, TyUint32, func), instr);
        Lowerer::InsertMove(alignedArrayAllocSizeOpnd, IR::IntConstOpnd::New((uint16)allocationSize, TyUint32, func), instr);
        // JMP $doneCalculatingAllocSize
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertBranch(Js::OpCode::Br, doneCalculatingAllocSize, instr);
            instr->InsertBefore(skipToNextBucket);
        }
    }
    instr->InsertBefore(doneCalculatingAllocSize);
    // ***** Call to allocation helper *****
    this->m_lowererMD.LoadHelperArgument(instr, this->LoadScriptContextValueOpnd(instr, ScriptContextValue::ScriptContextRecycler));
    this->m_lowererMD.LoadHelperArgument(instr, alignedArrayAllocSizeOpnd);
    IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(GetArrayAllocMemHelper<ArrayType>(), func), func);
    instr->InsertBefore(newObjCall);
    this->m_lowererMD.LowerCall(newObjCall, 0);
    // ***** Load headSeg/initialize it *****
    // The head segment is allocated inline, right after the array object.
    leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
        IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
    GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, true);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, true);
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif
    // ***** Array object initialization *****
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), IR::IntConstOpnd::New((uint16)Js::DynamicObjectFlags::InitialArrayValue, TyUint16, func), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), arrayLenOpnd, instr, true);
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        ChangeToLea(leaHeadInstr);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, true);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, true);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), 0, instr, true); // Set head segment length to 0
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), arraySizeOpnd, instr, true);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, true);
    return headOpnd;
}
// Emits the fast path for a profiled `new Array(length)` when the length is a
// compile-time constant. Allocates via GenerateArrayObjectsAlloc for the
// profiled element kind (int / float / var) and fills every head-segment slot
// with the kind's missing value. On the slow path (guard failure) control
// falls through to the helper code after helperLabel; on success it jumps to
// labelDone.
// Returns false (emitting nothing) when the fast-path phase is off.
bool
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length, IR::LabelInstr* labelDone, bool isNoArgs)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return false;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // 'size' is updated by the alloc helper to the real head-segment capacity.
    uint32 size = length;
    bool isZeroed = false;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);
    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        // Guard: bail to helper if the site is no longer producing int arrays.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        // Guard: bail to helper unless the site is in the native-float state.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(double),
                GetMissingItemOpndForAssignment(TyFloat64, m_func),
                instr, isZeroed);
        }
    }
    else
    {
        // Var array: no profile guard needed — this layout handles any element.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                GetMissingItemOpndForAssignment(TyVar, m_func),
                instr, isZeroed);
        }
    }
    // Skip pass the helper call
    InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
    return true;
}
// Emits the fast path for a profiled `new Array(length)` where the length is a
// run-time tagged-int value. Guards that the length is a small int (<= 8),
// allocates via the bucketed GenerateArrayAlloc, then initializes missing-item
// slots bucket-by-bucket, jumping to arrayInitDone as soon as the chosen
// bucket's slots are filled. Jumps to labelDone on success; guard failures go
// to helperLabel.
// offsetOfCallSiteIndex / offsetOfWeakFuncRef are nonzero only for native
// array types, which carry the extra profiling fields.
// Returns false (emitting nothing) when the fast-path phase is off.
template <typename ArrayType>
bool
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, IR::LabelInstr* helperLabel,
    IR::LabelInstr* labelDone, IR::Opnd* lengthOpnd, uint32 offsetOfCallSiteIndex, uint32 offsetOfWeakFuncRef)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return false;
    }
    Func * func = this->m_func;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);
    uint sizeOfElement = 0;
    uint allocationBucketsCount = ArrayType::AllocationBucketsCount;
    uint(*allocationBuckets)[Js::JavascriptArray::AllocationBucketsInfoSize];
    allocationBuckets = ArrayType::allocationBuckets;

    // Pick the missing-item representation matching the profiled element kind.
    IRType missingItemType = (arrayInfo ? arrayInfo->IsNativeIntArray() ? IRType::TyInt32 : arrayInfo->IsNativeFloatArray() ? IRType::TyFloat64 : IRType::TyVar : IRType::TyVar);
    IR::LabelInstr * arrayInitDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    bool isNativeArray = arrayInfo && (arrayInfo->IsNativeIntArray() || arrayInfo->IsNativeFloatArray());
    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        sizeOfElement = sizeof(int32);
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        sizeOfElement = sizeof(double);
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else
    {
        sizeOfElement = sizeof(Js::Var);
    }
    // Untag the length; non-int lengths bail to the helper.
    lengthOpnd = GenerateUntagVar(lengthOpnd->AsRegOpnd(), helperLabel, instr);
    // Only lengths up to 8 take the fast path (matches the allocation buckets).
    IR::Opnd* upperBound = IR::IntConstOpnd::New(8, TyUint8, func, true);
    InsertCompare(lengthOpnd, upperBound, instr);
    InsertBranch(Js::OpCode::BrGt_A, true /* isUnsigned */, helperLabel, instr);
    headOpnd = GenerateArrayAlloc<ArrayType>(instr, lengthOpnd, arrayInfo);
    if (isNativeArray)
    {
        Assert(ArrayType::GetOffsetOfArrayFlags() + sizeof(uint16) == offsetOfCallSiteIndex);
        Assert(offsetOfWeakFuncRef > 0);
        GenerateMemInit(dstOpnd, offsetOfCallSiteIndex, IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, true /* isZeroed */);
        GenerateMemInit(dstOpnd, offsetOfWeakFuncRef, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, true /* isZeroed */);
    }
    uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
    uint missingItemCount = 0;
    uint missingItemInitializedSoFar = 0;
    uint missingItemIndex = 0;
    uint maxAllocationSize = allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex];
    // Fill missing items incrementally per bucket: after each bucket's slots
    // are written, lengths that fit that bucket branch out to arrayInitDone.
    for (uint8 i = 0;i < allocationBucketsCount;i++)
    {
        missingItemCount = allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex];
        if (i > 0)
        {
            // Reduce missingItemCount we have already set so far
            missingItemCount -= missingItemInitializedSoFar;
        }
        // Generate array initialization with MissingItem
        for (uint j = 0;j < missingItemCount;j++)
        {
            // Ensure we don't write missingItems past allocation size
            Assert(offsetStart + missingItemIndex * sizeOfElement <= maxAllocationSize);
            GenerateMemInit(headOpnd, offsetStart + missingItemIndex * sizeOfElement, GetMissingItemOpndForAssignment(missingItemType, func), instr, true /*isZeroed*/);
            missingItemIndex++;
        }
        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        if (i != (allocationBucketsCount - 1))
        {
            Lowerer::InsertCompare(lengthOpnd, IR::IntConstOpnd::New(allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            Lowerer::InsertBranch(Js::OpCode::BrLe_A, arrayInitDone, instr);
        }
        missingItemInitializedSoFar += missingItemCount;
    }
    // Ensure no. of missingItems written are same
    Assert(missingItemIndex == missingItemInitializedSoFar);
    // Ensure no. of missingItems match what present in allocationBuckets
    Assert(missingItemIndex == allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex]);
    instr->InsertBefore(arrayInitDone);
    Lowerer::InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
    return true;
}
// Emits the fast path for a profiled native-int array literal whose elements
// come from an auxiliary buffer (instr's src1). Allocates the array inline,
// copies the literal's ints (memcpy for > 16 elements, unrolled stores
// otherwise), and fills any remaining capacity with the int missing value.
// Emits nothing (falling back to the helper call) when the phase is off, the
// site is not a native int array, or src1 is not an aux buffer reference.
void
Lowerer::GenerateProfiledNewScIntArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
{
    // Helper will deal with ForceES5ARray
    if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }
    if (!arrayInfo->IsNativeIntArray())
    {
        return;
    }
    if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
    {
        return;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // Guard: bail to helper if the site has since stopped being a native int array.
    GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
    Js::AuxArray<int32> * ints = (Js::AuxArray<int32> *)elementsOpnd->m_metadata;
    uint32 size = ints->count;
    // Generate code as in JavascriptArray::NewLiteral
    bool isHeadSegmentZeroed;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
    // 'size' is updated to the real (aligned) head-segment capacity.
    IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
    GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicMisc, m_func), instr, isHeadSegmentZeroed);
    // Initialize the elements
    uint i = 0;
    if (ints->count > 16)
    {
        // Do memcpy if > 16
        IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
        IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<int32>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
        InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
        GenerateMemCopy(dstElementsOpnd, srcOpnd, ints->count * sizeof(int32), instr);
        i = ints->count;
    }
    else
    {
        // Small literal: emit one store per element.
        for (; i < ints->count; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                ints->elements[i], instr, isHeadSegmentZeroed);
        }
    }
    Assert(i == ints->count);
    // Fill the remaining (aligned) capacity with the missing value.
    for (; i < size; i++)
    {
        GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
            Js::JavascriptNativeIntArray::MissingItem, instr, isHeadSegmentZeroed);
    }
    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
// Emits the fast path for a profiled native-float array literal whose elements
// come from an auxiliary buffer (instr's src1). Allocates the array inline,
// always memcpys the literal's doubles in, and fills any remaining aligned
// capacity with the float missing value.
// Emits nothing (falling back to the helper call) when the phase is off, the
// site is not a native float array, or src1 is not an aux buffer reference.
void
Lowerer::GenerateProfiledNewScFloatArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
{
    if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }
    if (!arrayInfo->IsNativeFloatArray())
    {
        return;
    }
    if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
    {
        return;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // If the array info hasn't mark as not int array yet, go to the helper and mark it.
    // It really is just for assert purpose in JavascriptNativeFloatArray::ToVarArray
    GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
    Js::AuxArray<double> * doubles = (Js::AuxArray<double> *)elementsOpnd->m_metadata;
    uint32 size = doubles->count;
    // Generate code as in JavascriptArray::NewLiteral
    bool isHeadSegmentZeroed;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
    // 'size' is updated to the real (aligned) head-segment capacity.
    IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
    GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isHeadSegmentZeroed);
    // Initialize the elements
    IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
    IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<double>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
    InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
    GenerateMemCopy(dstElementsOpnd, srcOpnd, doubles->count * sizeof(double), instr);
    // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
    uint const offsetStart = sizeof(Js::SparseArraySegmentBase) + doubles->count * sizeof(double);
    // Count of trailing slots (beyond the literal's values) to mark missing.
    uint const missingItem = (size - doubles->count);
    for (uint i = 0; i < missingItem; i++)
    {
        GenerateMemInit(headOpnd, offsetStart + i * sizeof(double),
            GetMissingItemOpndForAssignment(TyFloat64, m_func), instr, isHeadSegmentZeroed);
    }
    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
- IR::Instr *
- Lowerer::LowerNewScIntArray(IR::Instr *arrInstr)
- {
- IR::Instr *instrPrev = arrInstr->m_prev;
- IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScIntArray;
- if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
- {
- intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
- if (weakFuncRef)
- {
- // Technically a load of the same memory address either way.
- Js::ProfileId profileId =
- arrInstr->IsJitProfilingInstr()
- ? arrInstr->AsJitProfilingInstr()->profileId
- : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
- Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
- intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
- // Only do fast-path if it isn't a JitProfiling instr and not copy-on-access array
- if (arrInstr->IsProfiledInstr()
- #if ENABLE_COPYONACCESS_ARRAY
- && (PHASE_OFF1(Js::Phase::CopyOnAccessArrayPhase) || arrayInfo->isNotCopyOnAccessArray) && !PHASE_FORCE1(Js::Phase::CopyOnAccessArrayPhase)
- #endif
- )
- {
- GenerateProfiledNewScIntArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
- }
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
- helperMethod = IR::HelperScrArr_ProfiledNewScIntArray;
- }
- }
- LoadScriptContext(arrInstr);
- IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
- m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerNewScFltArray(IR::Instr *arrInstr)
- {
- IR::Instr *instrPrev = arrInstr->m_prev;
- IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScFltArray;
- if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
- {
- intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
- if (weakFuncRef)
- {
- Js::ProfileId profileId =
- arrInstr->IsJitProfilingInstr()
- ? arrInstr->AsJitProfilingInstr()->profileId
- : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
- Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
- intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
- // Only do fast-path if it isn't a JitProfiling instr
- if (arrInstr->IsProfiledInstr()) {
- GenerateProfiledNewScFloatArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
- }
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
- helperMethod = IR::HelperScrArr_ProfiledNewScFltArray;
- }
- }
- LoadScriptContext(arrInstr);
- IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
- m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerArraySegmentVars(IR::Instr *arrayInstr)
- {
- IR::Instr * instrPrev;
- IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperArraySegmentVars, m_func);
- instrPrev = m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc2());
- m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc1());
- arrayInstr->m_opcode = Js::OpCode::Call;
- arrayInstr->SetSrc1(opndHelper);
- m_lowererMD.LowerCall(arrayInstr, 0);
- return instrPrev;
- }
// Lowers a JIT-profiling `new Array(...)` (interpreter-simulated profiling
// build) into a call to the ProfiledNewScObjArray helper, which both checks
// whether Array was overwritten and records profile data.
// NOTE(review): the 'hasArgs' parameter is not used in this body — presumably
// kept for signature parity with LowerNewScObject; confirm before removing.
// Returns the first instruction above the lowered region (via the marker
// mechanism), so the caller can continue lowering from there.
IR::Instr* Lowerer::LowerProfiledNewArray(IR::JitProfilingInstr* instr, bool hasArgs)
{
    // Use the special helper which checks whether Array has been overwritten by the user and if
    // it hasn't, possibly allocates a native array
    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(instr);
    Assert(instr->isNewArray);
    Assert(instr->arrayProfileId != Js::Constants::NoProfileId);
    Assert(instr->profileId != Js::Constants::NoProfileId);
    bool isSpreadCall = instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread;
    // 'this' slot for the new call is a null Var; spread calls reserve an extra arg.
    m_lowererMD.LoadNewScObjFirstArg(instr, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindConstantVar, m_func, true), isSpreadCall ? 1 : 0);
    if (isSpreadCall)
    {
        this->LowerSpreadCall(instr, Js::CallFlags_New, true);
    }
    else
    {
        // Four extra helper arguments beyond the user args, pushed in reverse:
        // function, frame pointer, profileId, arrayProfileId.
        const int32 argCount = m_lowererMD.LowerCallArgs(instr, Js::CallFlags_New, 4);
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScObjArray, m_func));
        m_lowererMD.LowerCall(instr, static_cast<Js::ArgSlot>(argCount));
    }
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerNewScObject
- ///
- /// Machine independent lowering of a CallI instr.
- ///
- ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerNewScObject(IR::Instr *newObjInstr, bool callCtor, bool hasArgs, bool isBaseClassConstructorNewScObject)
{
    // Profiled new-array allocations in simple jit take a dedicated lowering path.
    if (newObjInstr->IsJitProfilingInstr() && newObjInstr->AsJitProfilingInstr()->isNewArray)
    {
        Assert(callCtor);
        return LowerProfiledNewArray(newObjInstr->AsJitProfilingInstr(), hasArgs);
    }

    bool isSpreadCall = newObjInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread;

    Func* func = newObjInstr->m_func;

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

    IR::Opnd *ctorOpnd = newObjInstr->GetSrc1();
    IR::RegOpnd *newObjDst = newObjInstr->GetDst()->AsRegOpnd();

    Assert(!callCtor || !hasArgs || (newObjInstr->GetSrc2() != nullptr /*&& newObjInstr->GetSrc2()->IsSymOpnd()*/));

    bool skipNewScObj = false;
    bool returnNewScObj = false;
    bool emitBailOut = false;
    // If we haven't yet split NewScObject into NewScObjectNoCtor and CallI, we will need a temporary register
    // to hold the result of the object allocation.
    IR::RegOpnd* createObjDst = callCtor ? IR::RegOpnd::New(TyVar, func) : newObjDst;
    IR::LabelInstr* helperOrBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ true);
    IR::LabelInstr* callCtorLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ false);

    // Try to emit the fast allocation and construction path.
    bool usedFixedCtorCache = TryLowerNewScObjectWithFixedCtorCache(newObjInstr, createObjDst, helperOrBailoutLabel, callCtorLabel, skipNewScObj, returnNewScObj, emitBailOut);

    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
    Assert(!skipNewScObj || !returnNewScObj);
    Assert(usedFixedCtorCache || !skipNewScObj);
    Assert(!usedFixedCtorCache || newObjInstr->HasFixedFunctionAddressTarget());
    Assert(!skipNewScObj || !emitBailOut);

#if DBG && 0 // TODO: OOP JIT, enable assert
    if (usedFixedCtorCache)
    {
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        Assert((ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::ErrorOnNew) == 0);
        Assert(!!(ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::SkipDefaultNewObject) == skipNewScObj);
    }
#endif

    // An arg-out chain that only carries the implicit 'this' counts as "no args" here.
    IR::Instr* startCallInstr = nullptr;
    if (callCtor && hasArgs)
    {
        hasArgs = !newObjInstr->HasEmptyArgOutChain(&startCallInstr);
    }

    // If we're not skipping the default new object, let's emit bailout or a call to NewScObject* helper
    IR::JnHelperMethod newScHelper = IR::HelperInvalid;
    IR::Instr *newScObjCall = nullptr;
    if (!skipNewScObj)
    {
        // If we emitted the fast path, this block is a helper block.
        if (usedFixedCtorCache)
        {
            newObjInstr->InsertBefore(helperOrBailoutLabel);
        }

        if (emitBailOut)
        {
            // Split the original instruction: it becomes the BailOut, and a fresh copy of the
            // constructor call is re-inserted after it so lowering below continues on the copy.
            IR::Instr* bailOutInstr = newObjInstr;
            newObjInstr = IR::Instr::New(newObjInstr->m_opcode, func);
            bailOutInstr->TransferTo(newObjInstr);
            bailOutInstr->m_opcode = Js::OpCode::BailOut;
            bailOutInstr->InsertAfter(newObjInstr);
            GenerateBailOut(bailOutInstr);
        }
        else
        {
            Assert(!newObjDst->CanStoreTemp());
            // createObjDst = NewScObject...(ctorOpnd)
            newScHelper = !callCtor ?
                (isBaseClassConstructorNewScObject ?
                    (hasArgs ? IR::HelperNewScObjectNoCtorFull : IR::HelperNewScObjectNoArgNoCtorFull) :
                    (hasArgs ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArgNoCtor)) :
                (hasArgs || usedFixedCtorCache ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArg);

            LoadScriptContext(newObjInstr);
            m_lowererMD.LoadHelperArgument(newObjInstr, newObjInstr->GetSrc1());

            newScObjCall = IR::Instr::New(Js::OpCode::Call, createObjDst, IR::HelperCallOpnd::New(newScHelper, func), func);
            newObjInstr->InsertBefore(newScObjCall);
            m_lowererMD.LowerCall(newScObjCall, 0);
        }
    }

    // If we call HelperNewScObjectNoArg directly, we won't be calling the constructor from here, because the helper will do it.
    // We could probably avoid this complexity by converting NewScObjectNoArg to NewScObject in the IRBuilder, once we have dedicated
    // code paths for new Object() and new Array().
    callCtor &= hasArgs || usedFixedCtorCache;
    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");

    newObjInstr->InsertBefore(callCtorLabel);

    if (callCtor && usedFixedCtorCache)
    {
        IR::JnHelperMethod ctorHelper = IR::JnHelperMethodCount;
        // If we have no arguments (i.e. the argument chain is empty), we can recognize a couple of common special cases, such
        // as new Object() or new Array(), for which we have optimized helpers.
        FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
        intptr_t ctorInfo = ctor->GetFuncInfoAddr();
        if (!hasArgs && (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr() || ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr()))
        {
            if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr())
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptObjectNoArg;
                callCtor = false;
            }
            else if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr())
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptArrayNoArg;
                callCtor = false;
            }

            if (!callCtor)
            {
                // newObjDst = HelperNewJavascript{Object,Array}NoArg(scriptContext)
                LoadScriptContext(newObjInstr);

                IR::Instr *ctorCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(ctorHelper, func), func);
                newObjInstr->InsertBefore(ctorCall);
                m_lowererMD.LowerCall(ctorCall, 0);
            }
        }
    }

    IR::AutoReuseOpnd autoReuseSavedCtorOpnd;
    if (callCtor)
    {
        // Load the first argument, which is either the object just created or null. Spread has an extra argument.
        IR::Instr * argInstr = this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, createObjDst, isSpreadCall ? 1 : 0);

        IR::Instr * insertAfterCtorInstr = newObjInstr->m_next;

        if (skipNewScObj)
        {
            // Since we skipped the default new object, we must be returning whatever the constructor returns
            // (which better be an Object), so let's just use newObjDst directly.
            // newObjDst = newObjInstr->m_src1(createObjDst, ...)
            Assert(newObjInstr->GetDst() == newObjDst);
            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }
        }
        else
        {
            // We may need to return the default new object or whatever the constructor returns. Let's stash
            // away the constructor's return in a temporary operand, and do the right check, if necessary.
            // ctorResultObjOpnd = newObjInstr->m_src1(createObjDst, ...)
            IR::RegOpnd *ctorResultObjOpnd = IR::RegOpnd::New(TyVar, func);
            newObjInstr->UnlinkDst();
            newObjInstr->SetDst(ctorResultObjOpnd);

            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }

            if (returnNewScObj)
            {
                // MOV newObjDst, createObjDst
                this->InsertMove(newObjDst, createObjDst, insertAfterCtorInstr);
            }
            else
            {
                // Pick the ctor's return value if it is an object, else the default new object.
                LowerGetNewScObjectCommon(ctorResultObjOpnd, ctorResultObjOpnd, createObjDst, insertAfterCtorInstr);
                this->InsertMove(newObjDst, ctorResultObjOpnd, insertAfterCtorInstr);
            }
        }

        // We don't ever need to update the constructor cache, if we hard coded it. Caches requiring update after constructor
        // don't get cloned, and those that don't require update will never need one anymore.
        if (!usedFixedCtorCache)
        {
            LowerUpdateNewScObjectCache(insertAfterCtorInstr, newObjDst, ctorOpnd, false /* isCtorFunction */);
        }
    }
    else
    {
        if (newObjInstr->IsJitProfilingInstr())
        {
            Assert(m_func->IsSimpleJit());
            Assert(!CONFIG_FLAG(NewSimpleJit));

            // This path skipped calling the Ctor, which skips calling LowerCallI with newObjInstr, meaning that the call will not be profiled.
            // So we insert it manually here.

            if(newScHelper == IR::HelperNewScObjectNoArg &&
                newObjDst &&
                ctorOpnd->IsRegOpnd() &&
                newObjDst->AsRegOpnd()->m_sym == ctorOpnd->AsRegOpnd()->m_sym)
            {
                Assert(newObjInstr->m_func->IsSimpleJit());
                Assert(createObjDst != newObjDst);

                // The function object sym is going to be overwritten, so save it in a temp for profiling
                IR::RegOpnd *const savedCtorOpnd = IR::RegOpnd::New(ctorOpnd->GetType(), newObjInstr->m_func);
                autoReuseSavedCtorOpnd.Initialize(savedCtorOpnd, newObjInstr->m_func);
                Lowerer::InsertMove(savedCtorOpnd, ctorOpnd, newObjInstr);
                ctorOpnd = savedCtorOpnd;
            }

            // It is a constructor (CallFlags_New) and therefore a single argument (this) would have been given.
            const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_New, 1, func);

            Assert(newScObjCall);
            IR::JitProfilingInstr *const newObjJitProfilingInstr = newObjInstr->AsJitProfilingInstr();
            GenerateCallProfiling(
                newObjJitProfilingInstr->profileId,
                newObjJitProfilingInstr->inlineCacheIndex,
                createObjDst,
                ctorOpnd,
                info,
                false,
                newScObjCall,
                newObjInstr);
        }

        // MOV newObjDst, createObjDst
        if (!skipNewScObj && createObjDst != newObjDst)
        {
            this->InsertMove(newObjDst, createObjDst, newObjInstr);
        }
        newObjInstr->Remove();
    }

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
// Emits a call to a SimpleProfile* helper immediately after 'insertAfter' so the dynamic profile
// records the call that just happened (return value, callee, call info, and - when available -
// the inline cache index). Implicit call flags are saved before 'callInstr' and restored after
// 'insertAfter' so the profiling helper's own implicit calls don't pollute them.
// Returns the last instruction of the emitted sequence.
IR::Instr*
Lowerer::GenerateCallProfiling(Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex, IR::Opnd* retval, IR::Opnd*calleeFunctionObjOpnd, IR::Opnd* callInfo, bool returnTypeOnly, IR::Instr*callInstr,IR::Instr*insertAfter)
{
    // This should only ever happen in profiling simplejit
    Assert(m_func->DoSimpleJitDynamicProfile());

    // Make sure they gave us the correct call instruction
#if defined(_M_IX86) || defined(_M_X64)
    Assert(callInstr->m_opcode == Js::OpCode::CALL);
#elif defined(_M_ARM)
    Assert(callInstr->m_opcode == Js::OpCode::BLX);
#elif defined(_M_ARM64)
    Assert(callInstr->m_opcode == Js::OpCode::BLR);
#endif
    Func*const func = insertAfter->m_func;

    {
        // First, we should save the implicit call flags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        const auto saveOpnd = IR::RegOpnd::New(starFlag->GetType(), func);

        IR::AutoReuseOpnd a(starFlag, func), b(saveOpnd, func);
        // Save the flags (before call) and restore them (after the call)
        this->InsertMove(saveOpnd, starFlag, callInstr);
        // Note: On arm this is slightly inefficient because it forces a reload of the memory location to a reg (whereas x86 can load straight from hard-coded memory into a reg)
        // But it works and making it not reload the memory location would force more refactoring.
        this->InsertMove(starFlag, saveOpnd, insertAfter->m_next);
    }

    // Profile a call that just happened: push some extra info on the stack and call the helper
    if (!retval)
    {
        if (returnTypeOnly)
        {
            // If we are only supposed to profile the return type but don't use the return value, we might
            // as well do nothing!
            return insertAfter;
        }
        // No return value to record; pass null to the helper instead.
        retval = IR::AddrOpnd::NewNull(func);
    }

    IR::Instr* profileCall = IR::Instr::New(Js::OpCode::Call, func);

    // Select the helper variant; only HelperSimpleProfileCall takes an inline cache index argument.
    bool needInlineCacheIndex;
    IR::JnHelperMethod helperMethod;
    if (returnTypeOnly)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileReturnTypeCall;
    }
    else if(inlineCacheIndex == Js::Constants::NoInlineCacheIndex)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileCall_DefaultInlineCacheIndex;
    }
    else
    {
        needInlineCacheIndex = true;
        helperMethod = IR::HelperSimpleProfileCall;
    }

    profileCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertAfter->InsertAfter(profileCall);

    m_lowererMD.LoadHelperArgument(profileCall, callInfo);
    m_lowererMD.LoadHelperArgument(profileCall, calleeFunctionObjOpnd);
    m_lowererMD.LoadHelperArgument(profileCall, retval);
    if(needInlineCacheIndex)
    {
        m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateInlineCacheIndexOpnd(inlineCacheIndex, func));
    }
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateProfileIdOpnd(profileId, func));

    // Push the frame pointer so that the profiling call can grab the stack layout
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateFramePointerOpnd(func));

    // No args: the helper is stdcall
    return m_lowererMD.LowerCall(profileCall, 0);
}
// Attempts to lower NewScObject using a hard-coded (fixed) constructor cache. On success, emits
// the cache guard check (branching to helperOrBailoutLabel on failure), then either the inline
// object allocation or - for built-in ctors that need no default object - a null placeholder,
// and finally a jump to callCtorLabel. Out-params tell the caller whether to skip the default
// new object (skipNewScObj), return the allocated object directly (returnNewScObj), or emit a
// bailout instead of the helper call (emitBailOut). Returns false when no usable cache exists.
bool Lowerer::TryLowerNewScObjectWithFixedCtorCache(IR::Instr* newObjInstr, IR::RegOpnd* newObjDst,
    IR::LabelInstr* helperOrBailoutLabel, IR::LabelInstr* callCtorLabel, bool& skipNewScObj, bool& returnNewScObj, bool& emitBailOut)
{
    skipNewScObj = false;
    returnNewScObj = false;

    AssertMsg(!PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func) || !newObjInstr->HasBailOutInfo(),
        "Why do we have bailout on NewScObject when ObjTypeSpecNewObj is off?");

    if (PHASE_OFF(Js::FixedNewObjPhase, newObjInstr->m_func) && PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func))
    {
        return false;
    }

    JITTimeConstructorCache * ctorCache;

    if (newObjInstr->HasBailOutInfo() && !newObjInstr->HasLazyBailOut())
    {
        Assert(newObjInstr->IsNewScObjectInstr());
        Assert(newObjInstr->IsProfiledInstr());
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck || newObjInstr->HasLazyBailOut());
        emitBailOut = true;

        ctorCache = newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId));
        Assert(ctorCache != nullptr);
        Assert(!ctorCache->SkipNewScObject());
        Assert(!ctorCache->IsTypeFinal() || ctorCache->CtorHasNoExplicitReturnValue());

        // Invalidating any of the guarded properties must also invalidate this ctor cache.
        LinkCtorCacheToGuardedProperties(ctorCache);
    }
    else
    {
        if (newObjInstr->m_opcode == Js::OpCode::NewScObjArray || newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
        {
            // These instr's carry a profile that indexes the array call site info, not the ctor cache.
            return false;
        }

        ctorCache = newObjInstr->IsProfiledInstr() ? newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId)) : nullptr;

        if (ctorCache == nullptr)
        {
            if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("FixedNewObj: function %s (%s): lowering non-fixed new script object for %s, because %s.\n"),
                    newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                    newObjInstr->IsProfiledInstr() ? _u("constructor cache hasn't been cloned") : _u("instruction is not profiled"));
                Output::Flush();
            }
            return false;
        }
    }

    Assert(ctorCache != nullptr);

    // We should only have cloned if the script contexts match.
    // TODO: oop jit, add ctorCache->scriptContext for tracing assert
    // Assert(newObjInstr->m_func->GetScriptContextInfo()->GetAddr() == ctorCache->scriptContext);

    // Built-in constructors don't need a default new object. Since we know which constructor we're calling, we can skip creating a default
    // object and call a specialized helper (or even constructor, directly) avoiding the checks in generic NewScObjectCommon.
    if (ctorCache->SkipNewScObject())
    {
#if 0 // TODO: oop jit, add constructor info for tracing
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            const Js::JavascriptFunction* ctor = ctorCache->constructor;
            Js::FunctionBody* ctorBody = ctor->GetFunctionInfo()->HasBody() ? ctor->GetFunctionInfo()->GetFunctionBody() : nullptr;
            const char16* ctorName = ctorBody != nullptr ? ctorBody->GetDisplayName() : _u("<unknown>");

            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(_u("FixedNewObj: function %s (%s): lowering skipped new script object for %s with %s ctor <unknown> (%s %s).\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                ctorName, ctorBody ? ctorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"));
            Output::Flush();
        }
#endif
        // All built-in constructors share a special singleton cache that is never checked and never invalidated. It cannot be used
        // as a guard to protect any property operations downstream from the constructor. If this ever becomes a performance issue,
        // we could have a dedicated cache for each built-in constructor, populate it and invalidate it as any other constructor cache.
        AssertMsg(!emitBailOut, "Can't bail out on constructor cache guard for built-in constructors.");

        skipNewScObj = true;
        // Placeholder: the caller will use the constructor's own return value instead.
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        this->InsertMove(newObjDst, zeroOpnd, newObjInstr);
        return true;
    }

    AssertMsg(ctorCache->GetType() != nullptr, "Why did we hard-code a mismatched, invalidated or polymorphic constructor cache?");

#if 0 // TODO: oop jit, add constructor info for tracing
    if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
    {
        const Js::JavascriptFunction* constructor = ctorCache->constructor;
        Js::FunctionBody* constructorBody = constructor->GetFunctionInfo()->HasBody() ? constructor->GetFunctionInfo()->GetFunctionBody() : nullptr;
        const char16* constructorName = constructorBody != nullptr ? constructorBody->GetDisplayName() : _u("<unknown>");

        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): type = %p, slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, constructorBody ? constructorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"),
                ctorCache->type, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        else
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, debugStringBuffer, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        Output::Flush();
    }
#endif

    // If the constructor has no return statements, we can safely return the object that was created here.
    // No need to check what the constructor returned - it must be undefined.
    returnNewScObj = ctorCache->CtorHasNoExplicitReturnValue();

    // Guard check: a zeroed guard value means the cache was invalidated; fall back to helper/bailout.
    Assert(Js::ConstructorCache::GetSizeOfGuardValue() == static_cast<size_t>(TySize[TyMachPtr]));
    IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(ctorCache->GetRuntimeCacheGuardAddr(), TyMachReg, this->m_func,
        IR::AddrOpndKindDynamicGuardValueRef);
    IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
    InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, helperOrBailoutLabel, newObjInstr);

    // If we are calling new on a class constructor, the contract is that we pass new.target as the 'this' argument.
    // function is the constructor on which we called new - which is new.target.
    FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
    if (ctor->IsClassCtor())
    {
        // MOV newObjDst, function
        this->InsertMove(newObjDst, newObjInstr->GetSrc1(), newObjInstr);
    }
    else
    {
        JITTypeHolder newObjectType(ctorCache->GetType());
        Assert(newObjectType->IsShared());

        IR::AddrOpnd* typeSrc = IR::AddrOpnd::New(newObjectType->GetAddr(), IR::AddrOpndKindDynamicType, m_func);

        // For the next call:
        //     inlineSlotSize == Number of slots to allocate beyond the DynamicObject header
        //     slotSize - inlineSlotSize == Number of aux slots to allocate
        int inlineSlotSize = ctorCache->GetInlineSlotCount();
        int slotSize = ctorCache->GetSlotCount();
        if (newObjectType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
        {
            Assert(inlineSlotSize >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
            Assert(inlineSlotSize == slotSize);
            // Header-inlined slots live inside the DynamicObject header itself, so don't count them.
            slotSize = inlineSlotSize -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
        }
        GenerateDynamicObjectAlloc(newObjInstr, inlineSlotSize, slotSize, newObjDst, typeSrc);
    }

    // JMP $callCtor
    IR::BranchInstr *callCtorBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, callCtorLabel, m_func);
    newObjInstr->InsertBefore(callCtorBranch);
    return true;
}
- void
- Lowerer::GenerateRecyclerAllocAligned(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
- {
- IR::LabelInstr * allocDoneLabel = nullptr;
- if (!PHASE_OFF(Js::JitAllocNewObjPhase, insertionPointInstr->m_func) && HeapInfo::IsSmallObject(allocSize))
- {
- IR::LabelInstr * allocHelperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- allocDoneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, inOpHelper);
- this->m_lowererMD.GenerateFastRecyclerAlloc(allocSize, newObjDst, insertionPointInstr, allocHelperLabel, allocDoneLabel);
- // $allocHelper:
- insertionPointInstr->InsertBefore(allocHelperLabel);
- }
- // call JavascriptOperators::AllocMemForScObject(allocSize, scriptContext->GetRecycler())
- this->m_lowererMD.LoadHelperArgument(insertionPointInstr, this->LoadScriptContextValueOpnd(insertionPointInstr, ScriptContextValue::ScriptContextRecycler));
- this->m_lowererMD.LoadHelperArgument(insertionPointInstr, IR::IntConstOpnd::New((int32)allocSize, TyUint32, m_func, true));
- IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(allocHelper, m_func), m_func);
- insertionPointInstr->InsertBefore(newObjCall);
- this->m_lowererMD.LowerCall(newObjCall, 0);
- if (allocDoneLabel != nullptr)
- {
- // $allocDone:
- insertionPointInstr->InsertBefore(allocDoneLabel);
- }
- }
- IR::Instr *
- Lowerer::LowerGetNewScObject(IR::Instr *instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::GetNewScObject);
- Assert(instr->GetDst());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc2());
- const auto instrPrev = instr->m_prev;
- Assert(instrPrev);
- LowerGetNewScObjectCommon(
- instr->GetDst()->AsRegOpnd(),
- instr->GetSrc1()->AsRegOpnd(),
- instr->GetSrc2()->AsRegOpnd(),
- instr);
- instr->Remove();
- return instrPrev;
- }
// Emits the result-selection logic for a 'new' expression: if the constructor returned an object,
// that value is used; otherwise the default new object (the 'this' that was passed in) is used.
// The chosen value ends up in resultObjOpnd. Code is emitted before 'insertBeforeInstr'.
void
Lowerer::LowerGetNewScObjectCommon(
    IR::RegOpnd *const resultObjOpnd,
    IR::RegOpnd *const constructorReturnOpnd,
    IR::RegOpnd *const newObjOpnd,
    IR::Instr *insertBeforeInstr)
{
    Assert(resultObjOpnd);
    Assert(constructorReturnOpnd);
    Assert(newObjOpnd);
    Assert(insertBeforeInstr);

    // (newObjOpnd == 'this' value passed to constructor)
    //
    // if (!IsJsObject(constructorReturnOpnd))
    //     goto notObjectLabel
    // newObjOpnd = constructorReturnOpnd
    // notObjectLabel:
    // resultObjOpnd = newObjOpnd
    if(!constructorReturnOpnd->IsEqual(newObjOpnd))
    {
        // Need to check whether the constructor returned an object
        IR::LabelInstr *notObjectLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        Assert(insertBeforeInstr->m_prev);
        // All subsequent code is emitted before doneLabel, i.e. it joins at doneLabel.
        IR::LabelInstr *const doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertBeforeInstr->InsertBefore(doneLabel);
        insertBeforeInstr = doneLabel;

#if defined(_M_ARM32_OR_ARM64)
        // On ARM the object test is done via the HelperOp_IsObject runtime helper rather than inline.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, constructorReturnOpnd);
        IR::Opnd * targetOpnd = IR::RegOpnd::New(StackSym::New(TyInt32,m_func), TyInt32, m_func);
        IR::Instr * callIsObjectInstr = IR::Instr::New(Js::OpCode::Call, targetOpnd, m_func);
        insertBeforeInstr->InsertBefore(callIsObjectInstr);
        this->m_lowererMD.ChangeToHelperCall(callIsObjectInstr, IR::HelperOp_IsObject);
        InsertTestBranch( targetOpnd, targetOpnd, Js::OpCode::BrEq_A, notObjectLabel,insertBeforeInstr);
#else
        m_lowererMD.GenerateIsJsObjectTest(constructorReturnOpnd, insertBeforeInstr, notObjectLabel);
#endif

        // Value returned by constructor is an object (use constructorReturnOpnd)
        if(!resultObjOpnd->IsEqual(constructorReturnOpnd))
        {
            this->InsertMove(resultObjOpnd, constructorReturnOpnd, insertBeforeInstr);
        }
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, m_func));

        // Value returned by constructor is not an object (use newObjOpnd)
        insertBeforeInstr->InsertBefore(notObjectLabel);
    }
    if(!resultObjOpnd->IsEqual(newObjOpnd))
    {
        this->InsertMove(resultObjOpnd, newObjOpnd, insertBeforeInstr);
    }

    // fall through to insertBeforeInstr or doneLabel
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerUpdateNewScObjectCache
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR::Opnd *src1, const bool isCtorFunction)
- {
- // if (!isCtorFunction)
- // {
- // MOV r1, [src1 + offset(type)] -- check base TypeIds_Function
- // CMP [r1 + offset(typeId)], TypeIds_Function
- // }
- // JNE $fallThru
- // MOV r2, [src1 + offset(constructorCache)]
- // MOV r3, [r2 + offset(updateAfterCtor)]
- // TEST r3, r3 -- check if updateAfterCtor is 0
- // JEQ $fallThru
- // CALL UpdateNewScObjectCache(src1, dst, scriptContext)
- // $fallThru:
- IR::LabelInstr *labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- src1 = GetRegOpnd(src1, insertInstr, m_func, TyMachReg);
- // Check if constructor is a function if we don't already know it.
- if (!isCtorFunction)
- {
- IR::RegOpnd* src1RegOpnd = src1->AsRegOpnd();
- // MOV r1, [src1 + offset(type)] -- check base TypeIds_Function
- IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1RegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- Lowerer::InsertMove(r1, indirOpnd, insertInstr);
- // CMP [r1 + offset(typeId)], TypeIds_Function
- // JNE $fallThru
- indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
- IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
- IR::BranchInstr* branchInstr = InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
- InsertObjectPoison(src1RegOpnd, branchInstr, insertInstr, false);
- }
- // Every function has a constructor cache, even if only the default blank one.
- // r2 = MOV JavascriptFunction->constructorCache
- IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
- IR::IndirOpnd *opndIndir = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfConstructorCache(), TyMachReg, this->m_func);
- IR::Instr *instr = Lowerer::InsertMove(r2, opndIndir, insertInstr);
- // r3 = constructorCache->updateAfterCtor
- IR::RegOpnd *r3 = IR::RegOpnd::New(TyInt8, this->m_func);
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(r2, Js::ConstructorCache::GetOffsetOfUpdateAfterCtor(), TyUint8, this->m_func);
- instr = Lowerer::InsertMove(r3, indirOpnd, insertInstr);
- // TEST r3, r3 -- check if updateAfterCtor is 0
- // JEQ $fallThru
- InsertTestBranch(r3, r3, Js::OpCode::BrEq_A, labelFallThru, insertInstr);
- // r2 = UpdateNewScObjectCache(src1, dst, scriptContext)
- insertInstr->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true)); // helper label for uncommon path
- IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperUpdateNewScObjectCache, m_func);
- LoadScriptContext(insertInstr);
- m_lowererMD.LoadHelperArgument(insertInstr, dst);
- m_lowererMD.LoadHelperArgument(insertInstr, src1);
- instr = IR::Instr::New(Js::OpCode::Call, m_func);
- instr->SetSrc1(opndHelper);
- insertInstr->InsertBefore(instr);
- m_lowererMD.LowerCall(instr, 0);
- // $fallThru:
- insertInstr->InsertBefore(labelFallThru);
- return insertInstr;
- }
- IR::Instr *
- Lowerer::LowerNewScObjArray(IR::Instr *newObjInstr)
- {
- if (newObjInstr->HasEmptyArgOutChain())
- {
- newObjInstr->FreeSrc2();
- return LowerNewScObjArrayNoArg(newObjInstr);
- }
- IR::Instr* startMarkerInstr = nullptr;
- IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
- Func *func = newObjInstr->m_func;
- if (!targetOpnd->IsAddrOpnd())
- {
- if (!newObjInstr->HasBailOutInfo() || newObjInstr->OnlyHasLazyBailOut())
- {
- return this->LowerNewScObject(newObjInstr, true, true);
- }
- // Insert a temporary label before the instruction we're about to lower, so that we can return
- // the first instruction above that needs to be lowered after we're done - regardless of argument
- // list, StartCall, etc.
- startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
- // For whatever reason, we couldn't do a fixed function check on the call target.
- // Generate a runtime check on the target.
- Assert(
- newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray ||
- newObjInstr->GetBailOutKind() == BailOutInfo::WithLazyBailOut(IR::BailOutOnNotNativeArray)
- );
- IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
- InsertCompareBranch(
- targetOpnd,
- LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
- Js::OpCode::BrEq_A,
- true,
- labelSkipBailOut,
- newObjInstr);
- IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), newObjInstr->UnlinkSrc2(), func);
- instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
- newObjInstr->InsertAfter(instrNew);
- newObjInstr->m_opcode = Js::OpCode::BailOut;
- GenerateBailOut(newObjInstr);
- instrNew->InsertBefore(labelSkipBailOut);
- newObjInstr = instrNew;
- }
- else
- {
- // Insert a temporary label before the instruction we're about to lower, so that we can return
- // the first instruction above that needs to be lowered after we're done - regardless of argument
- // list, StartCall, etc.
- startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
- }
- intptr_t weakFuncRef = 0;
- Js::ArrayCallSiteInfo *arrayInfo = nullptr;
- intptr_t arrayInfoAddr = 0;
- Assert(newObjInstr->IsProfiledInstr());
- IR::RegOpnd *resultObjOpnd = newObjInstr->GetDst()->AsRegOpnd();
- IR::Instr * insertInstr = newObjInstr->m_next;
- Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
- // We may not have profileId if we converted a NewScObject to NewScObjArray
- if (profileId != Js::Constants::NoProfileId)
- {
- arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
- arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
- Assert(arrayInfo);
- weakFuncRef = func->GetWeakFuncRef();
- Assert(weakFuncRef);
- }
- IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
- IR::Opnd *linkOpnd = newObjInstr->GetSrc2();
- Assert(linkOpnd->IsSymOpnd());
- StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
- Assert(linkSym->IsSingleDef());
- IR::Instr* argInstr = linkSym->GetInstrDef();
- IR::Opnd *opndOfArrayCtor = argInstr->GetSrc1();
- const uint16 upperBoundValue = 8;
- // Generate fast path only if it meets all the conditions:
- // 1. It is the only parameter and it is a likely int
- // 2a. If 1st parameter is a variable, emit fast path with checks
- // 2b. If 1st parameter is a constant, it is in range 0 and upperBoundValue (inclusive)
- if (opndOfArrayCtor->GetValueType().IsLikelyInt() && (opndOfArrayCtor->IsAddrOpnd() || opndOfArrayCtor->IsRegOpnd())) // #1
- {
- if ((linkSym->GetArgSlotNum() == 2)) // 1. It is the only parameter
- {
- AssertMsg(linkSym->IsArgSlotSym(), "Not an argSlot symbol...");
- linkOpnd = argInstr->GetSrc2();
- bool emittedFastPath = false;
- // 2a. If 1st parameter is a variable, emit fast path with checks
- if (opndOfArrayCtor->IsRegOpnd())
- {
- if (!opndOfArrayCtor->AsRegOpnd()->IsNotInt())
- {
- // 3. GenerateFastPath
- if (arrayInfo && arrayInfo->IsNativeIntArray())
- {
- emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeIntArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
- Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(),
- Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef());
- }
- else if (arrayInfo && arrayInfo->IsNativeFloatArray())
- {
- emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeFloatArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
- Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(),
- Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef());
- }
- else
- {
- emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor, 0, 0);
- }
- }
- }
- // 2b. If 1st parameter is a constant, it is in range 0 and upperBoundValue (inclusive)
- else
- {
- int32 length = linkSym->GetIntConstValue();
- if (length >= 0 && length <= upperBoundValue)
- {
- emittedFastPath = GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, (uint32)length, labelDone, false);
- }
- }
- // Since we emitted fast path above, move the startCall/argOut instruction right before helper
- if (emittedFastPath)
- {
- linkSym = linkOpnd->AsRegOpnd()->m_sym->AsStackSym();
- AssertMsg(!linkSym->IsArgSlotSym() && linkSym->m_isSingleDef, "Arg tree not single def...");
- IR::Instr* startCallInstr = linkSym->m_instrDef;
- AssertMsg(startCallInstr->GetArgOutCount(false) == 2, "Generating ArrayFastPath for more than 1 parameter not allowed.");
- // Since we emitted fast path above, move the startCall/argOut instruction right before helper
- startCallInstr->Move(newObjInstr);
- argInstr->Move(newObjInstr);
- }
- }
- }
- newObjInstr->UnlinkSrc1();
- IR::Opnd *profileOpnd = IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func);
- this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, profileOpnd);
- IR::JnHelperMethod helperMethod = IR::HelperScrArr_ProfiledNewInstance;
- newObjInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
- newObjInstr = GenerateDirectCall(newObjInstr, targetOpnd, Js::CallFlags_New);
- IR::BranchInstr* branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(resultObjOpnd, 0, TyMachPtr, func),
- LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptArray),
- Js::OpCode::BrEq_A,
- true,
- labelDone,
- insertInstr);
- InsertObjectPoison(resultObjOpnd, branchInstr, insertInstr, true);
- // We know we have a native array, so store the weak ref and call site index.
- InsertMove(
- IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfArrayCallSiteIndex(), TyUint16, func),
- IR::Opnd::CreateProfileIdOpnd(profileId, func),
- insertInstr);
- InsertMove(
- IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfWeakFuncRef(), TyMachReg, func),
- IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func),
- insertInstr);
- insertInstr->InsertBefore(labelDone);
- return RemoveLoweredRegionStartMarker(startMarkerInstr);
- }
- // Lowers a profiled `new Array()` call site that has no arguments.
- // If the call target is not a known address, either falls back to generic
- // LowerNewScObject lowering or emits a runtime check against the Array
- // constructor with a bailout on mismatch. Then emits the profiled
- // no-argument array-allocation fast path and a fallback call to the
- // HelperScrArr_ProfiledNewInstanceNoArg helper.
- // Returns the instruction from which the caller should resume lowering.
- IR::Instr *
- Lowerer::LowerNewScObjArrayNoArg(IR::Instr *newObjInstr)
- {
- IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
- Func *func = newObjInstr->m_func;
- IR::Instr* startMarkerInstr = nullptr;
- if (!targetOpnd->IsAddrOpnd())
- {
- if (!newObjInstr->HasBailOutInfo() || newObjInstr->OnlyHasLazyBailOut())
- {
- // No (non-lazy) bailout available: lower as a generic object construction.
- return this->LowerNewScObject(newObjInstr, true, false);
- }
- // Insert a temporary label before the instruction we're about to lower, so that we can return
- // the first instruction above that needs to be lowered after we're done - regardless of argument
- // list, StartCall, etc.
- startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
- // For whatever reason, we couldn't do a fixed function check on the call target.
- // Generate a runtime check on the target.
- Assert(
- newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray ||
- newObjInstr->GetBailOutKind() == BailOutInfo::WithLazyBailOut(IR::BailOutOnNotNativeArray)
- );
- IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
- // target == Array constructor ? skip the bailout : fall through into it.
- InsertCompareBranch(
- targetOpnd,
- LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
- Js::OpCode::BrEq_A,
- true,
- labelSkipBailOut,
- newObjInstr);
- // Clone the profiled instruction; the original becomes the BailOut and the
- // clone (instrNew) carries on as the instruction being lowered.
- IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), func);
- instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
- newObjInstr->InsertAfter(instrNew);
- newObjInstr->m_opcode = Js::OpCode::BailOut;
- GenerateBailOut(newObjInstr);
- instrNew->InsertBefore(labelSkipBailOut);
- newObjInstr = instrNew;
- }
- else
- {
- // Insert a temporary label before the instruction we're about to lower, so that we can return
- // the first instruction above that needs to be lowered after we're done - regardless of argument
- // list, StartCall, etc.
- startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
- }
- Assert(newObjInstr->IsProfiledInstr());
- intptr_t weakFuncRef = 0;
- intptr_t arrayInfoAddr = 0;
- Js::ArrayCallSiteInfo *arrayInfo = nullptr;
- Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
- if (profileId != Js::Constants::NoProfileId)
- {
- // Profile data exists for this call site: fetch the array call-site info
- // and the function-body weak ref so the fast path can record allocations.
- arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
- arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
- Assert(arrayInfo);
- weakFuncRef = func->GetWeakFuncRef();
- Assert(weakFuncRef);
- }
- IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
- // Fast path: inline allocation of a zero-length array (length argument 0, isNoArgs = true).
- GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, 0, labelDone, true);
- newObjInstr->InsertAfter(labelDone);
- // Slow path: call the helper. Arguments are pushed in reverse order:
- // (target, scriptContext, arrayInfoAddr, weakFuncRef).
- m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func));
- m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func));
- LoadScriptContext(newObjInstr);
- m_lowererMD.LoadHelperArgument(newObjInstr, targetOpnd);
- newObjInstr->UnlinkSrc1();
- newObjInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrArr_ProfiledNewInstanceNoArg, func));
- m_lowererMD.LowerCall(newObjInstr, 0);
- return RemoveLoweredRegionStartMarker(startMarkerInstr);
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerPrologEpilog
- ///
- ///----------------------------------------------------------------------------
- // Lowers the function's entry and exit instructions to machine-specific
- // prolog/epilog sequences. For coroutines (generators), the resume jump
- // table at the head of the function is lowered first, since it must be
- // in place before the entry instruction is expanded.
- void
- Lowerer::LowerPrologEpilog()
- {
- if (m_func->GetJITFunctionBody()->IsCoroutine())
- {
- LowerGeneratorResumeJumpTable();
- }
- IR::Instr * instr;
- instr = m_func->m_headInstr;
- AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
- m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
- instr = m_func->m_exitInstr;
- AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
- m_lowererMD.LowerExitInstr(instr->AsExitInstr());
- }
- // Asm.js variant of LowerPrologEpilog: same entry-instruction lowering,
- // but the exit instruction uses the asm.js-specific epilog sequence.
- void
- Lowerer::LowerPrologEpilogAsmJs()
- {
- IR::Instr * instr;
- instr = m_func->m_headInstr;
- AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
- m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
- instr = m_func->m_exitInstr;
- AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
- m_lowererMD.LowerExitInstrAsmJs(instr->AsExitInstr());
- }
- // Expands the GeneratorResumeJumpTable pseudo-instruction into a series of
- // compare-and-branch instructions: one per (yield offset, resume label)
- // pair recorded for the coroutine. The pseudo-instruction's src1 holds the
- // resume offset being dispatched on; the pseudo-instruction itself is
- // removed once the branches have been emitted.
- void
- Lowerer::LowerGeneratorResumeJumpTable()
- {
- Assert(m_func->GetJITFunctionBody()->IsCoroutine());
- IR::Instr * jumpTableInstr = m_func->m_headInstr;
- AssertMsg(jumpTableInstr->IsEntryInstr(), "First instr isn't an EntryInstr...");
- // Hope to do away with this linked list scan by moving this lowering to a post-prolog-epilog/pre-encoder phase that is common to all architectures (currently such phase is only available on amd64/arm)
- while (jumpTableInstr->m_opcode != Js::OpCode::GeneratorResumeJumpTable)
- {
- jumpTableInstr = jumpTableInstr->m_next;
- }
- IR::Opnd * srcOpnd = jumpTableInstr->UnlinkSrc1();
- m_func->MapYieldOffsetResumeLabels([&](int i, const YieldOffsetResumeLabel& yorl)
- {
- uint32 offset = yorl.First();
- IR::LabelInstr * label = yorl.Second();
- if (label != nullptr && label->m_hasNonBranchRef)
- {
- // Also fix up the bailout at the label with the jump to epilog that was not emitted in GenerateBailOut()
- Assert(label->m_prev->HasBailOutInfo());
- GenerateJumpToEpilogForBailOut(label->m_prev->GetBailOutInfo(), label->m_prev);
- }
- else if (label == nullptr)
- {
- // No dedicated resume label: route this offset to the shared
- // no-save bailout label.
- label = m_func->m_bailOutNoSaveLabel;
- }
- // For each offset label pair, insert a compare of the offset and branch if equal to the label
- InsertCompareBranch(srcOpnd, IR::IntConstOpnd::New(offset, TyUint32, m_func), Js::OpCode::BrSrEq_A, label, jumpTableInstr);
- });
- jumpTableInstr->Remove();
- }
- // Numbers every instruction in the function and inserts an interrupt
- // (script-abort) probe after each loop-top label, so long-running loops
- // can be terminated by the host via the thread's stack-limit mechanism.
- void
- Lowerer::DoInterruptProbes()
- {
- this->m_func->SetHasInstrNumber(true);
- uint instrCount = 1;
- FOREACH_INSTR_IN_FUNC(instr, this->m_func)
- {
- instr->SetNumber(instrCount++);
- if (instr->IsLabelInstr())
- {
- IR::LabelInstr *labelInstr = instr->AsLabelInstr();
- if (labelInstr->m_isLoopTop)
- {
- // For every loop top label, insert the following:
- // cmp sp, ThreadContext::stackLimitForCurrentThread
- // bgt $continue
- // $helper:
- // call JavascriptOperators::ScriptAbort
- // b $exit
- // $continue:
- // The probe is inserted before newLabel and branches back to it on
- // success, so the loop body begins at newLabel.
- IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- labelInstr->InsertAfter(newLabel);
- this->InsertOneLoopProbe(newLabel, newLabel);
- }
- }
- }
- NEXT_INSTR_IN_FUNC;
- }
- // Insert an interrupt probe at each loop back branch. (Currently uncalled, since we're inserting
- // probes at loop tops instead of back edges, but kept around because it may prove useful.)
- // Inserts an interrupt probe at a loop back-branch and numbers the
- // instructions the probe adds. Returns the next available instruction
- // number. Forward branches (target label not yet numbered) are left alone.
- uint
- Lowerer::DoLoopProbeAndNumber(IR::BranchInstr *branchInstr)
- {
- IR::LabelInstr *labelInstr = branchInstr->GetTarget();
- if (labelInstr == nullptr || labelInstr->GetNumber() == 0)
- {
- // Forward branch (possibly an indirect jump after try-catch-finally); nothing to do.
- return branchInstr->GetNumber() + 1;
- }
- Assert(labelInstr->m_isLoopTop);
- // Insert a stack probe at this branch. Number all the instructions we insert
- // and return the next instruction number.
- uint number = branchInstr->GetNumber();
- IR::Instr *instrPrev = branchInstr->m_prev;
- IR::Instr *instrNext = branchInstr->m_next;
- if (branchInstr->IsUnconditional())
- {
- // B $loop ==>
- // cmp [], 0
- // beq $loop
- // $helper:
- // call abort
- // b $exit
- // The probe's success branch replaces the unconditional back-branch.
- this->InsertOneLoopProbe(branchInstr, labelInstr);
- branchInstr->Remove();
- }
- else
- {
- // Bcc $loop ==>
- // Binv $notloop
- // cmp [], 0
- // beq $loop
- // $helper:
- // call abort
- // b $exit
- // $notloop:
- // Invert the condition to branch around the probe on loop exit.
- IR::LabelInstr *loopExitLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- branchInstr->SetTarget(loopExitLabel);
- LowererMD::InvertBranch(branchInstr);
- branchInstr->InsertAfter(loopExitLabel);
- this->InsertOneLoopProbe(loopExitLabel, labelInstr);
- }
- // Number everything inserted between the original neighbors of the branch.
- FOREACH_INSTR_IN_RANGE(instr, instrPrev->m_next, instrNext->m_prev)
- {
- instr->SetNumber(number++);
- }
- NEXT_INSTR_IN_RANGE;
- return number;
- }
- void
- Lowerer::InsertOneLoopProbe(IR::Instr *insertInstr, IR::LabelInstr *loopLabel)
- {
- // Insert one interrupt probe at the given instruction. Probe the stack and call the abort helper
- // directly if the probe fails.
- // Compare the stack pointer against the thread's stack limit; if SP is
- // still above the limit, continue looping (branch to loopLabel).
- IR::Opnd *memRefOpnd = IR::MemRefOpnd::New(
- m_func->GetThreadContextInfo()->GetThreadStackLimitAddr(),
- TyMachReg, this->m_func);
- IR::RegOpnd *regStackPointer = IR::RegOpnd::New(
- NULL, this->m_lowererMD.GetRegStackPointer(), TyMachReg, this->m_func);
- InsertCompareBranch(regStackPointer, memRefOpnd, Js::OpCode::BrGt_A, loopLabel, insertInstr);
- IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- insertInstr->InsertBefore(helperLabel);
- IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScriptAbort, this->m_func);
- IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
- instr->SetSrc1(helperOpnd);
- insertInstr->InsertBefore(instr);
- this->m_lowererMD.LowerCall(instr, 0);
- // Jump to the exit after the helper call. This instruction will never be reached, but the jump
- // indicates that nothing is live after the call (to avoid useless spills in code that will
- // be executed).
- // NOTE: helperLabel is reused below to hold the label just before the
- // function's exit (either an existing label or a fresh one) — it no longer
- // refers to the probe's helper block from this point on.
- instr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
- if (instr->IsLabelInstr())
- {
- helperLabel = instr->AsLabelInstr();
- }
- else
- {
- helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- this->m_func->m_exitInstr->InsertBefore(helperLabel);
- }
- instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, helperLabel, this->m_func);
- insertInstr->InsertBefore(instr);
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LoadPropertySymAsArgument
- ///
- /// Generate code to pass a fieldSym as argument to a helper.
- ///----------------------------------------------------------------------------
- // Pushes a property reference as two helper-call arguments: the property ID
- // (as an un-encoded int const) and the property-owner instance. Arguments
- // are loaded in reverse order (propertyId first, then instance).
- // Returns the first instruction of the argument-load sequence.
- IR::Instr *
- Lowerer::LoadPropertySymAsArgument(IR::Instr *instr, IR::Opnd *fieldSrc)
- {
- IR::Instr * instrPrev;
- AssertMsg(fieldSrc->IsSymOpnd() && fieldSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as src of LdFld");
- IR::SymOpnd *symOpnd = fieldSrc->AsSymOpnd();
- PropertySym * fieldSym = symOpnd->m_sym->AsPropertySym();
- // dontEncode: property IDs are stable and need no pointer encoding.
- IR::IntConstOpnd * indexOpnd = IR::IntConstOpnd::New(fieldSym->m_propertyId, TyInt32, m_func, /*dontEncode*/true);
- instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
- IR::RegOpnd * instanceOpnd = symOpnd->CreatePropertyOwnerOpnd(m_func);
- m_lowererMD.LoadHelperArgument(instr, instanceOpnd);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LoadFunctionBodyAsArgument
- ///
- /// Special case: the "property ID" is a key into the ScriptContext's FunctionBody map
- ///----------------------------------------------------------------------------
- // Pushes the nested function reference and the environment as helper-call
- // arguments for NewScFunc/NewScGenFunc. Returns the first instruction of
- // the argument-load sequence.
- IR::Instr *
- Lowerer::LoadFunctionBodyAsArgument(IR::Instr *instr, IR::IntConstOpnd * functionBodySlotOpnd, IR::RegOpnd * envOpnd)
- {
- IR::Instr * instrPrev;
- // We need to pass in the function reference, we can't embed the pointer to the function proxy here.
- // The function proxy may be deferred parsed/serialized, and may 'progress' to a real function body after it is undeferred.
- // At which point the deferred function proxy may be collected.
- // Just pass it the address where we will find the function proxy/body
- Js::FunctionInfoPtrPtr infoRef = instr->m_func->GetJITFunctionBody()->GetNestedFuncRef((uint)functionBodySlotOpnd->GetValue());
- AssertMsg(infoRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
- IR::AddrOpnd * indexOpnd = IR::AddrOpnd::New((Js::Var)infoRef, IR::AddrOpndKindDynamicMisc, m_func);
- instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
- m_lowererMD.LoadHelperArgument(instr, envOpnd);
- return instrPrev;
- }
- // Lowers a field load under JIT profiling: replaces the instruction with a
- // call to the matching profiling helper (ProfiledLdFld_Jit and variants),
- // pushing the helper arguments described in each case's comment block.
- // Returns the instruction preceding the lowered region.
- IR::Instr *
- Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
- {
- const auto instrPrev = ldFldInstr->m_prev;
- auto src = ldFldInstr->UnlinkSrc1();
- AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
- IR::JnHelperMethod helper = IR::HelperInvalid;
- switch (ldFldInstr->m_opcode)
- {
- // All plain field-load variants share the ldFldCommon argument setup;
- // they differ only in which profiling helper is invoked.
- case Js::OpCode::LdFld:
- helper = IR::HelperProfiledLdFld;
- goto ldFldCommon;
- case Js::OpCode::LdRootFld:
- helper = IR::HelperProfiledLdRootFld;
- goto ldFldCommon;
- case Js::OpCode::LdMethodFld:
- helper = IR::HelperProfiledLdMethodFld;
- goto ldFldCommon;
- case Js::OpCode::LdRootMethodFld:
- helper = IR::HelperProfiledLdRootMethodFld;
- goto ldFldCommon;
- case Js::OpCode::LdFldForCallApplyTarget:
- helper = IR::HelperProfiledLdFld_CallApplyTarget;
- goto ldFldCommon;
- case Js::OpCode::LdFldForTypeOf:
- helper = IR::HelperProfiledLdFldForTypeOf;
- goto ldFldCommon;
- case Js::OpCode::LdRootFldForTypeOf:
- helper = IR::HelperProfiledLdRootFldForTypeOf;
- goto ldFldCommon;
- ldFldCommon:
- {
- Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
- /*
- Var ProfilingHelpers::ProfiledLdFld_Jit(
- const Var instance,
- const PropertyId propertyId,
- const InlineCacheIndex inlineCacheIndex,
- void *const framePointer)
- */
- // Arguments are pushed in reverse of the helper signature.
- m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
- m_lowererMD.LoadHelperArgument(
- ldFldInstr,
- IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
- LoadPropertySymAsArgument(ldFldInstr, src);
- break;
- }
- case Js::OpCode::LdSuperFld:
- {
- Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
- IR::Opnd * src2 = nullptr;
- /*
- Var ProfilingHelpers::ProfiledLdSuperFld_Jit(
- const Var instance,
- const PropertyId propertyId,
- const InlineCacheIndex inlineCacheIndex,
- void *const framePointer,
- const Var thisInstance)
- */
- // src2 carries the 'this' instance for super field loads.
- src2 = ldFldInstr->UnlinkSrc2();
- m_lowererMD.LoadHelperArgument(ldFldInstr, src2 );
- m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
- m_lowererMD.LoadHelperArgument(
- ldFldInstr,
- IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
- LoadPropertySymAsArgument(ldFldInstr, src);
- helper = IR::HelperProfiledLdSuperFld;
- break;
- }
- case Js::OpCode::LdLen_A:
- // LdLen is the only variant here that carries its own profileId.
- Assert(ldFldInstr->profileId != Js::Constants::NoProfileId);
- /*
- Var ProfilingHelpers::ProfiledLdLen_Jit(
- const Var instance,
- const PropertyId propertyId,
- const InlineCacheIndex inlineCacheIndex,
- const ProfileId profileId,
- void *const framePointer)
- */
- m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
- m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateProfileIdOpnd(ldFldInstr->profileId, m_func));
- m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
- LoadPropertySymAsArgument(ldFldInstr, src);
- helper = IR::HelperProfiledLdLen;
- break;
- default:
- Assert(false);
- }
- ldFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
- m_lowererMD.LowerCall(ldFldInstr, 0);
- return instrPrev;
- }
- // Emits the fast-path load of a property from the prototype object cached
- // in a flag inline cache (u.accessor.object), reading the slot index from
- // the cache and loading from either the inline slots or the aux slot array
- // depending on isInlineSlot. Ends with an unconditional jump to labelFallThru.
- void
- Lowerer::GenerateProtoLdFldFromFlagInlineCache(
- IR::Instr * insertBeforeInstr,
- IR::Opnd * opndDst,
- IR::RegOpnd * opndInlineCache,
- IR::LabelInstr * labelFallThru,
- bool isInlineSlot)
- {
- // Generate:
- //
- // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
- // s1 = MOV [&s1->slots] -- load the slot array
- // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
- // dst = MOV [s1 + s2*4]
- // JMP $fallthru
- IR::Opnd* inlineCacheObjOpnd;
- IR::IndirOpnd * opndIndir;
- IR::RegOpnd * opndObjSlots = nullptr;
- inlineCacheObjOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);
- // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
- IR::RegOpnd *opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
- InsertMove(opndObject, inlineCacheObjOpnd, insertBeforeInstr, false);
- if (!isInlineSlot)
- {
- // s1 = MOV [&s1->slots] -- load the slot array
- // Aux-slot case only: dereference the object's aux slot array.
- opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
- opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
- InsertMove(opndObjSlots, opndIndir, insertBeforeInstr, false);
- }
- // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
- IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
- InsertMove(opndSlotIndex, slotIndexOpnd, insertBeforeInstr, false);
- if (isInlineSlot)
- {
- // dst = MOV [s1 + s2*4]
- opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
- }
- else
- {
- // dst = MOV [s1 + s2*4]
- opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
- }
- InsertMove(opndDst, opndIndir, insertBeforeInstr, false);
- // JMP $fallthru
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- }
- // Local-slot counterpart of GenerateProtoLdFldFromFlagInlineCache: loads
- // the property from the base object itself (opndBase) instead of from the
- // cached prototype. Reads the slot index from the flag inline cache and
- // loads from inline slots or the aux slot array per isInlineSlot, then
- // jumps to labelFallThru.
- void
- Lowerer::GenerateLocalLdFldFromFlagInlineCache(
- IR::Instr * insertBeforeInstr,
- IR::RegOpnd * opndBase,
- IR::Opnd * opndDst,
- IR::RegOpnd * opndInlineCache,
- IR::LabelInstr * labelFallThru,
- bool isInlineSlot)
- {
- // Generate:
- //
- // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
- // s1 = MOV [&s1->slots] -- load the slot array
- // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
- // dst = MOV [s1 + s2*4]
- // JMP $fallthru
- IR::IndirOpnd * opndIndir;
- IR::RegOpnd * opndObjSlots = nullptr;
- if (!isInlineSlot)
- {
- // s1 = MOV [&s1->slots] -- load the slot array
- opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
- opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
- InsertMove(opndObjSlots, opndIndir, insertBeforeInstr, false);
- }
- // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
- IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
- InsertMove(opndSlotIndex, slotIndexOpnd, insertBeforeInstr, false);
- if (isInlineSlot)
- {
- // dst = MOV [s1 + s2*4]
- opndIndir = IR::IndirOpnd::New(opndBase, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
- }
- else
- {
- // dst = MOV [s1 + s2*4]
- opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
- }
- InsertMove(opndDst, opndIndir, insertBeforeInstr, false);
- // JMP $fallthru
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- }
- // Tests the isOnProto flag in a flag inline cache and branches to
- // labelNotOnProto when the flag is clear (TEST result zero => BrEq taken),
- // i.e. when the cached property lives on the object itself rather than
- // on its prototype.
- void
- Lowerer::GenerateFlagProtoCheck(
- IR::Instr * insertBeforeInstr,
- IR::RegOpnd * opndInlineCache,
- IR::LabelInstr * labelNotOnProto)
- {
- // Generate:
- //
- // TEST [&(inlineCache->u.accessor.isOnProto)], Js::FlagIsOnProto
- // JEQ $next
- IR::Opnd* flagsOpnd;
- // The flag byte is read through u.accessor.rawUInt16 as TyInt8 and masked
- // with the isOnProto flag mask supplied by the inline cache.
- flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
- uint isOnProtoFlagMask = Js::InlineCache::GetIsOnProtoFlagMask();
- InsertTestBranch(flagsOpnd, IR::IntConstOpnd::New(isOnProtoFlagMask, TyInt8, this->m_func), Js::OpCode::BrEq_A, labelNotOnProto, insertBeforeInstr);
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::GenerateFastLdMethodFromFlags
- ///
- /// Make use of the helper to cache the type and slot index used to do a LdFld
- /// and do an inline load from the appropriate slot if the type hasn't changed
- /// since the last time this LdFld was executed.
- ///
- ///----------------------------------------------------------------------------
- bool
- Lowerer::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
- {
- IR::LabelInstr * labelFallThru;
- IR::LabelInstr * bailOutLabel;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- IR::RegOpnd * opndBase;
- IR::RegOpnd * opndType;
- IR::RegOpnd * opndInlineCache;
- opndSrc = instrLdFld->GetSrc1();
- AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
- "Expected property sym operand as src of LdFldFlags");
- IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
- Assert(!instrLdFld->DoStackArgsOpt());
- if (propertySymOpnd->IsTypeCheckSeqCandidate())
- {
- // Reuse the object type sym so downstream type-check-sequence operations
- // see the type loaded here.
- AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
- StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
- opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
- }
- else
- {
- opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
- }
- opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- opndDst = instrLdFld->GetDst();
- opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
- labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- // Label to jump to (or fall through to) when bailing out
- bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);
- InsertMove(opndInlineCache, LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), instrLdFld);
- IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- // Check the flag cache with the untagged type
- // Four permutations are emitted below: {untagged, tagged type} x
- // {proto, local}, with the untagged-type pair using inline slots (true)
- // and the tagged-type pair using aux slots (false).
- GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
- GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
- IR::LabelInstr * labelFlagInlineLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagInlineLocal);
- GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, true);
- instrLdFld->InsertBefore(labelFlagInlineLocal);
- GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
- // Check the flag cache with the tagged type
- instrLdFld->InsertBefore(labelFlagAux);
- IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
- m_lowererMD.GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
- GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
- IR::LabelInstr * labelFlagAuxLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagAuxLocal);
- GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, false);
- instrLdFld->InsertBefore(labelFlagAuxLocal);
- GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
- instrLdFld->InsertBefore(bailOutLabel);
- instrLdFld->InsertAfter(labelFallThru);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- instrLdFld->UnlinkSrc1();
- GenerateBailOut(instrLdFld);
- // Always reports success: the fast path (with bailout fallback) was emitted.
- return true;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerLdFld
- ///
- /// Lower an instruction (LdFld, ScopedLdFld) that takes a property
- /// reference as a source and puts a result in a register.
- ///
- ///----------------------------------------------------------------------------
- // Lowers a field-load instruction (LdFld, ScopedLdFld, LdSuperFld, ...)
- // into a helper call.
- // - helperMethod / polymorphicHelperMethod: monomorphic vs polymorphic
- //   inline-cache helper; the polymorphic one is chosen when the operand has
- //   a runtime polymorphic inline cache and the two helpers differ.
- // - useInlineCache: when true, the helper receives (functionBody,
- //   inlineCache, cacheIndex, propertyId, instance); otherwise
- //   (scriptContext, propertyId, instance).
- // - labelBailOut / isHelper: forwarded to ChangeToHelperCall for bailout
- //   wiring and helper-block bookkeeping.
- // JIT-profiling instructions are diverted to LowerProfiledLdFld instead.
- // Returns the instruction preceding the lowered region.
- IR::Instr *
- Lowerer::LowerLdFld(
- IR::Instr * ldFldInstr,
- IR::JnHelperMethod helperMethod,
- IR::JnHelperMethod polymorphicHelperMethod,
- bool useInlineCache,
- IR::LabelInstr *labelBailOut,
- bool isHelper)
- {
- if (ldFldInstr->IsJitProfilingInstr())
- {
- // If we want to profile then do something completely different
- return this->LowerProfiledLdFld(ldFldInstr->AsJitProfilingInstr());
- }
- IR::Opnd *src;
- IR::Instr *instrPrev = ldFldInstr->m_prev;
- src = ldFldInstr->UnlinkSrc1();
- if (ldFldInstr->m_opcode == Js::OpCode::LdSuperFld)
- {
- // LdSuperFld passes the 'this' instance (src2) as an extra argument.
- IR::Opnd * src2 = nullptr;
- src2 = ldFldInstr->UnlinkSrc2();
- m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
- }
- AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
- if (useInlineCache)
- {
- IR::Opnd * inlineCacheOpnd;
- AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
- if (src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
- {
- JITTimePolymorphicInlineCache * polymorphicInlineCache = src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
- helperMethod = polymorphicHelperMethod;
- inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
- }
- else
- {
- // Need to load runtime inline cache opnd first before loading any helper argument
- // because LoadRuntimeInlineCacheOpnd may create labels marked as helper,
- // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
- inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd(), isHelper);
- }
- this->LoadPropertySymAsArgument(ldFldInstr, src);
- this-> m_lowererMD.LoadHelperArgument(
- ldFldInstr,
- IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
- this->m_lowererMD.LoadHelperArgument(ldFldInstr, inlineCacheOpnd);
- this->m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
- }
- else
- {
- LoadScriptContext(ldFldInstr);
- this->LoadPropertySymAsArgument(ldFldInstr, src);
- }
- // Do we need to reload the type and slot array after the helper returns?
- // (We do if there's a propertySymOpnd downstream that needs it, i.e., the type is not dead.)
- IR::RegOpnd *opndBase = src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
- m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod, labelBailOut, opndBase, src->AsSymOpnd()->IsPropertySymOpnd() ? src->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);
- return instrPrev;
- }
- bool
- Lowerer::GenerateLdFldWithCachedType(IR::Instr * instrLdFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
- {
- IR::Instr *instr;
- IR::Opnd *opnd;
- IR::LabelInstr *labelObjCheckFailed = nullptr;
- IR::LabelInstr *labelTypeCheckFailed = nullptr;
- IR::LabelInstr *labelDone = nullptr;
- Assert(continueAsHelperOut != nullptr);
- *continueAsHelperOut = false;
- Assert(labelHelperOut != nullptr);
- *labelHelperOut = nullptr;
- Assert(typeOpndOut != nullptr);
- *typeOpndOut = nullptr;
- Assert(instrLdFld->GetSrc1()->IsSymOpnd());
- if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
- {
- return false;
- }
- IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
- if (!propertySymOpnd->IsTypeCheckSeqCandidate())
- {
- return false;
- }
- AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
- if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
- {
- return false;
- }
- Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind())));
- // In the backwards pass we only add guarded property operations to instructions that are not already
- // protected by an upstream type check.
- Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
- PHASE_PRINT_TESTTRACE(
- Js::ObjTypeSpecPhase,
- this->m_func,
- _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
- Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
- propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
- this->m_func->GetJITFunctionBody()->GetDisplayName(),
- propertySymOpnd->m_inlineCacheIndex,
- propertySymOpnd->GetCacheLayoutString(),
- propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));
- if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
- {
- propertySymOpnd->UpdateSlotForFinalType();
- }
- // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
- // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
- bool hasTypeCheckBailout = instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind());
- // If the hard-coded type is not available here, do a type check, and branch to the helper if the check fails.
- // In the prototype case, we have to check the type even if it was checked upstream, to cover the case where
- // the property has been added locally. Note that this is not necessary if the proto chain has been checked,
- // because then we know there's been no store of the property since the type was checked.
- bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
- bool emitLocalTypeCheck = propertySymOpnd->NeedsLocalTypeCheck();
- bool emitLoadFromProtoTypeCheck = propertySymOpnd->NeedsLoadFromProtoTypeCheck();
- bool emitTypeCheck = emitPrimaryTypeCheck || emitLocalTypeCheck || emitLoadFromProtoTypeCheck;
- if (emitTypeCheck)
- {
- if (emitLoadFromProtoTypeCheck)
- {
- propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
- propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
- }
- labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- labelObjCheckFailed = hasTypeCheckBailout ? labelTypeCheckFailed : IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- *typeOpndOut = this->GenerateCachedTypeCheck(instrLdFld, propertySymOpnd, labelObjCheckFailed, labelTypeCheckFailed);
- }
- IR::Opnd *opndSlotArray;
- if (propertySymOpnd->IsLoadedFromProto())
- {
- opndSlotArray = this->LoadSlotArrayWithCachedProtoType(instrLdFld, propertySymOpnd);
- }
- else
- {
- opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrLdFld, propertySymOpnd);
- }
- // Load the value from the slot, getting the slot ID from the cache.
- uint16 index = propertySymOpnd->GetSlotIndex();
- Assert(index != -1);
- if (opndSlotArray->IsRegOpnd())
- {
- opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, this->m_func);
- }
- else
- {
- Assert(opndSlotArray->IsMemRefOpnd());
- opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, this->m_func, IR::AddrOpndKindDynamicPropertySlotRef);
- }
- Lowerer::InsertMove(instrLdFld->GetDst(), opnd, instrLdFld);
- // We eliminate the helper, or the type check succeeds, or we bail out before the operation.
- // Either delete the original instruction or replace it with a bailout.
- if (!emitPrimaryTypeCheck && !emitLocalTypeCheck && !emitLoadFromProtoTypeCheck)
- {
- Assert(labelTypeCheckFailed == nullptr);
- AssertMsg(!instrLdFld->HasBailOutInfo() || instrLdFld->HasLazyBailOut(), "Why does a direct field load have bailout that is not lazy?");
- instrLdFld->Remove();
- return true;
- }
- // Otherwise, branch around the bailout or helper.
- labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
- instrLdFld->InsertBefore(instr);
- // Insert the bailout or helper label here.
- instrLdFld->InsertBefore(labelTypeCheckFailed);
- instrLdFld->InsertAfter(labelDone);
- if (hasTypeCheckBailout)
- {
- AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
- "Why does a field load have a type check bailout, if its type is dead?");
- // Convert the original instruction to a bailout.
- if (instrLdFld->GetBailOutInfo()->bailOutInstr != instrLdFld)
- {
- // Set the cache index in the bailout info so that the bailout code will write it into the
- // bailout record at runtime.
- instrLdFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
- }
- instrLdFld->FreeDst();
- instrLdFld->FreeSrc1();
- instrLdFld->m_opcode = Js::OpCode::BailOut;
- this->GenerateBailOut(instrLdFld);
- return true;
- }
- else
- {
- *continueAsHelperOut = true;
- Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelTypeCheckFailed);
- *labelHelperOut = labelObjCheckFailed;
- return false;
- }
- }
// Lower a field load end to end.
// Attempted in order:
//   1. An object-type-specialized load (GenerateLdFldWithCachedType); for root
//      object loads (isRoot), also the non-configurable root field fast path.
//      If either fully succeeds, no helper call is emitted.
//   2. If emitFastPath, an inline-cache fast path (GenerateFastLdFld); if a
//      helper call is still required, LowerLdFld with the "AfterFastPath" helpers.
//   3. Otherwise, LowerLdFld with the "WithoutFastPath" helpers.
// Returns the instruction preceding the lowered sequence so the caller can
// continue iterating from there.
template<bool isRoot>
IR::Instr* Lowerer::GenerateCompleteLdFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath)
{
    // NOTE(review): a load that may call an accessor is expected not to carry an
    // implicit-call bailout -- confirm against where bailout kinds are assigned.
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        Assert(!BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()));
    }
    IR::Instr* prevInstr = instr->m_prev;
    IR::LabelInstr* labelHelper = nullptr;
    IR::LabelInstr* labelBailOut = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;
    if (isRoot)
    {
        // Don't do the fast path here if emitFastPath is false, even if we can.
        if (emitFastPath && (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd) || this->GenerateNonConfigurableLdRootFld(instr)))
        {
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    else
    {
        if (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
        {
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    if (emitFastPath)
    {
        // GenerateFastLdFld returns false when the helper call is still needed.
        if (!GenerateFastLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper))
        {
            // Bind any helper label left over from the type-specialized attempt
            // before falling into the slow path.
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = LowerLdFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper);
        }
    }
    else
    {
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = LowerLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper);
    }
    return prevInstr;
}
// Lower CheckFixedFld: verify at runtime that a fixed field's assumptions still
// hold, converting the instruction into a bailout taken when a check fails.
// Depending on what the upstream type check sequence already guarantees, this
// emits a cached type check, a cheaper property guard check, or no check at
// all (in which case the instruction is simply removed).
// Always returns true: the instruction is consumed either way.
bool
Lowerer::GenerateCheckFixedFld(IR::Instr * instrChkFld)
{
    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instrChkFld->m_func) ||
        !PHASE_OFF(Js::UseFixedDataPropsPhase, instrChkFld->m_func), "Lowering a check fixed field with fixed data/method phase disabled?");
    Assert(instrChkFld->GetSrc1()->IsSymOpnd() && instrChkFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkFld->GetSrc1()->AsPropertySymOpnd();
    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());
    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
    // For the non-configurable properties on the global object we do not need a type check. Otherwise,
    // we need a type check and bailout here unless this operation is part of the type check sequence and
    // is protected by a type check upstream.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
    // In addition, we may also need a local type check in case the property comes from the prototype and
    // it may have been overwritten on the instance after the primary type check upstream. If the property
    // comes from the instance, we must still protect against its value changing after the type check, but
    // for this a cheaper guard check is sufficient (see below).
    bool emitFixedFieldTypeCheck = propertySymOpnd->NeedsCheckFixedFieldTypeCheck() &&
        (!propertySymOpnd->IsTypeChecked() || propertySymOpnd->IsLoadedFromProto());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    // Track which kind of check (if any) was actually emitted; if neither was,
    // the instruction can be dropped without generating a helper block.
    bool checkFixedDataGenerated = false;
    bool checkFixedTypeGenerated = false;
    OUTPUT_TRACE_FUNC(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Fixed field check: %s, property ID: %d, cache ID: %u, cloned cache: true, layout: %s, redundant check: %s count of props: %u \n"),
        Js::OpCodeUtil::GetOpCodeName(instrChkFld->m_opcode),
        propertySym->m_propertyId,
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"),
        propertySymOpnd->GetGuardedPropOps() ? propertySymOpnd->GetGuardedPropOps()->Count() : 0);
    if (emitPrimaryTypeCheck || emitFixedFieldTypeCheck)
    {
        labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(emitFixedFieldTypeCheck && propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
        {
            AssertMsg(!propertySymOpnd->GetGuardedPropOps() || propertySymOpnd->GetGuardedPropOps()->IsEmpty(), "This property Guard is used only for one property");
            // We only need the cheaper guard check if the property belongs to the GlobalObject.
            checkFixedDataGenerated = this->GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        else
        {
            if (emitFixedFieldTypeCheck)
            {
                propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
                propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
            }
            this->GenerateCachedTypeCheck(instrChkFld, propertySymOpnd, labelBailOut, labelBailOut);
            checkFixedTypeGenerated = true;
        }
    }
    // We may still need this guard if we didn't emit the write protect type check above. This situation arises if we have
    // a fixed field from the instance (not proto) and a property of the same name has been written somewhere between the
    // primary type check and here. Note that we don't need a type check, because we know the fixed field exists on the
    // object even if it has been written since primary type check, but we need to verify the fixed value didn't get overwritten.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && !propertySymOpnd->IsWriteGuardChecked())
    {
        if (!PHASE_OFF(Js::FixedFieldGuardCheckPhase, this->m_func))
        {
            Assert(labelBailOut == nullptr);
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            checkFixedDataGenerated = this->GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
    }
    // Note that a type handler holds only a weak reference to the singleton instance it represents, so
    // it is possible that the instance gets collected before the type and handler do. Hence, the upstream
    // type check may succeed, even as the original instance no longer exists. However, this would happen
    // only if another instance reached the same type (otherwise we wouldn't ever pass the type check
    // upstream). In that case we would have invalidated all fixed fields on that type, and so the type
    // check (or property guard check, if necessary) above would fail. All in all, we would never attempt
    // to access a fixed field from an instance that has been collected.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && propertySymOpnd->IsWriteGuardChecked())
    {
        // Fully protected upstream: no runtime check needed, drop the instruction.
        Assert(labelBailOut == nullptr);
        AssertMsg(!instrChkFld->HasBailOutInfo(), "Why does a direct fixed field check have bailout?");
        if (propertySymOpnd->ProducesAuxSlotPtr())
        {
            this->GenerateAuxSlotPtrLoad(propertySymOpnd, instrChkFld);
        }
        instrChkFld->Remove();
        return true;
    }
    // With lazy bailout, no checks might be generated for CheckFixedFld, so the code in Lowerer is only an
    // unconditional jmp to get past the bailout helper block. This is a new case and is unexpected, so layout
    // phase will also move the statement boundary preceding CheckFixedFld together with the jmp to after
    // function exit. As a result, source mapping is incorrect. Make sure that this doesn't happen by not
    // generating helper blocks at all if we don't generate checks.
    if (!checkFixedDataGenerated && !checkFixedTypeGenerated)
    {
        instrChkFld->Remove();
        return true;
    }
    // A check was emitted: branch around the bailout block on success.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkFld->InsertBefore(instr);
    // Insert the helper label here.
    instrChkFld->InsertBefore(labelBailOut);
    instrChkFld->InsertAfter(labelDone);
    if (propertySymOpnd->ProducesAuxSlotPtr())
    {
        this->GenerateAuxSlotPtrLoad(propertySymOpnd, labelDone->m_next);
    }
    // Convert the original instruction to a bailout.
    Assert(instrChkFld->HasBailOutInfo());
    if (instrChkFld->GetBailOutInfo()->bailOutInstr != instrChkFld)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkFld->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkFld->FreeSrc1();
    instrChkFld->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkFld);
    return true;
}
// Lower CheckObjType: emit an explicit cached type check for a property access
// whose type is not already checked upstream, then convert the original
// instruction into a bailout that is reached only when the check fails.
void
Lowerer::GenerateCheckObjType(IR::Instr * instrChkObjType)
{
    Assert(instrChkObjType->GetSrc1()->IsSymOpnd() && instrChkObjType->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkObjType->GetSrc1()->AsPropertySymOpnd();
    // Why do we have an explicit type check if the cached type has been checked upstream? The dead store pass should have
    // removed this instruction.
    Assert(propertySymOpnd->IsTypeCheckSeqCandidate() && !propertySymOpnd->IsTypeChecked());
    // Why do we have an explicit type check on a non-configurable root field load?
    Assert(!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Object type check: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
        Js::OpCodeUtil::GetOpCodeName(instrChkObjType->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), _u("false"));
    // Both the object-check and type-check failure paths go to the same bailout.
    IR::LabelInstr* labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    this->GenerateCachedTypeCheck(instrChkObjType, propertySymOpnd, labelBailOut, labelBailOut);
    // On success, branch around the bailout block.
    IR::LabelInstr* labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::Instr* instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkObjType->InsertBefore(instr);
    // Insert the bailout label here.
    instrChkObjType->InsertBefore(labelBailOut);
    instrChkObjType->InsertAfter(labelDone);
    if (propertySymOpnd->ProducesAuxSlotPtr())
    {
        this->GenerateAuxSlotPtrLoad(propertySymOpnd, labelDone->m_next);
    }
    // Convert the original instruction to a bailout.
    Assert(instrChkObjType->HasBailOutInfo());
    if (instrChkObjType->GetBailOutInfo()->bailOutInstr != instrChkObjType)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkObjType->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkObjType->FreeSrc1();
    instrChkObjType->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkObjType);
}
// Lower AdjustObjType / AdjustObjTypeReloadAuxSlotPtr: adjust the base object's
// slot storage for the transition from the initial type (src2) to the final
// type (dst) via GenerateAdjustBaseSlots, reload the aux slot pointer when the
// ReloadAuxSlotPtr variant requires it, and finally store the final type into
// the object's type field. The final type is pinned so the JIT keeps it alive.
void
Lowerer::LowerAdjustObjType(IR::Instr * instrAdjustObjType)
{
    IR::AddrOpnd *finalTypeOpnd = instrAdjustObjType->UnlinkDst()->AsAddrOpnd();
    IR::AddrOpnd *initialTypeOpnd = instrAdjustObjType->UnlinkSrc2()->AsAddrOpnd();
    IR::RegOpnd *baseOpnd = instrAdjustObjType->UnlinkSrc1()->AsRegOpnd();
    bool adjusted = this->GenerateAdjustBaseSlots(
        instrAdjustObjType, baseOpnd, JITTypeHolder((JITType*)initialTypeOpnd->m_metadata), JITTypeHolder((JITType*)finalTypeOpnd->m_metadata));
    if (instrAdjustObjType->m_opcode == Js::OpCode::AdjustObjTypeReloadAuxSlotPtr)
    {
        Assert(adjusted);
        // We reallocated the aux slots, so reload them if necessary.
        StackSym * auxSlotPtrSym = baseOpnd->m_sym->GetAuxSlotPtrSym();
        Assert(auxSlotPtrSym);
        IR::Opnd *opndIndir = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, this->m_func);
        regOpnd->SetIsJITOptimizedReg(true);
        Lowerer::InsertMove(regOpnd, opndIndir, instrAdjustObjType);
    }
    this->m_func->PinTypeRef((JITType*)finalTypeOpnd->m_metadata);
    // Write the new type into the object's type field.
    IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrAdjustObjType->m_func);
    this->InsertMove(opnd, finalTypeOpnd, instrAdjustObjType);
    initialTypeOpnd->Free(instrAdjustObjType->m_func);
    instrAdjustObjType->Remove();
}
// Try to lower a root object field load as a direct slot load with no runtime
// checks. Returns false if the operand doesn't qualify (not a property sym
// operand, or not a non-configurable root object field load); returns true
// after rewriting the instruction into a plain assignment from the slot.
bool
Lowerer::GenerateNonConfigurableLdRootFld(IR::Instr * instrLdFld)
{
    if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
    {
        return false;
    }
    IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
    if (!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
    {
        return false;
    }
    Assert(!PHASE_OFF(Js::RootObjectFldFastPathPhase, this->m_func));
    Assert(!instrLdFld->HasBailOutInfo() || instrLdFld->HasLazyBailOut());
    // The direct load cannot fail, so a lazy bailout is no longer needed.
    if (instrLdFld->HasLazyBailOut())
    {
        instrLdFld->ClearBailOutInfo();
    }
    IR::Opnd * srcOpnd;
    intptr_t rootObject = this->m_func->GetJITFunctionBody()->GetRootObject();
    if (propertySymOpnd->UsesAuxSlot())
    {
        // Load the aux slot array pointer first, then index into it.
        IR::RegOpnd * auxSlotOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        this->InsertMove(auxSlotOpnd, IR::MemRefOpnd::New((byte *)rootObject + Js::DynamicObject::GetOffsetOfAuxSlots(),
            TyMachPtr, this->m_func), instrLdFld);
        srcOpnd = IR::IndirOpnd::New(auxSlotOpnd, propertySymOpnd->GetSlotIndex() * sizeof(Js::Var *),
            TyVar, this->m_func);
    }
    else
    {
        // Inline slot: address directly off the root object.
        srcOpnd = IR::MemRefOpnd::New((Js::Var *)rootObject + propertySymOpnd->GetSlotIndex(),
            TyVar, this->m_func);
    }
    instrLdFld->ReplaceSrc1(srcOpnd);
    instrLdFld->m_opcode = Js::OpCode::Ld_A;
    LowererMD::ChangeToAssign(instrLdFld);
    return true;
}
- IR::Instr *
- Lowerer::LowerDelFld(IR::Instr *delFldInstr, IR::JnHelperMethod helperMethod, bool useInlineCache, bool strictMode)
- {
- IR::Instr *instrPrev;
- Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
- if (strictMode)
- {
- propertyOperationFlag = Js::PropertyOperation_StrictMode;
- }
- instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
- LowerLdFld(delFldInstr, helperMethod, helperMethod, useInlineCache);
- return instrPrev;
- }
// Lower IsInst (instanceof). The operands arrive through a chain of argument
// instructions reachable via src2; this walks and frees that chain, loading
// the helper arguments in the order the helper expects:
// inline cache, script context, function, and finally the instance.
IR::Instr *
Lowerer::LowerIsInst(IR::Instr * isInstInstr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev;
    IR::Instr * instrArg;
    IR::RegOpnd * argOpnd;
    // inlineCache
    instrPrev = m_lowererMD.LoadHelperArgument(isInstInstr, LoadIsInstInlineCacheOpnd(isInstInstr, isInstInstr->GetSrc1()->AsIntConstOpnd()->AsUint32()));
    isInstInstr->FreeSrc1();
    // Walk to the first link of the argument chain.
    argOpnd = isInstInstr->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);
    // scriptContext
    LoadScriptContext(isInstInstr);
    // instance goes last, so remember it now
    IR::Opnd * instanceOpnd = instrArg->UnlinkSrc1();
    // Follow the chain to the next link before freeing the current one.
    argOpnd = instrArg->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg->Remove();
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);
    // function
    IR::Opnd *opnd = instrArg->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(isInstInstr, opnd);
    Assert(instrArg->GetSrc2() == NULL);
    instrArg->Remove();
    // instance
    m_lowererMD.LoadHelperArgument(isInstInstr, instanceOpnd);
    m_lowererMD.ChangeToHelperCall(isInstInstr, helperMethod);
    return instrPrev;
}
// Emit entry-point code that initializes a stack-allocated StackScriptFunction
// at the given stack sym and links it into the function's stack-function list
// (headed by nextStackFunctionOpnd).
void
Lowerer::GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionInfoPtrPtr nestedInfo)
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
    Assert(nextStackFunctionOpnd);
    IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
    // Compute the address of the stack-allocated function object.
    IR::RegOpnd * addressOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseAddressOpnd(addressOpnd, func);
    InsertLea(addressOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertBeforeInstr);
    // Currently we don't initialize the environment until we actually allocate the function.
    // We also walk the list of stack functions when we need to box them, so initialize the
    // environment to the null frame display for now.
    GenerateStackScriptFunctionInit(addressOpnd, nestedInfo,
        IR::AddrOpnd::New(func->GetThreadContextInfo()->GetNullFrameDisplayAddr(), IR::AddrOpndKindDynamicMisc, func), insertBeforeInstr);
    // Establish the next link
    InsertMove(nextStackFunctionOpnd, addressOpnd, insertBeforeInstr);
    // The "next" pointer lives immediately after the StackScriptFunction object.
    this->nextStackFunctionOpnd = IR::SymOpnd::New(stackSym, sizeof(Js::StackScriptFunction), TyMachPtr, func);
}
// Emit code to initialize a ScriptFunction object at regOpnd: load the
// FunctionInfo from nestedInfo, fetch its FunctionProxy and the proxy's
// deferred prototype type (calling a helper to ensure the type when it is
// null), then initialize the vtable, type, and the remaining fields.
// isZeroed is passed through to the GenerateMemInit* helpers and indicates
// the memory is already known to be zeroed.
void
Lowerer::GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
    Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    Func * func = this->m_func;
    IR::Opnd * functionInfoOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(functionInfoOpnd, IR::MemRefOpnd::New(nestedInfo, TyMachPtr, func), insertBeforeInstr);
    IR::Opnd * functionProxyOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(functionProxyOpnd, IR::IndirOpnd::New(functionInfoOpnd->AsRegOpnd(), Js::FunctionInfo::GetOffsetOfFunctionProxy(), TyMachPtr, func), insertBeforeInstr);
    IR::Opnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeOpnd, IR::IndirOpnd::New(functionProxyOpnd->AsRegOpnd(), Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(),
        TyMachPtr, func), insertBeforeInstr);
    // If the deferred prototype type is null, call the helper to create it;
    // otherwise skip straight to the field initialization below.
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    InsertTestBranch(typeOpnd, typeOpnd, Js::OpCode::BrEq_A, labelHelper, insertBeforeInstr);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    InsertBranch(Js::OpCode::Br, labelDone, insertBeforeInstr);
    insertBeforeInstr->InsertBefore(labelHelper);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, functionProxyOpnd);
    // Helper returns the ensured deferred prototype type into typeOpnd.
    IR::Instr * callHelperInstr = IR::Instr::New(Js::OpCode::Call, typeOpnd,
        IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperEnsureFunctionProxyDeferredPrototypeType, func), func);
    insertBeforeInstr->InsertBefore(callHelperInstr);
    m_lowererMD.LowerCall(callHelperInstr, 0);
    insertBeforeInstr->InsertBefore(labelDone);
    // Initialize the object's fields (vtable at offset 0, then the rest).
    GenerateMemInit(regOpnd, 0, vtableAddressOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfType(), typeOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfAuxSlots(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfObjectArray(), insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfConstructorCache(),
        LoadLibraryValueOpnd(insertBeforeInstr, LibraryValue::ValueConstructorCacheDefaultInstance),
        insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfFunctionInfo(), functionInfoOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfEnvironment(), envOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), insertBeforeInstr, isZeroed);
}
- void
- Lowerer::GenerateStackScriptFunctionInit(IR::RegOpnd * regOpnd, Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr)
- {
- Func * func = this->m_func;
- GenerateScriptFunctionInit(regOpnd,
- LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction),
- nestedInfo, envOpnd, insertBeforeInstr);
- InsertMove(IR::IndirOpnd::New(regOpnd, Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func),
- IR::AddrOpnd::NewNull(func), insertBeforeInstr);
- }
// Create (or locate) the stack slot that heads this function's list of
// stack-allocated script functions, and remember an operand for it in
// nextStackFunctionOpnd.
// On x86/x64 a fresh stack slot is allocated at the base local stack height;
// on other architectures a fixed frame-register-relative slot
// (Js::Constants::StackNestedFuncList) is used instead.
void
Lowerer::EnsureStackFunctionListStackSym()
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
#if defined(_M_IX86) || defined(_M_X64)
    // Must run before any other locals are allocated, so the slot lands at a
    // known position right after the (optional) argument slot.
    Assert(func->m_localStackHeight == (func->HasArgumentSlot()? MachArgsSlotOffset : 0));
    StackSym * stackFunctionListStackSym = StackSym::New(TyMachPtr, func);
    func->StackAllocate(stackFunctionListStackSym, sizeof(Js::ScriptFunction *));
    nextStackFunctionOpnd = IR::SymOpnd::New(stackFunctionListStackSym, TyMachPtr, func);
#else
    Assert(func->m_localStackHeight == 0);
    nextStackFunctionOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(NULL, FRAME_REG, TyMachReg, func),
        -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachPtr, func);
#endif
}
- void
- Lowerer::AllocStackClosure()
- {
- m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
- m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
- }
- void
- Lowerer::EnsureZeroLastStackFunctionNext()
- {
- Assert(nextStackFunctionOpnd != nullptr);
- Func * func = this->m_func;
- IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
- InsertMove(nextStackFunctionOpnd, IR::AddrOpnd::NewNull(func), insertBeforeInstr);
- }
// Lower NewScFunc for functions that may allocate nested functions on the
// stack. Emits a runtime test of the function body's StackNestedFunc flag:
// when it is clear, control branches to labelNoStackFunc and the regular
// NewScFunc lowering that follows handles the allocation; when it is set, the
// function object is initialized in place (a fresh stack allocation, or for
// loop bodies a slot indexed off the function pointer supplied by
// NewScFuncData) and the helper call path is skipped via labelDone.
// Returns the instruction that assigns the destination; for loop bodies also
// updates *ppEnvOpnd to the environment extracted from NewScFuncData.
IR::Instr *
Lowerer::GenerateNewStackScFunc(IR::Instr * newScFuncInstr, IR::RegOpnd ** ppEnvOpnd)
{
    Assert(newScFuncInstr->m_func->DoStackNestedFunc());
    Func * func = newScFuncInstr->m_func;
    uint index = newScFuncInstr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    Assert(index < func->GetJITFunctionBody()->GetNestedCount());
    IR::LabelInstr * labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    // Test the StackNestedFunc flag; if not set, fall back to the regular path.
    InsertTestBranch(IR::MemRefOpnd::New(func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, func),
        IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, func, true),
        Js::OpCode::BrEq_A, labelNoStackFunc, newScFuncInstr);
    Js::FunctionInfoPtrPtr nestedInfo = func->GetJITFunctionBody()->GetNestedFuncRef(index);
    IR::Instr * instrAssignDst;
    IR::RegOpnd * envOpnd = *ppEnvOpnd;
    if (!func->IsLoopBody())
    {
        // the stackAllocate Call below for this sym is passing a size that is not represented by any IRType and hence passing TyMisc for the constructor
        StackSym * stackSym = StackSym::New(TyMisc, func);
        // ScriptFunction and its next pointer
        this->m_func->StackAllocate(stackSym, sizeof(Js::StackScriptFunction) + sizeof(Js::StackScriptFunction *));
        GenerateStackScriptFunctionInit(stackSym, nestedInfo);
        InsertMove(IR::SymOpnd::New(stackSym, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd,
            newScFuncInstr);
        // The destination is the address of the stack-allocated function.
        instrAssignDst =
            InsertLea(newScFuncInstr->GetDst()->AsRegOpnd(), IR::SymOpnd::New(stackSym, TyMachPtr, func), newScFuncInstr);
    }
    else
    {
        Assert(func->IsTopFunc());
        Assert(func->m_loopParamSym);
        // For a loop body, the environment and function pointer both come from
        // the NewScFuncData instruction that defines the env operand.
        IR::Instr * envDefInstr = envOpnd->AsRegOpnd()->m_sym->m_instrDef;
        Assert(envDefInstr && envDefInstr->m_opcode == Js::OpCode::NewScFuncData);
        IR::RegOpnd * opndFuncPtr = envDefInstr->UnlinkSrc2()->AsRegOpnd();
        Assert(opndFuncPtr);
        envOpnd = envDefInstr->UnlinkSrc1()->AsRegOpnd();
        Assert(envOpnd);
        *ppEnvOpnd = envOpnd;
        envDefInstr->Remove();
        if (index != 0)
        {
            // Advance the function pointer to the index'th StackScriptFunction.
            IR::RegOpnd * opnd = IR::RegOpnd::New(TyVar, func);
            InsertAdd(false, opnd, opndFuncPtr, IR::IntConstOpnd::New(index * sizeof(Js::StackScriptFunction), TyMachPtr, func), newScFuncInstr);
            opndFuncPtr = opnd;
        }
        InsertMove(IR::IndirOpnd::New(opndFuncPtr, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd, newScFuncInstr);
        instrAssignDst = InsertMove(newScFuncInstr->GetDst(), opndFuncPtr, newScFuncInstr);
    }
    // Skip the regular (helper) allocation path that follows.
    InsertBranch(Js::OpCode::Br, labelDone, newScFuncInstr);
    newScFuncInstr->InsertBefore(labelNoStackFunc);
    newScFuncInstr->InsertAfter(labelDone);
    return instrAssignDst;
}
- IR::Instr *
- Lowerer::LowerNewScFunc(IR::Instr * newScFuncInstr)
- {
- IR::Instr *stackNewScFuncInstr = nullptr;
- IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
- if (newScFuncInstr->m_func->DoStackNestedFunc())
- {
- stackNewScFuncInstr = GenerateNewStackScFunc(newScFuncInstr, &envOpnd);
- }
- IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
- IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
- m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScFunc );
- return stackNewScFuncInstr == nullptr? instrPrev : stackNewScFuncInstr;
- }
- IR::Instr *
- Lowerer::LowerNewScFuncHomeObj(IR::Instr * newScFuncInstr)
- {
- newScFuncInstr->m_opcode = Js::OpCode::CallHelper;
- IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScrFunc_OP_NewScFuncHomeObj, this->m_func);
- IR::Opnd * src1 = newScFuncInstr->UnlinkSrc1();
- newScFuncInstr->SetSrc1(helperOpnd);
- newScFuncInstr->SetSrc2(src1);
- return newScFuncInstr;
- }
- IR::Instr *
- Lowerer::LowerNewScGenFunc(IR::Instr * newScFuncInstr)
- {
- IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
- IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
- IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
- m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScGenFunc );
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerNewScGenFuncHomeObj(IR::Instr * newScFuncInstr)
- {
- newScFuncInstr->m_opcode = Js::OpCode::CallHelper;
- IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScrFunc_OP_NewScGenFuncHomeObj, this->m_func);
- IR::Opnd * src1 = newScFuncInstr->UnlinkSrc1();
- newScFuncInstr->SetSrc1(helperOpnd);
- newScFuncInstr->SetSrc2(src1);
- return newScFuncInstr;
- }
- IR::Instr *
- Lowerer::LowerStPropIdArrFromVar(IR::Instr * stPropIdInstr)
- {
- IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperStPropIdArrFromVar, this->m_func);
- IR::Opnd * src1 = stPropIdInstr->UnlinkSrc1();
- stPropIdInstr->SetSrc1(helperOpnd);
- stPropIdInstr->SetSrc2(src1);
- return m_lowererMD.LowerCallHelper(stPropIdInstr);
- }
- IR::Instr *
- Lowerer::LowerRestify(IR::Instr * newRestInstr)
- {
- IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperRestify, this->m_func);
- IR::Opnd * src1 = newRestInstr->UnlinkSrc1();
- newRestInstr->SetSrc1(helperOpnd);
- newRestInstr->SetSrc2(src1);
- return m_lowererMD.LowerCallHelper(newRestInstr);
- }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerScopedLdFld
///
/// Lower a load instruction that takes an additional instance to use as a
/// default if the scope chain provided doesn't contain the property.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedLdFld(IR::Instr * ldFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache)
{
    IR::Opnd *src;
    IR::Instr *instrPrev = ldFldInstr->m_prev;
    if(!withInlineCache)
    {
        LoadScriptContext(ldFldInstr);
    }
    // The root object serves as the default instance argument.
    intptr_t rootObject = m_func->GetJITFunctionBody()->GetRootObject();
    src = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, this->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(ldFldInstr, src);
    src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(ldFldInstr, src);
    if (withInlineCache)
    {
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd()));
        m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }
    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerScopedLdInst
///
/// Lower a load instruction that takes an additional instance to use as a
/// default if the scope chain provided doesn't contain the property.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedLdInst(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    IR::Opnd *src;
    IR::Instr *instrPrev;
    // last argument is the scriptContext
    instrPrev = LoadScriptContext(instr);
    src = instr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected Reg opnd as src2");
    // __out Var*. The StackSym is allocated in irbuilder, and here we need to insert a lea
    StackSym* dstSym = src->GetStackSym();
    IR::Instr *load = InsertLoadStackAddress(dstSym, instr);
    IR::Opnd* tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instr, tempOpnd);
    // now 3rd last argument is the rootObject of the function. Need to add addrOpnd to
    // pass in the address of the rootObject.
    IR::Opnd * srcOpnd;
    intptr_t rootObject = m_func->GetJITFunctionBody()->GetRootObject();
    srcOpnd = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, instr->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, srcOpnd);
    // no change, the property field built from irbuilder.
    src = instr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(instr, src);
    instrPrev = m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    // Read the __out value back from the stack slot into dstSym's register.
    IR::RegOpnd* regOpnd = IR::RegOpnd::New(dstSym, TyVar, m_func);
    IR::SymOpnd* symOpnd = IR::SymOpnd::New(dstSym, TyVar, m_func);
    this->InsertMove(regOpnd, symOpnd, instrPrev);
    return instrPrev;
}
// Lower a scoped delete-field. Pushes the property operation flags (strict
// mode or none) as the final helper argument, then reuses the scoped-load
// lowering, which shares the remaining argument layout (root object default
// instance + property sym).
IR::Instr *
Lowerer::LowerScopedDelFld(IR::Instr * delFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache, bool strictMode)
{
    IR::Instr *instrPrev;

    Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
    if (strictMode)
    {
        propertyOperationFlag = Js::PropertyOperation_StrictMode;
    }

    // Helper arguments are loaded last-to-first, so the flags are the helper's last argument.
    instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));

    LowerScopedLdFld(delFldInstr, helperMethod, withInlineCache);

    return instrPrev;
}
// Lower a profiled store-field into a call to one of the profiling helpers
// (ProfiledInitFld_Jit / ProfiledStFld_Jit / ProfiledStSuperFld_Jit), which
// perform the store and record profile data. Returns the instruction that
// preceded the store so the caller can continue lowering from there.
IR::Instr *
Lowerer::LowerProfiledStFld(IR::JitProfilingInstr *stFldInstr, Js::PropertyOperationFlags flags)
{
    Assert(stFldInstr->profileId == Js::Constants::NoProfileId);

    IR::Instr *const instrPrev = stFldInstr->m_prev;

    /*
        void ProfilingHelpers::ProfiledInitFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStSuperFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer,
            const Var thisInstance)
    */

    // Helper arguments are loaded last-to-first.
    m_lowererMD.LoadHelperArgument(stFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // Extra argument only for ProfiledStSuperFld_Jit: the 'this' instance from src2.
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }
    m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc1());

    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    m_lowererMD.LoadHelperArgument(
        stFldInstr,
        IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
    LoadPropertySymAsArgument(stFldInstr, dst);

    // Select the helper by opcode; for plain stores the root-object and
    // strict-mode flags pick among the four StFld variants.
    IR::JnHelperMethod helper;
    switch (stFldInstr->m_opcode)
    {
        case Js::OpCode::InitFld:
        case Js::OpCode::InitRootFld:
            helper = IR::HelperProfiledInitFld;
            break;

        case Js::OpCode::StSuperFld:
            helper = IR::HelperProfiledStSuperFld;
            break;

        default:
            helper =
                flags & Js::PropertyOperation_Root
                    ? flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStRootFld_Strict : IR::HelperProfiledStRootFld
                    : flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStFld_Strict : IR::HelperProfiledStFld;
            break;
    }

    stFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(stFldInstr, 0);

    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStFld
///
/// Lower a store-field instruction to a helper call. Optionally passes the
/// inline cache (monomorphic or polymorphic) and the property operation
/// flags. Profiled stores are redirected to LowerProfiledStFld.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStFld(
    IR::Instr * stFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool withInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper,
    bool withPutFlags,
    Js::PropertyOperationFlags flags)
{
    if (stFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledStFld(stFldInstr->AsJitProfilingInstr(), flags);
    }

    IR::Instr *instrPrev = stFldInstr->m_prev;
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");

    IR::Opnd * inlineCacheOpnd = nullptr;
    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // A polymorphic cache is available and the caller supplied a distinct
            // polymorphic helper: switch to both.
            JITTimePolymorphicInlineCache * polymorphicInlineCache = dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd(), isHelper);
        }
    }
    if (withPutFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    IR::Opnd *src = stFldInstr->UnlinkSrc1();

    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // StSuperFld passes an extra 'this' instance from src2.
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }

    m_lowererMD.LoadHelperArgument(stFldInstr, src);
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        Assert(inlineCacheOpnd != nullptr);

        this->m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        this->m_lowererMD.LoadHelperArgument(stFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    IR::RegOpnd *opndBase = dst->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod, labelBailOut, opndBase, dst->AsSymOpnd()->IsPropertySymOpnd() ? dst->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
// Lower a store-field end-to-end. Tries, in order: the custom-property fast
// path, the cached-type (object type spec) path, and the generic fast path;
// whenever a fast path still needs a slow path, falls back to the helper-call
// lowering (LowerStFld) under the appropriate helper label.
IR::Instr* Lowerer::GenerateCompleteStFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        // Stores that call accessors must not carry implicit-call bailouts.
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }

    IR::Instr* prevInstr = instr->m_prev;

    IR::LabelInstr* labelBailOut = nullptr;
    IR::LabelInstr* labelHelper = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;

    if(emitFastPath && GenerateFastStFldForCustomProperty(instr, &labelHelper))
    {
        if(labelHelper)
        {
            // The custom-property fast path still needs a helper fallback.
            Assert(labelHelper->isOpHelper);
            instr->InsertBefore(labelHelper);
            prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
        else
        {
            // Fully handled by the fast path; the original instruction is dead.
            instr->Remove();
            return prevInstr;
        }
    }
    else if (this->GenerateStFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
    {
        // Fully lowered via cached type information.
        Assert(labelHelper == nullptr);
        return prevInstr;
    }
    else if (emitFastPath)
    {
        if (!GenerateFastStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper, withPutFlags, flags))
        {
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = this->LowerStFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
    }
    else
    {
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, monoHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
    }

    return prevInstr;
}
- void
- Lowerer::GenerateDirectFieldStore(IR::Instr* instrStFld, IR::PropertySymOpnd* propertySymOpnd)
- {
- Func* func = instrStFld->m_func;
- IR::Opnd *opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrStFld, propertySymOpnd);
- // Store the value to the slot, getting the slot index from the cache.
- uint16 index = propertySymOpnd->GetSlotIndex();
- Assert(index != -1);
- #if defined(RECYCLER_WRITE_BARRIER_JIT) && (defined(_M_IX86) || defined(_M_AMD64))
- if (opndSlotArray->IsRegOpnd())
- {
- IR::IndirOpnd * opndDst = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
- this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
- }
- else
- {
- Assert(opndSlotArray->IsMemRefOpnd());
- IR::MemRefOpnd * opndDst = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
- this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
- }
- #else
- IR::Opnd *opnd;
- if (opndSlotArray->IsRegOpnd())
- {
- opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
- }
- else
- {
- opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
- }
- this->InsertMove(opnd, instrStFld->GetSrc1(), instrStFld);
- #endif
- }
// Try to lower a store-field using cached type information (object type
// specialization). Returns true if the store was fully lowered here (direct
// slot store, possibly with type checks and/or a bailout), in which case the
// caller is done. Returns false when the caller must still emit a fast/helper
// path; then *continueAsHelperOut, *labelHelperOut, and *typeOpndOut tell the
// caller how to continue (helper label to branch to, and the register already
// holding the object's type, if any).
bool
Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
{
    IR::Instr *instr;
    IR::RegOpnd *typeOpnd = nullptr;
    IR::LabelInstr* labelObjCheckFailed = nullptr;
    IR::LabelInstr *labelTypeCheckFailed = nullptr;
    IR::LabelInstr *labelBothTypeChecksFailed = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    Assert(continueAsHelperOut != nullptr);
    *continueAsHelperOut = false;

    Assert(labelHelperOut != nullptr);
    *labelHelperOut = nullptr;

    Assert(typeOpndOut != nullptr);
    *typeOpndOut = nullptr;

    Assert(instrStFld->GetDst()->IsSymOpnd());
    if (!instrStFld->GetDst()->AsSymOpnd()->IsPropertySymOpnd() || !instrStFld->GetDst()->AsPropertySymOpnd()->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    IR::PropertySymOpnd *propertySymOpnd = instrStFld->GetDst()->AsPropertySymOpnd();

    // If we have any object type spec info, we better not believe this is a load from prototype, since this is a store
    // and we never share inline caches between loads and stores.
    Assert(!propertySymOpnd->HasObjTypeSpecFldInfo() || !propertySymOpnd->IsLoadedFromProto());

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");

    if (!propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
    {
        return false;
    }

    Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind())));

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field store: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(),
        propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));

    if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
    {
        propertySymOpnd->UpdateSlotForFinalType();
    }

    Func* func = instrStFld->m_func;

    // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
    // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.

    bool hasTypeCheckBailout = instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind());

    // If the type hasn't been checked upstream, see if it makes sense to check it here.
    bool isTypeChecked = propertySymOpnd->IsTypeChecked();
    if (!isTypeChecked)
    {
        // If the initial type has been checked, we can do a hard coded type transition without any type checks
        // (see GenerateStFldWithCachedFinalType), which is always worth doing, even if the type is not needed
        // downstream. We're not introducing any additional bailouts.
        if (propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType() && !propertySymOpnd->IsTypeDead())
        {
            // We have a final type in hand, so we can JIT (most of) the type transition work.
            return this->GenerateStFldWithCachedFinalType(instrStFld, propertySymOpnd);
        }

        if (propertySymOpnd->HasTypeMismatch())
        {
            // So we have a type mismatch, which happens when the type (and the type without property if ObjTypeSpecStore
            // is on) on this instruction didn't match the live type value according to the flow. We must have hit some
            // stale inline cache (perhaps inlined from a different function, or on a code path not taken for a while).
            // Either way, we know exactly what type the object must have at this point (fully determined by flow), but
            // we don't know whether that type already has the property we're storing here. All in all, we know exactly
            // what shape the object will have after this operation, but we're not sure what label (type) to give this
            // shape. Thus we can simply let the fast path do its thing based on the live inline cache. The downstream
            // instructions relying only on this shape (loads and stores) are safe, and those that need the next type
            // (i.e. adds) will do the same thing as this instruction.
            return false;
        }

        // If we're still here then we must need a primary type check on this instruction to protect
        // a sequence of field operations downstream, or a local type check for an isolated field store.
        Assert(propertySymOpnd->NeedsPrimaryTypeCheck() || propertySymOpnd->NeedsLocalTypeCheck());

        labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelBothTypeChecksFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelObjCheckFailed = hasTypeCheckBailout ? labelBothTypeChecksFailed : IR::LabelInstr::New(Js::OpCode::Label, func, true);
        typeOpnd = this->GenerateCachedTypeCheck(instrStFld, propertySymOpnd, labelObjCheckFailed, labelBothTypeChecksFailed, labelTypeCheckFailed);
        *typeOpndOut = typeOpnd;
    }

    // Either we are protected by a type check upstream or we just emitted a type check above,
    // now it's time to store the field value.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);

    // If we are protected by a type check upstream, we don't need a bailout or helper here, delete the instruction
    // and return "true" to indicate that we succeeded in eliminating it.
    if (isTypeChecked)
    {
        Assert(labelTypeCheckFailed == nullptr && labelBothTypeChecksFailed == nullptr);
        AssertMsg(
            !instrStFld->HasBailOutInfo() || instrStFld->OnlyHasLazyBailOut(),
            "Why does a direct field store have bailout that is not lazy?"
        );
        instrStFld->Remove();
        return true;
    }

    // Otherwise, branch around the helper on successful type check.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
    instrStFld->InsertBefore(instr);

    // On failed type check, try the type without property if we've got one.
    instrStFld->InsertBefore(labelTypeCheckFailed);

    // Caution, this is one of the dusty corners of the JIT. We only get here if this is an isolated StFld which adds a property, or
    // ObjTypeSpecStore is off. In the former case no downstream operations depend on the final type produced here, and we can fall
    // back on live cache and helper if the type doesn't match. In the latter we may have a cache with type transition, which must
    // produce a value for the type after transition, because that type is consumed downstream. Thus, if the object's type doesn't
    // match either the type with or the type without the property we're storing, we must bail out here.
    bool emitAddProperty = propertySymOpnd->IsMono() && propertySymOpnd->HasInitialType();

    if (emitAddProperty)
    {
        GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, typeOpnd, labelBothTypeChecksFailed);
        GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetType());
        instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
        instrStFld->InsertBefore(instr);
    }

    instrStFld->InsertBefore(labelBothTypeChecksFailed);
    instrStFld->InsertAfter(labelDone);

    if (hasTypeCheckBailout)
    {
        AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !PHASE_ON(Js::DeadStoreTypeChecksOnStoresPhase, this->m_func) || !propertySymOpnd->IsTypeDead() || propertySymOpnd->TypeCheckRequired(),
            "Why does a field store have a type check bailout, if its type is dead?");

        if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld)
        {
            // Set the cache index in the bailout info so that the generated code will write it into the
            // bailout record at runtime.
            instrStFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
        }
        else
        {
            Assert(instrStFld->GetBailOutInfo()->polymorphicCacheIndex == propertySymOpnd->m_inlineCacheIndex);
        }

        // The store itself has been emitted above; the remaining instruction becomes the bailout.
        instrStFld->m_opcode = Js::OpCode::BailOut;
        instrStFld->FreeSrc1();
        instrStFld->FreeDst();

        this->GenerateBailOut(instrStFld);
        return true;
    }
    else
    {
        // No bailout available: hand the failed-check path back to the caller as a helper continuation.
        *continueAsHelperOut = true;
        Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelBothTypeChecksFailed);
        *labelHelperOut = labelObjCheckFailed;
        return false;
    }
}
// Emit the type check protecting an object-type-specialized operation:
// performs an object (tagged value) test if needed, loads the object's type
// into a register, and compares it against either a pinned type (direct
// monomorphic check) or a property guard's value (indirect check). When an
// equivalent-type set is available, a mismatch falls through to the
// type-equivalence helper before branching to the failure label.
// Returns the register holding the loaded type.
IR::RegOpnd *
Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr* labelObjCheckFailed, IR::LabelInstr *labelTypeCheckFailed, IR::LabelInstr *labelSecondChance)
{
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());

    Func* func = instrChk->m_func;
    IR::RegOpnd *regOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(func);
    regOpnd->SetValueType(propertySymOpnd->GetPropertyOwnerValueType());

    if (!regOpnd->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(regOpnd, instrChk, labelObjCheckFailed);
    }

    // Load the current object type into typeOpnd
    IR::RegOpnd* typeOpnd = IR::RegOpnd::New(TyMachReg, func);
    IR::Opnd *sourceType;
    if (regOpnd->m_sym->IsConst() && !regOpnd->m_sym->IsIntConst() && !regOpnd->m_sym->IsFloatConst())
    {
        // The object is a (non-numeric) constant: read its type field through a direct memory reference.
        sourceType = IR::MemRefOpnd::New((BYTE*)regOpnd->m_sym->GetConstAddress() +
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func, IR::AddrOpndKindDynamicObjectTypeRef);
    }
    else
    {
        sourceType = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
    }
    InsertMove(typeOpnd, sourceType, instrChk);

    // Note: don't attempt equivalent type check if we're doing a final type optimization or if we have a monomorphic
    // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks
    // repeatedly and never rejit.
    bool doEquivTypeCheck =
        instrChk->HasEquivalentTypeCheckBailOut() ||
        (propertySymOpnd->HasEquivalentTypeSet() &&
        !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) &&
        !propertySymOpnd->MustDoMonoCheck() &&
        (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut()));
    Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut());

    // Create and initialize the property guard if required. Note that for non-shared monomorphic checks we can refer
    // directly to the (pinned) type and not use a guard.
    Js::PropertyGuard * typeCheckGuard;
    IR::RegOpnd * polyIndexOpnd = nullptr;
    JITTypeHolder monoType = nullptr;
    if (doEquivTypeCheck)
    {
        typeCheckGuard = CreateEquivalentTypeGuardAndLinkToGuardedProperties(propertySymOpnd);
        if (typeCheckGuard->IsPoly())
        {
            Assert(propertySymOpnd->ShouldUsePolyEquivTypeGuard(this->m_func));
            polyIndexOpnd = this->GeneratePolymorphicTypeIndex(typeOpnd, typeCheckGuard, instrChk);
        }
    }
    else
    {
        monoType = propertySymOpnd->MustDoMonoCheck() ? propertySymOpnd->GetMonoGuardType() : propertySymOpnd->GetType();
        typeCheckGuard = this->CreateTypePropertyGuardForGuardedProperties(monoType, propertySymOpnd);
    }

    // Create the opnd we will check against the current type.
    IR::Opnd *expectedTypeOpnd;
    JITTypeHolder directCheckType = nullptr;
    if (typeCheckGuard == nullptr)
    {
        // No guard: compare directly against the pinned type's address.
        Assert(monoType != nullptr);
        expectedTypeOpnd = IR::AddrOpnd::New(monoType->GetAddr(), IR::AddrOpndKindDynamicType, func, true);
        directCheckType = monoType;
    }
    else
    {
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        if (this->m_func->IsOOPJIT())
        {
            // OOP JIT: the guard lives in the native code data block, addressed via its sym.
            if (polyIndexOpnd != nullptr)
            {
                IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, func);
                this->GenerateLeaOfOOPData(baseOpnd, typeCheckGuard, Js::JitPolyEquivalentTypeGuard::GetOffsetOfPolyValues(), instrChk);
                expectedTypeOpnd = IR::IndirOpnd::New(baseOpnd, polyIndexOpnd, m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
            }
            else
            {
                expectedTypeOpnd = this->GenerateIndirOfOOPData(typeCheckGuard, 0, instrChk);
            }
            this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            if (polyIndexOpnd != nullptr)
            {
                IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, func);
                InsertMove(baseOpnd, IR::AddrOpnd::New((Js::Var)typeCheckGuard->AsPolyTypeCheckGuard()->GetAddressOfPolyValues(), IR::AddrOpndKindDynamicTypeCheckGuard, func, true), instrChk);
                expectedTypeOpnd = IR::IndirOpnd::New(baseOpnd, polyIndexOpnd, m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
            }
            else
            {
                expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typeCheckGuard->GetAddressOfValue()), TyMachPtr, func, IR::AddrOpndKindDynamicGuardValueRef);
            }
        }
    }

    if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func))
    {
        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check "),
            directCheckType != nullptr ? _u("direct") : propertySymOpnd->IsPoly() ? _u("equivalent") : _u("indirect"));
#if DBG
        if (propertySymOpnd->GetGuardedPropOps() != nullptr)
        {
            Output::Print(_u(" guarding operations:\n    "));
            propertySymOpnd->GetGuardedPropOps()->Dump();
        }
        else
        {
            Output::Print(_u("\n"));
        }
#else
        Output::Print(_u("\n"));
#endif
        Output::Flush();
    }

    if (doEquivTypeCheck)
    {
        // TODO (ObjTypeSpec): For isolated equivalent type checks it would be good to emit a check if the cache is still valid, and
        // if not go straight to live polymorphic cache. This way we wouldn't have to bail out and re-JIT, and also wouldn't continue
        // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
        // type in the equivalent type cache.
        IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);
        InsertObjectPoison(regOpnd, branchInstr, instrChk, false);

        IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
        InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);

        instrChk->InsertBefore(labelCheckEquivalentType);

        // The guard itself (or its OOP-data address) is passed to the equivalence helper.
        IR::Opnd* typeCheckGuardOpnd = nullptr;
        if (this->m_func->IsOOPJIT())
        {
            typeCheckGuardOpnd = IR::RegOpnd::New(TyMachPtr, func);
            this->GenerateLeaOfOOPData(typeCheckGuardOpnd->AsRegOpnd(), typeCheckGuard, 0, instrChk);
            this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            typeCheckGuardOpnd = IR::AddrOpnd::New((Js::Var)typeCheckGuard, IR::AddrOpndKindDynamicTypeCheckGuard, func, true);
        }

        IR::JnHelperMethod helperMethod;
        if (polyIndexOpnd != nullptr)
        {
            helperMethod = propertySymOpnd->HasFixedValue() ? IR::HelperCheckIfPolyTypeIsEquivalentForFixedField : IR::HelperCheckIfPolyTypeIsEquivalent;
            this->m_lowererMD.LoadHelperArgument(instrChk, polyIndexOpnd);
        }
        else
        {
            helperMethod = propertySymOpnd->HasFixedValue() ? IR::HelperCheckIfTypeIsEquivalentForFixedField : IR::HelperCheckIfTypeIsEquivalent;
        }

        this->m_lowererMD.LoadHelperArgument(instrChk, typeCheckGuardOpnd);
        this->m_lowererMD.LoadHelperArgument(instrChk, typeOpnd);

        IR::RegOpnd* equivalentTypeCheckResultOpnd = IR::RegOpnd::New(TyUint8, func);
        IR::HelperCallOpnd* equivalentTypeCheckHelperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
        IR::Instr* equivalentTypeCheckCallInstr = IR::Instr::New(Js::OpCode::Call, equivalentTypeCheckResultOpnd, equivalentTypeCheckHelperCallOpnd, func);
        instrChk->InsertBefore(equivalentTypeCheckCallInstr);
        this->m_lowererMD.LowerCall(equivalentTypeCheckCallInstr, 0);

        InsertTestBranch(equivalentTypeCheckResultOpnd, equivalentTypeCheckResultOpnd, Js::OpCode::BrEq_A, labelTypeCheckFailed, instrChk);

        // TODO (ObjTypeSpec): Consider emitting a shared bailout to which a specific bailout kind is written at runtime. This would allow us to distinguish
        // between non-equivalent type and other cases, such as invalidated guard (due to fixed field overwrite, perhaps) or too much thrashing on the
        // equivalent type cache. We could determine bailout kind based on the value returned by the helper. In the case of cache thrashing we could just
        // turn off the whole optimization for a given function.

        instrChk->InsertBefore(labelTypeCheckSucceeded);
    }
    else
    {
        IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
        InsertObjectPoison(regOpnd, branchInstr, instrChk, false);
    }

    // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
    // as long as there are other objects with types equivalent on the properties referenced by this code. The type is kept alive until entry point
    // installation by the JIT transfer data, and after that by the equivalent type cache, so it will stay alive unless or until it gets evicted
    // from the cache.
    if (!doEquivTypeCheck)
    {
        Assert(monoType != nullptr);
        PinTypeRef(monoType, monoType.t, instrChk, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
    }

    return typeOpnd;
}
- IR::RegOpnd *
- Lowerer::GeneratePolymorphicTypeIndex(IR::RegOpnd * typeOpnd, Js::PropertyGuard * typeCheckGuard, IR::Instr * instrInsert)
- {
- IR::RegOpnd * resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- InsertMove(resultOpnd, typeOpnd, instrInsert);
- InsertShift(Js::OpCode::ShrU_A, false, resultOpnd, resultOpnd, IR::IntConstOpnd::New(PolymorphicInlineCacheShift, TyInt8, this->m_func, true), instrInsert);
- InsertAnd(resultOpnd, resultOpnd, IR::IntConstOpnd::New(typeCheckGuard->AsPolyTypeCheckGuard()->GetSize() - 1, TyMachReg, this->m_func, true), instrInsert);
- return resultOpnd;
- }
- void
- Lowerer::GenerateLeaOfOOPData(IR::RegOpnd * regOpnd, void * address, int32 offset, IR::Instr * instrInsert)
- {
- Func * func = instrInsert->m_func;
- int32 dataOffset;
- Int32Math::Add(NativeCodeData::GetDataTotalOffset(address), offset, &dataOffset);
- InsertLea(regOpnd,
- IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dataOffset, TyMachPtr,
- #if DBG
- NativeCodeData::GetDataDescription(address, func->m_alloc),
- #endif
- func, true),
- instrInsert);
- }
- IR::Opnd *
- Lowerer::GenerateIndirOfOOPData(void * address, int32 offset, IR::Instr * instrInsert)
- {
- Func * func = instrInsert->m_func;
- int32 dataOffset;
- Int32Math::Add(NativeCodeData::GetDataTotalOffset(address), offset, &dataOffset);
- IR::Opnd * opnd = IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dataOffset, TyMachPtr,
- #if DBG
- NativeCodeData::GetDataDescription(address, func->m_alloc),
- #endif
- func, true);
- return opnd;
- }
// Poison the object operand relative to the given branch by delegating to the
// machine-dependent layer. No-op on ARM (the MD implementation is not used there).
void
Lowerer::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore)
{
#ifndef _M_ARM
    LowererMD::InsertObjectPoison(poisonedOpnd, branchInstr, insertInstr, isForStore);
#endif
}
// Pin a type reference on the function so it stays alive with the generated
// code, tracing the pin when the TracePinnedTypes phase is enabled. A pin is
// "strong" when typeRef is the type itself (typeRef == type.t), "weak" when it
// is a separate reference holder.
void
Lowerer::PinTypeRef(JITTypeHolder type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
{
    this->m_func->PinTypeRef(typeRef);

    if (PHASE_TRACE(Js::TracePinnedTypesPhase, this->m_func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(_u("PinnedTypes: function %s(%s) instr %s property ID %u pinned %s reference 0x%p to type 0x%p.\n"),
            this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
            Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), propertyId,
            typeRef == type.t ? _u("strong") : _u("weak"), typeRef, type.t);
        Output::Flush();
    }
}
// Emit a monomorphic type check verifying that the property owner still has the
// cached "initial" type (the type before the property add), branching to
// labelTypeCheckFailed when the check fails. If typeOpnd is null, the object's
// type is loaded here (preceded by an object/tagged-value test when needed).
void
Lowerer::GenerateCachedTypeWithoutPropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::Opnd *typeOpnd, IR::LabelInstr *labelTypeCheckFailed)
{
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasInitialType());
    JITTypeHolder typeWithoutProperty = propertySymOpnd->GetInitialType();

    // We should never add properties to objects of static types.
    Assert(Js::DynamicType::Is(typeWithoutProperty->GetTypeId()));

    if (typeOpnd == nullptr)
    {
        // No opnd holding the type was passed in, so we have to load the type here.
        IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
        if (!baseOpnd->IsNotTaggedValue())
        {
            // The base may be a tagged value; send non-objects to the failure path.
            m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelTypeCheckFailed);
        }

        IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        InsertMove(typeOpnd, opnd, instrInsert);
    }

    Js::JitTypePropertyGuard* typePropertyGuard = CreateTypePropertyGuardForGuardedProperties(typeWithoutProperty, propertySymOpnd);

    IR::Opnd *expectedTypeOpnd;

    if (typePropertyGuard)
    {
        // Compare against the guard's (invalidatable) value slot instead of a
        // hard-coded type address.
        bool emitDirectCheck = true;

        Assert(typePropertyGuard != nullptr);
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));

        if (this->m_func->IsOOPJIT())
        {
            // OOP JIT: address the guard through the native-code-data block.
            int typeCheckGuardOffset = NativeCodeData::GetDataTotalOffset(typePropertyGuard);
            expectedTypeOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), typeCheckGuardOffset, TyMachPtr,
#if DBG
                NativeCodeData::GetDataDescription(typePropertyGuard, this->m_func->m_alloc),
#endif
                this->m_func, true);

            this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            // In-proc JIT: reference the guard's value slot directly.
            expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typePropertyGuard->GetAddressOfValue()), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
        }
        // NOTE(review): emitDirectCheck is unconditionally false here, so the
        // trace below always reports "indirect" — confirm that is intended.
        emitDirectCheck = false;

        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check for type 0x%p.\n"),
            emitDirectCheck ? _u("direct") : _u("indirect"), typeWithoutProperty->GetAddr());
    }
    else
    {
        // No guard needed: compare against the raw type address.
        expectedTypeOpnd = IR::AddrOpnd::New(typeWithoutProperty->GetAddr(), IR::AddrOpndKindDynamicType, m_func, true);
    }

    InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelTypeCheckFailed, instrInsert);

    // Technically, it should be enough to pin the final type, because it should keep all of its predecessors alive, but
    // just to be extra cautious, let's pin the initial type as well.
    PinTypeRef(typeWithoutProperty, typeWithoutProperty.t, instrInsert, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
}
- bool
- Lowerer::GenerateFixedFieldGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
- {
- return this->GeneratePropertyGuardCheck(insertPointInstr, propertySymOpnd, labelBailOut);
- }
// Create (or reuse) a single-type property guard for 'type' and register it
// against every property this operand protects. Returns nullptr when no guard
// was created (no shared property guards for this function, or lazy fixed-type
// bailout records the properties instead of emitting guard checks).
Js::JitTypePropertyGuard*
Lowerer::CreateTypePropertyGuardForGuardedProperties(JITTypeHolder type, IR::PropertySymOpnd* propertySymOpnd)
{
    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->GetGuardedPropOps() != nullptr);

    Js::JitTypePropertyGuard* guard = nullptr;

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        // Consider (ObjTypeSpec): Because we allocate these guards from the JIT thread we can't share guards for the same type across multiple functions.
        // This leads to proliferation of property guards on the thread context. The alternative would be to pre-allocate shared (by value) guards
        // from the thread context during work item creation. We would create too many of them (because some types aren't actually used as guards),
        // but we could share a guard for a given type between functions. This may ultimately be better.
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [this, type, &guard](Js::PropertyId propertyId)
        {
            if (ShouldDoLazyFixedTypeBailout(this->m_func))
            {
                // Lazy bailout: just record the property; no runtime guard is created.
                this->m_func->lazyBailoutProperties.Item(propertyId);
            }
            else
            {
                // Create the guard on first use; one guard covers all linked properties.
                if (guard == nullptr)
                {
                    guard = this->m_func->GetOrCreateSingleTypeGuard(type->GetAddr());
                }

                if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("ObjTypeSpec: function %s(%s) registered guard 0x%p with value 0x%p for property ID %u.\n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                        guard, guard->GetValue(), propertyId);
                    Output::Flush();
                }

                this->m_func->EnsurePropertyGuardsByPropertyId();
                this->m_func->LinkGuardToPropertyId(propertyId, guard);
            }
        });
    }

    return guard;
}
// Create an equivalent-type guard (polymorphic or single-cache) for this
// operand, link it to the shared property guards it protects, and populate the
// guard's equivalent-type cache with the type set plus the property-id/slot
// records that the runtime equivalence check consumes.
Js::JitEquivalentTypeGuard*
Lowerer::CreateEquivalentTypeGuardAndLinkToGuardedProperties(IR::PropertySymOpnd* propertySymOpnd)
{
    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->HasObjTypeSpecFldInfo() && propertySymOpnd->HasEquivalentTypeSet() && propertySymOpnd->GetGuardedPropOps());

    Js::JitEquivalentTypeGuard* guard;
    if (propertySymOpnd->ShouldUsePolyEquivTypeGuard(this->m_func))
    {
        Js::JitPolyEquivalentTypeGuard *polyGuard = this->m_func->CreatePolyEquivalentTypeGuard(propertySymOpnd->GetObjTypeSpecFldId());

        // Copy types from the type set to the guard's value locations
        Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
        for (uint16 ti = 0; ti < typeSet->GetCount(); ti++)
        {
            intptr_t typeToCache = typeSet->GetType(ti)->GetAddr();
            polyGuard->SetPolyValue(typeToCache, polyGuard->GetIndexForValue(typeToCache));
        }
        guard = polyGuard;
    }
    else
    {
        guard = this->m_func->CreateEquivalentTypeGuard(propertySymOpnd->GetFirstEquivalentType(), propertySymOpnd->GetObjTypeSpecFldId());
    }

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        // Register this guard against each guarded property so invalidating the
        // property invalidates the guard.
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
        {
            if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("ObjTypeSpec: function %s(%s) registered equivalent type spec guard 0x%p with value 0x%p for property ID %u.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    guard, guard->GetValue(), propertyId);
                Output::Flush();
            }

            this->m_func->EnsurePropertyGuardsByPropertyId();
            this->m_func->LinkGuardToPropertyId(propertyId, guard);
        });
    }

    Assert(guard->GetCache() != nullptr);
    Js::EquivalentTypeCache* cache = guard->GetCache();

    // TODO (ObjTypeSpec): If we delayed populating the types until encoder, we could bulk allocate all equivalent type caches
    // in one block from the heap. This would allow us to not allocate them from the native code data allocator and free them
    // when no longer needed. However, we would need to store the global property operation ID in the guard, so we can look up
    // the info in the encoder. Perhaps we could overload the cache pointer to be the ID until encoder.

    // Copy types from the type set to the guard's cache
    Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
    // Cap the cached types at the cache's fixed capacity.
    uint16 cachedTypeCount = typeSet->GetCount() < EQUIVALENT_TYPE_CACHE_SIZE ? typeSet->GetCount() : EQUIVALENT_TYPE_CACHE_SIZE;
    for (uint16 ti = 0; ti < cachedTypeCount; ti++)
    {
        cache->types[ti] = (Js::Type*)typeSet->GetType(ti)->GetAddr();
    }

#ifdef DEBUG
    // Sanity check: null entries may only appear at the tail of the cache —
    // a non-null type following a null one indicates corruption.
    bool there_was_a_null_type = false;
    for (uint16 ti = 0; ti < cachedTypeCount; ti++)
    {
        if (cache->types[ti] == nullptr)
        {
            there_was_a_null_type = true;
        }
        else if (there_was_a_null_type)
        {
            AssertMsg(false, "there_was_a_null_type ? something is wrong here.");
        }
    }
#endif

    // Populate property ID and slot index arrays on the guard's cache. We iterate over the
    // bit vector of property operations protected by this guard, but some property operations
    // may be referring to the same property ID (but not share the same cache). We skip
    // redundant entries by maintaining a hash set of property IDs we've already encountered.
    auto propOps = propertySymOpnd->GetGuardedPropOps();
    uint propOpCount = propOps->Count();

    bool isTypeStatic = Js::StaticType::Is(propertySymOpnd->GetFirstEquivalentType()->GetTypeId());
    JsUtil::BaseDictionary<Js::PropertyId, Js::EquivalentPropertyEntry*, JitArenaAllocator> propIds(this->m_alloc, propOpCount);
    Js::EquivalentPropertyEntry* properties = AnewArray(this->m_alloc, Js::EquivalentPropertyEntry, propOpCount);
    uint propIdCount = 0;

    FOREACH_BITSET_IN_SPARSEBV(propOpId, propOps)
    {
        ObjTypeSpecFldInfo* propOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propOpId);
        Js::PropertyId propertyId = propOpInfo->GetPropertyId();
        Js::PropertyIndex propOpIndex = Js::Constants::NoSlot;

        bool hasFixedValue = propOpInfo->HasFixedValue();
        if (hasFixedValue)
        {
            cache->SetHasFixedValue();
        }

        bool isLoadedFromProto = propOpInfo->IsLoadedFromProto();
        if (isLoadedFromProto)
        {
            cache->SetIsLoadedFromProto();
        }
        else
        {
            // Only non-proto accesses carry a slot index on the instance itself.
            propOpIndex = propOpInfo->GetSlotIndex();
        }
        bool propOpUsesAuxSlot = propOpInfo->UsesAuxSlot();

        AssertMsg(!isTypeStatic || !propOpInfo->IsBeingStored(), "Why are we storing a field to an object of static type?");

        Js::EquivalentPropertyEntry* entry = nullptr;
        if (propIds.TryGetValue(propertyId, &entry))
        {
            // Seen this property ID before: merge when the slot info matches.
            if (propOpIndex == entry->slotIndex && propOpUsesAuxSlot == entry->isAuxSlot)
            {
                entry->mustBeWritable |= propOpInfo->IsBeingStored();
            }
            else
            {
                // Due to inline cache sharing we have the same property accessed using different caches
                // with inconsistent info. This means a guaranteed bailout on the equivalent type check.
                // We'll just let it happen and turn off the optimization for this function. We could avoid
                // this problem by tracking property information on the value type in glob opt.
                if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("EquivObjTypeSpec: top function %s (%s): duplicate property clash on %d \n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), m_func->GetDebugNumberSet(debugStringBuffer), propertyId);
                    Output::Flush();
                }

                Assert(propIdCount < propOpCount);
                __analysis_assume(propIdCount < propOpCount);
                entry = &properties[propIdCount++];
                entry->propertyId = propertyId;
                entry->slotIndex = propOpIndex;
                entry->isAuxSlot = propOpUsesAuxSlot;
                entry->mustBeWritable = propOpInfo->IsBeingStored();
            }
        }
        else
        {
            // First occurrence of this property ID: record a fresh entry.
            Assert(propIdCount < propOpCount);
            __analysis_assume(propIdCount < propOpCount);
            entry = &properties[propIdCount++];
            entry->propertyId = propertyId;
            entry->slotIndex = propOpIndex;
            entry->isAuxSlot = propOpUsesAuxSlot;
            entry->mustBeWritable = propOpInfo->IsBeingStored();
            propIds.AddNew(propertyId, entry);
        }
    }
    NEXT_BITSET_IN_SPARSEBV;

    cache->record.propertyCount = propIdCount;
    // Js::EquivalentPropertyEntry does not contain pointer, no need to fixup
    cache->record.properties = NativeCodeDataNewArrayNoFixup(this->m_func->GetNativeCodeDataAllocator(), Js::EquivalentPropertyEntry, propIdCount);
    memcpy(cache->record.properties, properties, propIdCount * sizeof(Js::EquivalentPropertyEntry));

    return guard;
}
// Link a constructor cache to the shared property guards of every property it
// protects, so that invalidating any of those properties invalidates the
// cache. Returns true when at least one property was linked.
bool
Lowerer::LinkCtorCacheToGuardedProperties(JITTimeConstructorCache* ctorCache)
{
    // We do not always have guarded properties. If the constructor is empty and the subsequent code doesn't load or store any of
    // the constructed object's properties, or if all inline caches are empty then this ctor cache doesn't guard any properties.
    if (ctorCache->GetGuardedPropOps() == nullptr)
    {
        return false;
    }

    bool linked = false;
    if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        linked = LinkGuardToGuardedProperties(ctorCache->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
        {
            if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("ObjTypeSpec: function %s(%s) registered ctor cache 0x%p with value 0x%p for property %u.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    ctorCache->GetRuntimeCacheAddr(), ctorCache->GetType()->GetAddr(), propertyId);
                Output::Flush();
            }

            this->m_func->EnsureCtorCachesByPropertyId();
            this->m_func->LinkCtorCacheToPropertyId(propertyId, ctorCache);
        });
    }
    return linked;
}
// Walk the guarded-property-operation bit vector and invoke 'link' for every
// property that actually requires guard protection (being added, loaded from a
// prototype, or carrying a fixed value) and has a shared property guard.
// Returns true when 'link' was invoked at least once.
template<typename LinkFunc>
bool
Lowerer::LinkGuardToGuardedProperties(const BVSparse<JitArenaAllocator>* guardedPropOps, LinkFunc link)
{
    Assert(this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards());
    Assert(guardedPropOps != nullptr);
    bool linked = false;

    // For every entry in the bit vector, register the guard for the corresponding property ID.
    FOREACH_BITSET_IN_SPARSEBV(propertyOpId, guardedPropOps)
    {
        ObjTypeSpecFldInfo* propertyOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propertyOpId);
        Js::PropertyId propertyId = propertyOpInfo->GetPropertyId();

        // It's okay for an equivalent type check to be registered as a guard against a property becoming read-only. This transpires if, there is
        // a different monomorphic type check upstream, which guarantees the actual type of the object needed for the hard-coded type transition,
        // but it is later followed by a sequence of polymorphic inline caches, which do not have that type in the type set. At the beginning of
        // that sequence we'll emit an equivalent type check to verify that the actual type has relevant properties on appropriate slots. Then in
        // the dead store pass we'll walk upwards and encounter this check first, thus we'll drop the guarded properties accumulated thus far
        // (including the one being added) on that check.
        // AssertMsg(!propertyOpInfo->IsBeingAdded() || !isEquivalentTypeGuard, "Why do we have an equivalent type check protecting a property add?");
        if (propertyOpInfo->IsBeingAdded() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->HasFixedValue())
        {
            // Equivalent object type spec only supports fixed fields on prototypes. This is to simplify the slow type equivalence check.
            // See JavascriptOperators::CheckIfTypeIsEquivalent.
            Assert(!propertyOpInfo->IsPoly() || (!propertyOpInfo->HasFixedValue() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->UsesAccessor()));

            if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuard(propertyId))
            {
                link(propertyId);
                linked = true;
            }
            else
            {
                AssertMsg(false, "Did we fail to create a shared property guard for a guarded property?");
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV;

    return linked;
}
// Emit a runtime check that the operand's property guard is still valid
// (non-zero), branching to labelBailOut when it has been invalidated.
// Returns true when an inline check was emitted; false when lazy fixed-data
// bailout records the property instead of emitting a check.
bool
Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    intptr_t guard = propertySymOpnd->GetPropertyGuardValueAddr();
    Assert(guard != 0);

    if (ShouldDoLazyFixedDataBailout(this->m_func))
    {
        // Lazy bailout: remember the property; no inline guard check is needed.
        this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId());
        return false;
    }
    else
    {
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        // An invalidated guard holds 0; compare the guard's value slot to null.
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(guard, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
        IR::BranchInstr *branchInstr = InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
        // Poison the object pointer relative to the guard branch — see InsertObjectPoison.
        IR::RegOpnd *objPtrReg = IR::RegOpnd::New(propertySymOpnd->GetObjectSym(), TyMachPtr, m_func);
        InsertObjectPoison(objPtrReg, branchInstr, insertPointInstr, false);
        return true;
    }
}
// Lower an instruction that checks a property guard (bailing out when it has
// been invalidated) and otherwise loads the base object's type into dst;
// values that fail the object test get the static number type instead.
// Returns the instruction preceding the lowered sequence.
IR::Instr*
Lowerer::GeneratePropertyGuardCheckBailoutAndLoadType(IR::Instr *insertInstr)
{
    IR::Instr* instrPrev = insertInstr->m_prev;

    IR::Opnd* numberTypeOpnd = IR::AddrOpnd::New(insertInstr->m_func->GetScriptContextInfo()->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, insertInstr->m_func);
    IR::PropertySymOpnd* propertySymOpnd = insertInstr->GetSrc1()->AsPropertySymOpnd();

    IR::LabelInstr* labelBailout = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);
    IR::LabelInstr* labelContinue = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func);
    IR::LabelInstr* loadNumberTypeLabel = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);

    // Bail out when the guard has been zeroed (invalidated).
    GeneratePropertyGuardCheck(insertInstr, propertySymOpnd, labelBailout);

    // Load the type for objects; non-objects branch to loadNumberTypeLabel.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    GenerateObjectTestAndTypeLoad(insertInstr, baseOpnd, insertInstr->GetDst()->AsRegOpnd(), loadNumberTypeLabel);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Non-object path: dst = the script context's static number type.
    insertInstr->InsertBefore(loadNumberTypeLabel);
    this->InsertMove(insertInstr->GetDst(), numberTypeOpnd, insertInstr);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Bailout path: the original instruction itself becomes the BailOut.
    insertInstr->InsertBefore(labelBailout);
    insertInstr->InsertAfter(labelContinue);
    insertInstr->FreeSrc1();
    insertInstr->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(insertInstr);

    return instrPrev;
}
- void
- Lowerer::GenerateAdjustSlots(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
- {
- IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- bool adjusted = this->GenerateAdjustBaseSlots(instrInsert, baseOpnd, initialType, finalType);
- if (!adjusted)
- {
- baseOpnd->Free(m_func);
- }
- }
// Emit a call to the AdjustSlots helper when the initial->final type
// transition requires growing the object's slot capacity. Returns true when
// the helper call was emitted (consuming baseOpnd as the instance argument),
// false when no adjustment is needed.
bool
Lowerer::GenerateAdjustBaseSlots(IR::Instr *instrInsert, IR::RegOpnd *baseOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
{
    // Possibly allocate new slot capacity to accommodate a type transition.
    AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(initialType->GetTypeHandler(), finalType->GetTypeHandler()),
        "Incompatible typeHandler transition?");

    int oldCount = 0;
    int newCount = 0;
    Js::PropertyIndex inlineSlotCapacity = 0;
    Js::PropertyIndex newInlineSlotCapacity = 0;
    bool needSlotAdjustment =
        JITTypeHandler::NeedSlotAdjustment(initialType->GetTypeHandler(), finalType->GetTypeHandler(), &oldCount, &newCount, &inlineSlotCapacity, &newInlineSlotCapacity);
    if (!needSlotAdjustment)
    {
        return false;
    }

    // Call AdjustSlots using the new counts. Because AdjustSlots uses the "no dispose" flavor of alloc,
    // no implicit calls are possible, and we don't need an implicit call check and bailout.
    // CALL AdjustSlots, instance, newInlineSlotCapacity, newAuxSlotCapacity

    //3rd Param
    Assert(newCount > newInlineSlotCapacity);
    const int newAuxSlotCapacity = newCount - newInlineSlotCapacity;
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newAuxSlotCapacity, TyInt32, this->m_func));

    //2nd Param
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newInlineSlotCapacity, TyUint16, this->m_func));

    //1st Param (instance)
    m_lowererMD.LoadHelperArgument(instrInsert, baseOpnd);

    //CALL HelperAdjustSlots
    IR::Opnd *opnd = IR::HelperCallOpnd::New(IR::HelperAdjustSlots, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(opnd);
    instrInsert->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);

    return true;
}
// Store a field whose cache carries a final type: adjust slot capacity if the
// transition needs it, pin the final type, write the new type into the object,
// then emit the actual field store.
void
Lowerer::GenerateFieldStoreWithTypeChange(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
{
    // Adjust instance slots, if necessary.
    this->GenerateAdjustSlots(instrStFld, propertySymOpnd, initialType, finalType);

    // We should never add properties to objects of static types.
    Assert(Js::DynamicType::Is(finalType->GetTypeId()));

    // Let's pin the final type to be sure its alive when we try to do the type transition.
    PinTypeRef(finalType, finalType.t, instrStFld, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);

    IR::Opnd *finalTypeOpnd = IR::AddrOpnd::New(finalType->GetAddr(), IR::AddrOpndKindDynamicType, instrStFld->m_func, true);

    // Set the new type.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(instrStFld->m_func);
    IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrStFld->m_func);
    this->InsertMove(opnd, finalTypeOpnd, instrStFld);

    // Now do the store.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);
}
// Lower an add-property store that has a cached final type: verify the
// object's current (initial) type, perform the type transition plus the
// store, and route type-check failure to a bailout. Always returns true.
bool
Lowerer::GenerateStFldWithCachedFinalType(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd)
{
    // This function tries to treat a sequence of add-property stores as a single type transition.
    Assert(propertySymOpnd == instrStFld->GetDst()->AsPropertySymOpnd());
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasFinalType());
    Assert(propertySymOpnd->HasInitialType());

    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;

    AssertMsg(!propertySymOpnd->IsTypeChecked(), "Why are we doing a type transition when we have the type we want?");

    // If the initial type must be checked here, do it.
    Assert(instrStFld->HasBailOutInfo());
    labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, nullptr/*typeOpnd*/, labelBailOut);

    // Do the type transition.
    GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetFinalType());
    instrStFld->FreeSrc1();
    instrStFld->FreeDst();

    // Insert the bailout and let the main path branch around it.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrStFld->InsertBefore(instr);

    if (instrStFld->HasBailOutInfo())
    {
        Assert(labelBailOut != nullptr);
        instrStFld->InsertBefore(labelBailOut);
        instrStFld->InsertAfter(labelDone);

        instrStFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrStFld);
    }
    else
    {
        // NOTE(review): given the Assert(instrStFld->HasBailOutInfo()) above,
        // this arm should only be reachable in release builds where the assert
        // compiles away; it acts as a safety net.
        instrStFld->InsertAfter(labelDone);
        instrStFld->Remove();
    }

    return true;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerScopedStFld
///
///     Lower a scoped field store into a call to the given helper, loading
///     the helper arguments (flags, script context, default instance, value,
///     property sym, and optionally the inline cache info) beforehand.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedStFld(IR::Instr * stFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache,
    bool withPropertyOperationFlags, Js::PropertyOperationFlags flags)
{
    IR::Instr *instrPrev = stFldInstr->m_prev;

    if (withPropertyOperationFlags)
    {
        // Pass the property-operation flags as an extra helper argument.
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    if(!withInlineCache)
    {
        LoadScriptContext(stFldInstr);
    }

    // Pass the default instance
    IR::Opnd *src = stFldInstr->UnlinkSrc2();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the value to store
    src = stFldInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the property sym to store to
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        // Pass the inline cache index, the runtime inline cache, and the function body.
        m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(stFldInstr, this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd()));

        m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod);

    return instrPrev;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerLoadVar
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerLoadVar(IR::Instr *instr, IR::Opnd *opnd)
- {
- instr->SetSrc1(opnd);
- return m_lowererMD.ChangeToAssign(instr);
- }
// Obtain a stack slot for a temp-number result, pass its address as a helper
// argument, and return the LEA instruction that materialized the address.
IR::Instr *
Lowerer::LoadHelperTemp(IR::Instr * instr, IR::Instr * instrInsert)
{
    IR::Opnd *tempOpnd;
    IR::Opnd *dst = instr->GetDst();
    AssertMsg(dst != nullptr, "Always expect a dst for these.");
    AssertMsg(instr->dstIsTempNumber, "Should only be loading temps here");
    Assert(dst->IsRegOpnd());
    StackSym * tempNumberSym = this->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
    // Take the address of the stack slot and hand it to the helper.
    IR::Instr *load = InsertLoadStackAddress(tempNumberSym, instrInsert);
    tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instrInsert, tempOpnd);
    return load;
}
- void
- Lowerer::LoadArgumentCount(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(instr->GetDst());
- Assert(!instr->GetSrc1());
- Assert(!instr->GetSrc2());
- if(instr->m_func->IsInlinee())
- {
- // Argument count including 'this'
- instr->SetSrc1(IR::IntConstOpnd::New(instr->m_func->actualCount, TyUint32, instr->m_func, true));
- LowererMD::ChangeToAssign(instr);
- }
- else if (instr->m_func->GetJITFunctionBody()->IsCoroutine())
- {
- IR::SymOpnd* symOpnd = LoadCallInfo(instr);
- instr->SetSrc1(symOpnd);
- LowererMD::ChangeToAssign(instr);
- }
- else
- {
- m_lowererMD.LoadArgumentCount(instr);
- }
- }
- void
- Lowerer::LoadStackArgPtr(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(instr->GetDst());
- Assert(!instr->GetSrc1());
- Assert(!instr->GetSrc2());
- if(instr->m_func->IsInlinee())
- {
- // Address of argument after 'this'
- const auto firstRealArgStackSym = instr->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
- this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr);
- instr->SetSrc1(IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, instr->m_func));
- ChangeToLea(instr);
- }
- else
- {
- m_lowererMD.LoadStackArgPtr(instr);
- }
- }
- IR::Instr *
- Lowerer::InsertLoadStackAddress(StackSym *sym, IR::Instr * instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
- {
- IR::RegOpnd * regDst = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::SymOpnd * symSrc = IR::SymOpnd::New(sym, TyMachPtr, this->m_func);
- return InsertLea(regDst, symSrc, instrInsert);
- }
- void
- Lowerer::LoadArgumentsFromFrame(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(instr->GetDst());
- Assert(!instr->GetSrc1());
- Assert(!instr->GetSrc2());
- if(instr->m_func->IsInlinee())
- {
- // Use the inline object meta arg slot for the arguments object
- instr->SetSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
- LowererMD::ChangeToAssign(instr);
- }
- else
- {
- m_lowererMD.LoadArgumentsFromFrame(instr);
- }
- }
- #ifdef ENABLE_WASM
// Verify at runtime that an indirect wasm call target has the expected
// signature. When the expected signature has no short form, the full
// comparison is done in the Op_CheckWasmSignature helper; otherwise the
// callee's short signature is compared inline, trapping with
// WASMERR_SignatureMismatch on mismatch.
IR::Instr *
Lowerer::LowerCheckWasmSignature(IR::Instr * instr)
{
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2()->IsIntConstOpnd());

    int sigId = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();

    IR::Instr *instrPrev = instr->m_prev;

    // The callee's actual signature, read off the WasmScriptFunction.
    IR::IndirOpnd * actualSig = IR::IndirOpnd::New(instr->UnlinkSrc1()->AsRegOpnd(), Js::WasmScriptFunction::GetOffsetOfSignature(), TyMachReg, m_func);

    Wasm::WasmSignature * expectedSig = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignature(sigId);
    if (expectedSig->GetShortSig() == Js::Constants::InvalidSignature)
    {
        // No short signature available: compare the full signatures in the helper.
        intptr_t sigAddr = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignatureAddr(sigId);
        IR::AddrOpnd * expectedOpnd = IR::AddrOpnd::New(sigAddr, IR::AddrOpndKindConstantAddress, m_func);
        m_lowererMD.LoadHelperArgument(instr, expectedOpnd);
        m_lowererMD.LoadHelperArgument(instr, actualSig);
        LoadScriptContext(instr);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CheckWasmSignature);
    }
    else
    {
        IR::LabelInstr * trapLabel = InsertLabel(true, instr);
        IR::LabelInstr * labelFallThrough = InsertLabel(false, instr->m_next);
        IR::RegOpnd * actualRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        InsertMove(actualRegOpnd, actualSig, trapLabel);

        // Compare the callee's short signature with the expected constant;
        // on mismatch fall into the trap label.
        IR::IndirOpnd * shortSigIndir = IR::IndirOpnd::New(actualRegOpnd, Wasm::WasmSignature::GetOffsetOfShortSig(), TyMachReg, m_func);
        InsertCompareBranch(shortSigIndir, IR::IntConstOpnd::New(expectedSig->GetShortSig(), TyMachReg, m_func), Js::OpCode::BrNeq_A, trapLabel, trapLabel);

        // Match: skip the trap code.
        InsertBranch(Js::OpCode::Br, labelFallThrough, trapLabel);

        GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_SignatureMismatch), TyInt32, m_func), instr);

        instr->Remove();
    }
    return instrPrev;
}
// Load a function out of a WebAssembly table: bounds-check the index against
// the table's current length, load the entry from the values array, and trap
// with WASMERR_TableIndexOutOfRange (index too large) or
// WASMERR_NeedWebAssemblyFunc (empty slot) as appropriate.
IR::Instr *
Lowerer::LowerLdWasmFunc(IR::Instr* instr)
{
    IR::Instr * prev = instr->m_prev;

    IR::RegOpnd * tableReg = instr->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd * indexOpnd = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();

    IR::IndirOpnd * lengthOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfCurrentLength(), TyUint32, m_func);
    IR::IndirOpnd * valuesIndirOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfValues(), TyMachPtr, m_func);
    IR::RegOpnd * valuesRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);

    byte scale = m_lowererMD.GetDefaultIndirScale();
    IR::IndirOpnd * funcIndirOpnd;
    if (indexOpnd->IsIntConstOpnd())
    {
        // Constant index: fold the scaled index into the indir offset.
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsIntConstOpnd()->AsInt32() << scale, TyMachPtr, m_func);
    }
    else
    {
        Assert(indexOpnd->IsRegOpnd());
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsRegOpnd(), TyMachPtr, m_func);
        funcIndirOpnd->SetScale(scale);
    }

    // Label layout (in order): trapLabel, trapOutOfBoundsLabel, <throws>, doneLabel.
    IR::LabelInstr * trapOutOfBoundsLabel = InsertLabel(true, instr);
    IR::LabelInstr * trapLabel = InsertLabel(true, trapOutOfBoundsLabel);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);

    // index >= currentLength => out-of-range trap.
    InsertCompareBranch(indexOpnd, lengthOpnd, Js::OpCode::BrGe_A, true, trapOutOfBoundsLabel, trapLabel);

    InsertMove(valuesRegOpnd, valuesIndirOpnd, trapLabel);
    InsertMove(dst, funcIndirOpnd, trapLabel);

    // Null entry in the table => "need WebAssembly func" trap.
    InsertCompareBranch(dst, IR::IntConstOpnd::New(0, TyMachPtr, m_func), Js::OpCode::BrEq_A, trapLabel, trapLabel);
    InsertBranch(Js::OpCode::Br, doneLabel, trapLabel);

    // First throw lands after trapLabel, second after trapOutOfBoundsLabel.
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_NeedWebAssemblyFunc), TyInt32, m_func), trapOutOfBoundsLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_TableIndexOutOfRange), TyInt32, m_func), instr);
    instr->Remove();
    return prev;
}
- IR::Instr *
- Lowerer::LowerGrowWasmMemory(IR::Instr* instr)
- {
- IR::Instr * instrPrev = m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
- m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_GrowWasmMemory);
- return instrPrev;
- }
- #endif
- IR::Instr *
- Lowerer::LowerUnaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
- {
- IR::Instr *instrPrev;
- IR::Opnd *src1 = instr->UnlinkSrc1();
- instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
- m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, opndBailoutArg);
- return instrPrev;
- }
- // helper takes memory context as second argument
- IR::Instr *
- Lowerer::LowerUnaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
- {
- IR::Instr *instrPrev;
- instrPrev = LoadScriptContext(instr);
- return this->LowerUnaryHelper(instr, helperMethod, opndBailoutArg);
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithFunctionInfo(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionInfoOpnd(instr));
- return this->LowerUnaryHelperMem(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
- return this->LowerUnaryHelperMem(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
- m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
- return this->LowerBinaryHelperMem(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
- IR::Instr * instrFirst;
- IR::Opnd * tempOpnd;
- if (instr->dstIsTempNumber)
- {
- instrFirst = this->LoadHelperTemp(instr, instr);
- }
- else
- {
- tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
- instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
- }
- this->LowerUnaryHelperMem(instr, helperMethod);
- return instrFirst;
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithTemp2(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
- if (instr->dstIsTempNumber)
- {
- IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
- this->LowerUnaryHelperMem(instr, helperMethodWithTemp);
- return instrFirst;
- }
- return this->LowerUnaryHelperMem(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithBoolReference(IR::Instr *instr, IR::JnHelperMethod helperMethod, bool useBoolForBailout)
- {
- if (!this->m_func->tempSymBool)
- {
- this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
- this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
- }
- IR::SymOpnd * boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
- IR::RegOpnd * boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- InsertLea(boolRefOpnd, boolOpnd, instr);
- m_lowererMD.LoadHelperArgument(instr, boolRefOpnd);
- return this->LowerUnaryHelperMem(instr, helperMethod, useBoolForBailout ? boolOpnd : nullptr);
- }
- IR::Instr *
- Lowerer::LowerInitCachedScope(IR::Instr* instr)
- {
- instr->m_opcode = Js::OpCode::CallHelper;
- IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperOP_InitCachedScope, this->m_func);
- IR::Opnd * src1 = instr->UnlinkSrc1();
- instr->SetSrc1(helperOpnd);
- instr->SetSrc2(src1);
- return instr;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerBinaryHelper
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerBinaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- // The only case where this would still be null when we return is when
- // helperMethod == HelperOP_CmSrEq_EmptyString; in which case we ignore
- // instrPrev.
- IR::Instr *instrPrev = nullptr;
- AssertMsg((Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1) ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementU ||
- instr->m_opcode == Js::OpCode::InvalCachedScope, "Expected a binary instruction...");
- IR::Opnd *src2 = instr->UnlinkSrc2();
- if (helperMethod != IR::HelperOP_CmSrEq_EmptyString)
- instrPrev = m_lowererMD.LoadHelperArgument(instr, src2);
- IR::Opnd *src1 = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src1);
- m_lowererMD.ChangeToHelperCall(instr, helperMethod);
- return instrPrev;
- }
- // helper takes memory context as third argument
- IR::Instr *
- Lowerer::LowerBinaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr *instrPrev;
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1, "Expected a binary instruction...");
- instrPrev = LoadScriptContext(instr);
- return this->LowerBinaryHelper(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
- IR::Instr * instrFirst;
- IR::Opnd * tempOpnd;
- if (instr->dstIsTempNumber)
- {
- instrFirst = this->LoadHelperTemp(instr, instr);
- }
- else
- {
- tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
- instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
- }
- this->LowerBinaryHelperMem(instr, helperMethod);
- return instrFirst;
- }
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithTemp2(
- IR::Instr *instr,
- IR::JnHelperMethod helperMethod,
- IR::JnHelperMethod helperMethodWithTemp
- )
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
- if (instr->dstIsTempNumber && instr->GetDst() && instr->GetDst()->GetValueType().HasBeenNumber())
- {
- IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
- this->LowerBinaryHelperMem(instr, helperMethodWithTemp);
- return instrFirst;
- }
- return this->LowerBinaryHelperMem(instr, helperMethod);
- }
// Fast path for string concatenation "left = left + right" where left is a
// dead temp: append right's single character in place into left's
// CompoundString buffer. Any failed precondition jumps to $helper, which falls
// back to the Op_AddLeftDead runtime helper emitted at the end.
IR::Instr *
Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
{
    IR::Opnd * opndLeft;
    IR::Opnd * opndRight;
    opndLeft = instr->GetSrc1();
    opndRight = instr->GetSrc2();
    Assert(opndLeft && opndRight);
    bool generateFastPath = this->m_func->DoFastPaths();
    // Fast path requires: both operands and dst in registers, likely-string
    // value types, dst aliasing left, and left distinct from right.
    if (!generateFastPath
        || !opndLeft->IsRegOpnd()
        || !opndRight->IsRegOpnd()
        || !instr->GetDst()->IsRegOpnd()
        || !opndLeft->GetValueType().IsLikelyString()
        || !opndRight->GetValueType().IsLikelyString()
        || !opndLeft->IsEqual(instr->GetDst()->AsRegOpnd())
        || opndLeft->IsEqual(opndRight))
    {
        return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
    }
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel(false);
    // All guard/fast-path code is inserted just before $helper, which itself
    // sits just before the original instr (the helper-call fallback).
    IR::LabelInstr *insertBeforeInstr = labelHelper;
    instr->InsertBefore(labelHelper);
    // Guard: left must be an object pointer, not a tagged value.
    if (!opndLeft->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
    }
    // Guard: left must be exactly a CompoundString (vtable compare).
    IR::BranchInstr* branchInstr = InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    InsertObjectPoison(opndLeft->AsRegOpnd(), branchInstr, insertBeforeInstr, false);
    // Guard: right must be a string.
    GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);
    // left->m_charLength <= JavascriptArray::MaxCharLength
    IR::IndirOpnd *indirLeftCharLengthOpnd = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func);
    IR::RegOpnd *regLeftCharLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(regLeftCharLengthOpnd, indirLeftCharLengthOpnd, insertBeforeInstr);
    InsertCompareBranch(
        regLeftCharLengthOpnd,
        IR::IntConstOpnd::New(Js::JavascriptString::MaxCharLength, TyUint32, m_func),
        Js::OpCode::BrGe_A,
        labelHelper,
        insertBeforeInstr);
    // left->m_pszValue == NULL (!left->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // right->m_pszValue != NULL (right->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if ownsLastBlock != 0
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfOwnsLastBlock(), TyUint8, m_func),
        IR::IntConstOpnd::New(0, TyUint8, m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if right->m_charLength == 1  (fast path only appends a single char)
    InsertCompareBranch(IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
        IR::IntConstOpnd::New(1, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if left->m_directCharLength == -1
    InsertCompareBranch(IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfDirectCharLength(), TyUint32, m_func),
        IR::IntConstOpnd::New(UINT32_MAX, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if lastBlockInfo.charLength < lastBlockInfo.charCapacity (room to append)
    IR::IndirOpnd *indirCharLength = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharLength(), TyUint32, m_func);
    IR::RegOpnd *charLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charLengthOpnd, indirCharLength, insertBeforeInstr);
    InsertCompareBranch(charLengthOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharCapacity(), TyUint32, m_func), Js::OpCode::BrGe_A, labelHelper, insertBeforeInstr);
    // load c = right->m_pszValue[0]
    IR::RegOpnd *pszValue0Opnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd *indirRightPszOpnd = IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(pszValue0Opnd, indirRightPszOpnd, insertBeforeInstr);
    IR::RegOpnd *charResultOpnd = IR::RegOpnd::New(TyUint16, this->m_func);
    InsertMove(charResultOpnd, IR::IndirOpnd::New(pszValue0Opnd, 0, TyUint16, this->m_func), insertBeforeInstr);
    // lastBlockInfo.buffer[blockCharLength] = c;
    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(baseOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoBuffer(), TyMachPtr, m_func), insertBeforeInstr);
    // Scale the index by sizeof(char16) to address 16-bit characters.
    IR::IndirOpnd *indirBufferToStore = IR::IndirOpnd::New(baseOpnd, charLengthOpnd, (byte)Math::Log2(sizeof(char16)), TyUint16, m_func);
    InsertMove(indirBufferToStore, charResultOpnd, insertBeforeInstr);
    // left->m_charLength++
    InsertAdd(false, indirLeftCharLengthOpnd, regLeftCharLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // lastBlockInfo.charLength++
    InsertAdd(false, indirCharLength, indirCharLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // Fast path done; skip over the helper call.
    InsertBranch(Js::OpCode::Br, labelFallThrough, insertBeforeInstr);
    // $helper: the original instr becomes the Op_AddLeftDead helper call.
    return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
}
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithTemp3(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp, IR::JnHelperMethod helperMethodLeftDead)
- {
- IR::Opnd *src1 = instr->GetSrc1();
- if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_isTempLastUse && !src1->GetValueType().IsNotString())
- {
- Assert(helperMethodLeftDead == IR::HelperOp_AddLeftDead);
- return LowerAddLeftDeadForString(instr);
- }
- else
- {
- return this->LowerBinaryHelperMemWithTemp2(instr, helperMethod, helperMethodWithTemp);
- }
- }
- StackSym *
- Lowerer::GetTempNumberSym(IR::Opnd * opnd, bool isTempTransferred)
- {
- AssertMsg(opnd->IsRegOpnd(), "Expected regOpnd");
- if (isTempTransferred)
- {
- StackSym * tempNumberSym = StackSym::New(TyMisc, m_func);
- this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
- return tempNumberSym;
- }
- StackSym * stackSym = opnd->AsRegOpnd()->m_sym;
- StackSym * tempNumberSym = stackSym->m_tempNumberSym;
- if (tempNumberSym == nullptr)
- {
- tempNumberSym = StackSym::New(TyMisc, m_func);
- this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
- stackSym->m_tempNumberSym = tempNumberSym;
- }
- return tempNumberSym;
- }
// Lowers a profiled element load into a direct call to
// ProfilingHelpers::ProfiledLdElem. Helper arguments are loaded last-parameter
// first, so the load order below must mirror the signature in reverse.
void Lowerer::LowerProfiledLdElemI(IR::JitProfilingInstr *const instr)
{
    Assert(instr);
    /*
        Var ProfilingHelpers::ProfiledLdElem(
            const Var base,
            const Var varIndex,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            bool didArrayAccessHelperCall,
            bool bailedOutOnArraySpecialization)
    */
    Func *const func = instr->m_func;
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));   // bailedOutOnArraySpecialization = false
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));   // didArrayAccessHelperCall = false
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    IR::IndirOpnd *const indir = instr->UnlinkSrc1()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: the index register if present, otherwise the constant offset
    // encoded as a tagged integer Var.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledLdElem, func));
    m_lowererMD.LowerCall(instr, 0);
}
// Lowers a profiled element store into a direct call to
// ProfilingHelpers::ProfiledStElem (or the _DefaultFlags variant, which omits
// the trailing flags/bool parameters when flags are PropertyOperation_None).
// Helper arguments are loaded last-parameter first.
void Lowerer::LowerProfiledStElemI(IR::JitProfilingInstr *const instr, const Js::PropertyOperationFlags flags)
{
    Assert(instr);
    /*
        void ProfilingHelpers::ProfiledStElem(
            const Var base,
            const Var varIndex,
            const Var value,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            const PropertyOperationFlags flags,
            bool didArrayAccessHelperCall,
            bool bailedOutOnArraySpecialization)
    */
    Func *const func = instr->m_func;
    IR::JnHelperMethod helper;
    if(flags == Js::PropertyOperation_None)
    {
        // Default-flags variant: flags and the two bools are implied.
        helper = IR::HelperProfiledStElem_DefaultFlags;
    }
    else
    {
        helper = IR::HelperProfiledStElem;
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));   // bailedOutOnArraySpecialization = false
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));   // didArrayAccessHelperCall = false
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(flags, TyInt32, func, true));
    }
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    IR::IndirOpnd *const indir = instr->UnlinkDst()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: the index register if present, otherwise the constant offset
    // encoded as a tagged integer Var.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(helper, func));
    m_lowererMD.LowerCall(instr, 0);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStElemI
///
///     Lowers an element store (dst indir = src1) into a call to the
///     appropriate Op_SetElementI / Op_SetNative*ElementI helper, after
///     peeling off any array-related bailout checks attached to the
///     instruction. Returns the instruction preceding the lowered region.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStElemI(IR::Instr * instr, Js::PropertyOperationFlags flags, bool isHelper, IR::JnHelperMethod helperMethod)
{
    IR::Instr *instrPrev = instr->m_prev;
    // Profiling build: emit the profiled store helper instead.
    if (instr->IsJitProfilingInstr())
    {
        Assert(!isHelper);
        LowerProfiledStElemI(instr->AsJitProfilingInstr(), flags);
        return instrPrev;
    }
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *newDst = nullptr;
    IRType srcType = src1->GetType();
    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
    #if !FLOATVAR
    if (dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray() && src1->IsRegOpnd())
    {
        // We allow the source of typedArray StElem to be marked as temp, since we just need the value,
        // however if the array turns out to be a non-typed array, or the index isn't valid (the value is then stored as a property)
        // the temp needs to be boxed if it is a float. The BoxStackNumber helper will box JavascriptNumbers
        // which are on the stack.
        // regVar = BoxStackNumber(src1, scriptContext)
        IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
        newInstr->SetDst(regVar);
        newInstr->SetSrc1(src1);
        instr->InsertBefore(newInstr);
        LowerUnaryHelperMem(newInstr, IR::HelperBoxStackNumber);
        // MOV src1, regVar
        newInstr = IR::Instr::New(Js::OpCode::Ld_A, src1, regVar, this->m_func);
        instr->InsertBefore(m_lowererMD.ChangeToAssign(newInstr));
    }
    #endif
    // Peel off each array-related bailout kind, asserting that nothing else
    // remains attached once all known kinds are handled.
    if(instr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if(bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutConvertedNativeArray)
        {
            // The helper's (machine-register) return value feeds a test after
            // the call: zero skips the bailout, non-zero takes it.
            IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instr->InsertAfter(labelSkipBailOut);
            LowerOneBailOutKind(instr, IR::BailOutConvertedNativeArray, isHelper);
            newDst = IR::RegOpnd::New(TyMachReg, m_func);
            InsertTestBranch(newDst, newDst, Js::OpCode::BrEq_A, labelSkipBailOut, instr->m_next);
        }
    }
    instr->UnlinkDst();
    instr->UnlinkSrc1();
    Assert(
        helperMethod == IR::HelperOP_InitElemGetter ||
        helperMethod == IR::HelperOP_InitElemSetter ||
        helperMethod == IR::HelperOP_InitComputedProperty ||
        helperMethod == IR::HelperOp_SetElementI ||
        helperMethod == IR::HelperOp_InitClassMemberComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberGetComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberSetComputedName
        );
    IR::IndirOpnd* dstIndirOpnd = dst->AsIndirOpnd();
    IR::Opnd *indexOpnd = dstIndirOpnd->UnlinkIndexOpnd();
    // Specialize the helper by index type (Int32/UInt32) and source type
    // (Var / native int / native float).
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        Assert(
            helperMethod != IR::HelperOP_InitElemGetter &&
            helperMethod != IR::HelperOP_InitElemSetter &&
            helperMethod != IR::HelperOp_InitClassMemberGetComputedName &&
            helperMethod != IR::HelperOp_InitClassMemberSetComputedName
            );
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_Int32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_Int32 :
                IR::HelperOp_SetNativeFloatElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_UInt32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_UInt32 :
                IR::HelperOp_SetNativeFloatElementI_UInt32;
        }
        else
        {
            Assert(FALSE);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)dst->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (srcType != TyVar)
        {
            helperMethod =
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI : IR::HelperOp_SetNativeFloatElementI;
        }
    }
    // Argument load order is significant: float sources go through the
    // double-argument path, other sources are loaded after the script context.
    if (srcType == TyFloat64)
    {
        m_lowererMD.LoadDoubleHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr,
        IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    LoadScriptContext(instr);
    if (srcType != TyFloat64)
    {
        m_lowererMD.LoadHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    dst->Free(this->m_func);
    // newDst is non-null only for the BailOutConvertedNativeArray case above.
    if (newDst)
    {
        instr->SetDst(newDst);
    }
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerLdElemI
///
///     Lowers an element load (dst = src1 indir) into a call to the
///     appropriate Op_GetElementI / Op_GetMethodElement / Op_TypeofElem
///     helper, specialized by index type, destination type, and (outside of
///     loop bodies) the base array's profiled element kind.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdElemI(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    IR::Instr *instrPrev = instr->m_prev;
    // Profiling build: emit the profiled load helper instead.
    if(instr->IsJitProfilingInstr())
    {
        Assert(helperMethod == IR::HelperOp_GetElementI);
        Assert(!isHelper);
        LowerProfiledLdElemI(instr->AsJitProfilingInstr());
        return instrPrev;
    }
    // Stack-args optimization: loads from the arguments object out of the
    // actuals range are handled by a fast path plus an unconditional bailout.
    if (!isHelper && instr->DoStackArgsOpt())
    {
        IR::LabelInstr * labelLdElem = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
        // Pass in null for labelFallThru to only generate the LdHeapArgument call
        GenerateFastArgumentsLdElemI(instr, nullptr);
        instr->InsertBefore(labelLdElem);
        instr->UnlinkSrc1();
        instr->UnlinkDst();
        Assert(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutKind::BailOnStackArgsOutOfActualsRange);
        instr = GenerateBailOut(instr, nullptr, nullptr);
        return instrPrev;
    }
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd");
    IR::IndirOpnd *indirOpnd = src1->AsIndirOpnd();
    bool loadScriptContext = true;
    IRType dstType = instr->GetDst()->GetType();
    IR::Opnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    // Specialize the helper by index type (Int32/UInt32) and destination type
    // (Var / native int / native float).
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        Assert(indexOpnd->GetType() == TyUint32 || indexOpnd->GetType() == TyInt32);
        switch (helperMethod)
        {
        case IR::HelperOp_GetElementI:
            if (indexOpnd->GetType() == TyUint32)
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_UInt32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_UInt32 :
                    IR::HelperOp_GetNativeFloatElementI_UInt32;
            }
            else
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_Int32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_Int32 :
                    IR::HelperOp_GetNativeFloatElementI_Int32;
            }
            break;
        case IR::HelperOp_GetMethodElement:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_GetMethodElement_UInt32 : IR::HelperOp_GetMethodElement_Int32;
            break;
        case IR::HelperOp_TypeofElem:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_TypeofElem_UInt32 : IR::HelperOp_TypeofElem_Int32;
            break;
        default:
            Assert(false);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (dstType != TyVar)
        {
            // Native-typed destination helpers do not take a script context.
            loadScriptContext = false;
            helperMethod =
                dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI : IR::HelperOp_GetNativeFloatElementI;
        }
    }
    // Jitted loop bodies have volatile information about values created outside the loop, so don't update array creation site
    // profile data from jitted loop bodies
    if(!m_func->IsLoopBody())
    {
        const ValueType baseValueType(indirOpnd->GetBaseOpnd()->GetValueType());
        if( baseValueType.IsLikelyObject() &&
            baseValueType.GetObjectType() == ObjectType::Array &&
            !baseValueType.HasIntElements())
        {
            // Use the "Expecting..." helper variants so the runtime can track
            // whether the array's element kind matches the profile.
            switch(helperMethod)
            {
                case IR::HelperOp_GetElementI:
                    helperMethod =
                        baseValueType.HasFloatElements()
                            ? IR::HelperOp_GetElementI_ExpectingNativeFloatArray
                            : IR::HelperOp_GetElementI_ExpectingVarArray;
                    break;
                case IR::HelperOp_GetElementI_UInt32:
                    helperMethod =
                        baseValueType.HasFloatElements()
                            ? IR::HelperOp_GetElementI_UInt32_ExpectingNativeFloatArray
                            : IR::HelperOp_GetElementI_UInt32_ExpectingVarArray;
                    break;
                case IR::HelperOp_GetElementI_Int32:
                    helperMethod =
                        baseValueType.HasFloatElements()
                            ? IR::HelperOp_GetElementI_Int32_ExpectingNativeFloatArray
                            : IR::HelperOp_GetElementI_Int32_ExpectingVarArray;
                    break;
            }
        }
    }
    // Argument load order is significant; do not reorder.
    if (loadScriptContext)
    {
        LoadScriptContext(instr);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = indirOpnd->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    src1->Free(this->m_func);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
- void Lowerer::LowerLdLen(IR::Instr *const instr, const bool isHelper)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::LdLen_A);
- // LdLen has persisted to this point for the sake of pre-lower opts.
- // Turn it into a LdFld of the "length" property.
- // This is normally a load of the internal "length" of an Array, so it probably doesn't benefit
- // from inline caching.
- if (instr->GetSrc1()->IsRegOpnd())
- {
- IR::RegOpnd * baseOpnd = instr->GetSrc1()->AsRegOpnd();
- PropertySym* fieldSym = PropertySym::FindOrCreate(baseOpnd->m_sym->m_id, Js::PropertyIds::length, (uint32)-1, (uint)-1, PropertyKindData, m_func);
- instr->ReplaceSrc1(IR::SymOpnd::New(fieldSym, TyVar, m_func));
- }
- LowerLdFld(instr, IR::HelperOp_GetProperty, IR::HelperOp_GetProperty, false, nullptr, isHelper);
- }
// Emits the actual asm.js/wasm array element move (dst <- src1 or store), and,
// when the Poison* config flags are enabled and the buffer is not a
// guard-page-protected "fast" virtual buffer, surrounds it with masking
// arithmetic so a speculated out-of-bounds access cannot leak data.
// The mask is computed branchlessly as ((index + accessSize - 1) - arraySize) >> (width-1):
// all-ones when the access is in bounds, all-zeros when it is not.
// src2 is the array size operand; indexOpnd may be null (constant offset).
// Returns the inserted move instruction.
IR::Instr* InsertMaskableMove(bool isStore, bool generateWriteBarrier, IR::Opnd* dst, IR::Opnd* src1, IR::Opnd* src2, IR::Opnd* indexOpnd, IR::Instr* insertBeforeInstr, Lowerer* lowerer)
{
    Assert(insertBeforeInstr->m_func->GetJITFunctionBody()->IsAsmJsMode());
    // Mask with the bounds check operand to avoid speculation issues
    const bool usesFastArray = insertBeforeInstr->m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer();
    IR::RegOpnd* mask = nullptr;
    bool shouldMaskResult = false;
    if (!usesFastArray)
    {
        bool shouldMask = isStore ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore) : CONFIG_FLAG_RELEASE(PoisonTypedArrayLoad);
        if (shouldMask && indexOpnd != nullptr)
        {
            // indices in asmjs fit in 32 bits, but we need a mask
            // temp = index + (access size - 1): the last byte touched.
            IR::RegOpnd* temp = IR::RegOpnd::New(indexOpnd->GetType(), insertBeforeInstr->m_func);
            lowerer->InsertMove(temp, indexOpnd, insertBeforeInstr, false);
            lowerer->InsertAdd(false, temp, temp, IR::IntConstOpnd::New((uint32)src1->GetSize() - 1, temp->GetType(), insertBeforeInstr->m_func, true), insertBeforeInstr);
            // For native ints and vars, we do the masking after the load; we don't do this for
            // floats and doubles because the conversion to and from fp regs is slow.
            shouldMaskResult = (!isStore) && IRType_IsNativeIntOrVar(src1->GetType()) && TySize[dst->GetType()] <= TySize[TyMachReg];
            // When we do post-load masking, we AND the mask with dst, so they need to have the
            // same type, as otherwise we'll hit asserts later on. When we do pre-load masking,
            // we AND the mask with the index component of the indir opnd for the move from the
            // array, so we need to align with that type instead.
            mask = IR::RegOpnd::New((shouldMaskResult ? dst : indexOpnd)->GetType(), insertBeforeInstr->m_func);
            if (temp->GetSize() != mask->GetSize())
            {
                // Widen both sides of the subtraction to pointer width so the
                // sign bit ends up in the mask's top bit.
                Assert(mask->GetSize() == MachPtr);
                Assert(src2->GetType() == TyUint32);
                temp = temp->UseWithNewType(TyMachPtr, insertBeforeInstr->m_func)->AsRegOpnd();
                src2 = src2->UseWithNewType(TyMachPtr, insertBeforeInstr->m_func)->AsRegOpnd();
            }
            // mask = (temp - arraySize) >> (bit width - 1): arithmetic shift
            // replicates the sign bit across the register.
            lowerer->InsertSub(false, mask, temp, src2, insertBeforeInstr);
            lowerer->InsertShift(Js::OpCode::Shr_A, false, mask, mask, IR::IntConstOpnd::New(TySize[mask->GetType()] * 8 - 1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr);
            // If we're not masking the result, we're masking the index
            if (!shouldMaskResult)
            {
                lowerer->InsertAnd(indexOpnd, indexOpnd, mask, insertBeforeInstr);
            }
        }
    }
    IR::Instr* ret = lowerer->InsertMove(dst, src1, insertBeforeInstr, generateWriteBarrier);
    if(!usesFastArray && shouldMaskResult)
    {
        // Mask the result if we didn't use the mask earlier to mask the index
        lowerer->InsertAnd(dst, dst, mask, insertBeforeInstr);
    }
    return ret;
}
// Lowers an asm.js typed-array-view load. Three cases:
//   1. Negative constant offset: always out of bounds — fold to the OOB value
//      (NaN for floats, 0 for ints).
//   2. Dynamic index, or a constant offset that needs a bounds check: emit the
//      compare/branch sequence via LowerAsmJsLdElemHelper.
//   3. Small constant offset proven safe: emit the raw move directly.
// The move itself goes through InsertMaskableMove for speculation hardening.
IR::Instr *
Lowerer::LowerLdArrViewElem(IR::Instr * instr)
{
    #ifdef ASMJS_PLAT
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdArrViewElem);
    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * indexOpnd = instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = instr->GetSrc1()->AsIndirOpnd()->GetOffset();
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Instr * done;
    if (offset < 0)
    {
        // Statically out of bounds: replace the load with the type's
        // out-of-bounds value and turn the instruction into a plain assign.
        IR::Opnd * oobValue = nullptr;
        if(dst->IsFloat32())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), TyFloat32, m_func);
        }
        else if(dst->IsFloat64())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), TyFloat64, m_func);
        }
        else
        {
            oobValue = IR::IntConstOpnd::New(0, dst->GetType(), m_func);
        }
        instr->ReplaceSrc1(oobValue);
        if (src2)
        {
            instr->FreeSrc2();
        }
        return m_lowererMD.ChangeToAssign(instr);
    }
    if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $load
        // $helper:
        // MOV dst, 0
        // JMP $done
        // $load:
        // MOV dst, src1([arrayBuffer + indexOpnd])
        // $done:
        Assert(!dst->IsFloat32() || src1->IsFloat32());
        Assert(!dst->IsFloat64() || src1->IsFloat64());
        done = m_lowererMD.LowerAsmJsLdElemHelper(instr);
    }
    else
    {
        // any access below 0x10000 is safe
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        if (src2)
        {
            instr->FreeSrc2();
        }
        done = instr;
    }
    // Emit the (possibly masked) element move before "done", then drop the
    // original instruction.
    InsertMaskableMove(false, true, dst, src1, src2, indexOpnd, done, this);
    instr->Remove();
    return instrPrev;
    #else
    Assert(UNREACHED);
    return instr;
    #endif
}
- IR::Instr *
- Lowerer::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
- {
- uint32 offset = addrOpnd->AsIndirOpnd()->GetOffset();
- // don't encode offset for wasm memory reads/writes
- addrOpnd->AsIndirOpnd()->m_dontEncode = true;
- // if offset/size overflow the max length, throw (this also saves us from having to do int64 math)
- int64 constOffset = (int64)addrOpnd->GetSize() + (int64)offset;
- if (constOffset >= Js::ArrayBuffer::MaxArrayBufferLength)
- {
- GenerateRuntimeError(instr, WASMERR_ArrayIndexOutOfRange, IR::HelperOp_WebAssemblyRuntimeError);
- return instr;
- }
- else
- {
- return m_lowererMD.LowerWasmArrayBoundsCheck(instr, addrOpnd);
- }
- }
// Lowers a WebAssembly linear-memory load: emit the bounds check (or a
// compile-time range error for statically impossible accesses), then the
// speculation-hardened element move.
IR::Instr *
Lowerer::LowerLdArrViewElemWasm(IR::Instr * instr)
{
    #ifdef ENABLE_WASM
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdArrViewElemWasm);
    IR::Instr * instrPrev = instr->m_prev;
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());
    IR::Instr * done = LowerWasmArrayBoundsCheck(instr, src1);
    IR::Instr* newMove = InsertMaskableMove(false, true, dst, src1, instr->GetSrc2(), src1->AsIndirOpnd()->GetIndexOpnd(), done, this);
    if (m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer())
    {
        // We need to have an AV when accessing out of bounds memory even if the dst is not used
        // Make sure LinearScan doesn't dead store this instruction
        newMove->hasSideEffects = true;
    }
    instr->Remove();
    return instrPrev;
    #else
    Assert(UNREACHED);
    return instr;
    #endif
}
IR::Instr *
Lowerer::LowerMemset(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    // Lowers a Memset instruction to a call of the Op_Memset helper:
    //   helperRet = Op_Memset(base, index, value, size, scriptContext)
    // helperRet holds the helper's int8 result; LowerMemOp branches on it to
    // decide whether to bail out.
    // Returns the ToVar instruction inserted before the call, or nullptr when
    // no instruction was inserted ahead of the original.
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src1 = instr->UnlinkSrc1();
    Assert(dst->IsIndirOpnd());
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
    Assert(baseOpnd);
    Assert(sizeOpnd);
    Assert(indexOpnd);
    IR::JnHelperMethod helperMethod = IR::HelperOp_Memset;
    IR::Instr *instrPrev = nullptr;
    if (src1->IsRegOpnd() && !src1->IsVar())
    {
        // The helper takes a Var value: box a typed register value first.
        IR::RegOpnd* varOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        instrPrev = IR::Instr::New(Js::OpCode::ToVar, varOpnd, src1, instr->m_func);
        instr->InsertBefore(instrPrev);
        src1 = varOpnd;
    }
    instr->SetDst(helperRet);
    LoadScriptContext(instr);
    // Helper arguments are pushed in reverse order.
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    dst->Free(m_func);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerMemcopy(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    // Lowers a Memcopy instruction to a call of the Op_Memcopy helper:
    //   helperRet = Op_Memcopy(dstBase, dstIndex, srcBase, srcIndex, size, scriptContext)
    // helperRet holds the helper's int8 result; LowerMemOp branches on it to
    // decide whether to bail out.
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src = instr->UnlinkSrc1();
    Assert(dst->IsIndirOpnd());
    Assert(src->IsIndirOpnd());
    IR::Opnd *dstBaseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *dstIndexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    IR::Opnd *srcBaseOpnd = src->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *srcIndexOpnd = src->AsIndirOpnd()->UnlinkIndexOpnd();
    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
    Assert(sizeOpnd);
    Assert(dstBaseOpnd);
    Assert(dstIndexOpnd);
    Assert(srcBaseOpnd);
    Assert(srcIndexOpnd);
    IR::JnHelperMethod helperMethod = IR::HelperOp_Memcopy;
    instr->SetDst(helperRet);
    LoadScriptContext(instr);
    // Helper arguments are pushed in reverse order.
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcBaseOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstBaseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    dst->Free(m_func);
    src->Free(m_func);
    // Nothing was inserted before the original instruction.
    return nullptr;
}
IR::Instr *
Lowerer::LowerMemOp(IR::Instr * instr)
{
    // Lowers Memset/Memcopy. The bailout kinds attached to the instruction
    // are peeled off one at a time (each lowered to its own check), then the
    // op itself is lowered to its helper call; a zero helper return value
    // triggers the BailOutOnMemOpError bailout.
    Assert(instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    IR::Instr *instrPrev = instr->m_prev;
    // Receives the helper's int8 success flag.
    IR::RegOpnd* helperRet = IR::RegOpnd::New(TyInt8, instr->m_func);
    const bool isHelper = false;
    AssertMsg(instr->HasBailOutInfo(), "Expected bailOut on MemOp instruction");
    if (instr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            // Head-segment invalidation and missing-value bailouts are mutually exclusive here.
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if (bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if (bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        AssertMsg(bailOutKind & IR::BailOutOnMemOpError, "Expected BailOutOnMemOpError on MemOp instruction");
        if (bailOutKind & IR::BailOutOnMemOpError)
        {
            // Insert or get continue label
            IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isHelper);
            Func *const func = instr->m_func;
            LowerOneBailOutKind(instr, IR::BailOutOnMemOpError, isHelper);
            IR::Instr *const insertBeforeInstr = instr->m_next;
            // test helperRet, helperRet
            // jz $skipBailOut
            InsertCompareBranch(
                helperRet,
                IR::IntConstOpnd::New(0, TyInt8, func),
                Js::OpCode::BrNeq_A,
                skipBailOutLabel,
                insertBeforeInstr);
            // (Bail out with IR::BailOutOnMemOpError)
            // $skipBailOut:
            bailOutKind ^= IR::BailOutOnMemOpError;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        instr->ClearBailOutInfo();
    }
    // Lower the op itself; keep the earliest inserted instruction as the
    // resume point for the caller's iteration.
    IR::Instr* newInstrPrev = nullptr;
    if (instr->m_opcode == Js::OpCode::Memset)
    {
        newInstrPrev = LowerMemset(instr, helperRet);
    }
    else if (instr->m_opcode == Js::OpCode::Memcopy)
    {
        newInstrPrev = LowerMemcopy(instr, helperRet);
    }
    if (newInstrPrev != nullptr)
    {
        instrPrev = newInstrPrev;
    }
    return instrPrev;
}
IR::Instr*
Lowerer::LowerStAtomicsWasm(IR::Instr* instr)
{
#ifdef ENABLE_WASM
    // Lowers a wasm atomic store: bounds-check the destination address, then
    // emit the machine-dependent atomic store.
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StAtomicWasm);
    IR::Instr * instrPrev = instr->m_prev;
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    // Atomic accesses operate on native integer types only.
    Assert(IRType_IsNativeInt(dst->GetType()));
    IR::Instr * done = LowerWasmArrayBoundsCheck(instr, dst);
    m_lowererMD.LowerAtomicStore(dst, src1, done);
    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
IR::Instr * Lowerer::LowerLdAtomicsWasm(IR::Instr * instr)
{
#ifdef ENABLE_WASM
    // Lowers a wasm atomic load: bounds-check the source address, then emit
    // the machine-dependent atomic load.
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdAtomicWasm);
    IR::Instr * instrPrev = instr->m_prev;
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    // Atomic accesses operate on native integer types only.
    Assert(IRType_IsNativeInt(dst->GetType()));
    IR::Instr * done = LowerWasmArrayBoundsCheck(instr, src1);
    m_lowererMD.LowerAtomicLoad(dst, src1, done);
    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
IR::Instr *
Lowerer::LowerStArrViewElem(IR::Instr * instr)
{
#ifdef ASMJS_PLAT
    // Lowers an asm.js/wasm typed-view store. Chooses among: the wasm bounds
    // check, dropping the store entirely (negative constant offset), the
    // asm.js bounds-check helper, or an unchecked store for small constant
    // offsets.
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StArrViewElem);
    IR::Instr * instrPrev = instr->m_prev;
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    // type of dst is the type of array
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = dst->AsIndirOpnd()->GetOffset();
    // Value and view widths must agree.
    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());
    Assert(!dst->IsInt64() || src1->IsInt64());
    IR::Instr * done;
    if (m_func->GetJITFunctionBody()->IsWasmFunction())
    {
        done = LowerWasmArrayBoundsCheck(instr, dst);
    }
    else if (offset < 0)
    {
        // A negative constant offset can never be in bounds: drop the store.
        instr->Remove();
        return instrPrev;
    }
    else if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $store
        // $helper:
        // JMP $done
        // $store:
        // MOV dst([arrayBuffer + indexOpnd]), src1
        // $done:
        done = m_lowererMD.LowerAsmJsStElemHelper(instr);
    }
    else
    {
        // any constant access below 0x10000 is safe, as that is the min heap size
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        done = instr;
        if (src2)
        {
            instr->FreeSrc2();
        }
    }
    // wasm memory buffer is not recycler allocated, so we shouldn't generate write barrier
    InsertMaskableMove(true, false, dst, src1, src2, indexOpnd, done, this);
    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
- IR::Instr *
- Lowerer::LowerArrayDetachedCheck(IR::Instr * instr)
- {
- // TEST isDetached, isDetached
- // JE Done
- // Helper:
- // CALL Js::Throw::OutOfMemory
- // Done:
- Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
- IR::Instr * instrPrev = instr->m_prev;
- IR::Opnd * isDetachedOpnd = instr->UnlinkSrc1();
- Assert(isDetachedOpnd->IsIndirOpnd() || isDetachedOpnd->IsMemRefOpnd());
- IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
- IR::LabelInstr * helperLabel = InsertLabel(true, instr);
- InsertTestBranch(isDetachedOpnd, isDetachedOpnd, Js::OpCode::BrNotNeq_A, doneLabel, helperLabel);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_OutOfMemoryError);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerDeleteElemI
- ///
- ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerDeleteElemI(IR::Instr * instr, bool strictMode)
{
    // Lowers DeleteElemI to the Op_DeleteElementI helper:
    //   dst = Op_DeleteElementI[_Int32|_UInt32](base, index, scriptContext, flags)
    // The typed-index helper variants are picked when the index is a native
    // int32/uint32 register.
    IR::Instr *instrPrev;
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on DeleteElementI");
    Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
    if (strictMode)
    {
        propertyOperationFlag = Js::PropertyOperation_StrictMode;
    }
    instrPrev = instr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperOp_DeleteElementI;
    IR::Opnd *indexOpnd = src1->AsIndirOpnd()->UnlinkIndexOpnd();
    if (indexOpnd)
    {
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod = IR::HelperOp_DeleteElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod = IR::HelperOp_DeleteElementI_UInt32;
        }
        else
        {
            Assert(indexOpnd->GetType() == TyVar);
        }
    }
    else
    {
        // No index; the offset identifies the element.
        IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
        indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
    }
    // Helper arguments are pushed in reverse order: flags, script context,
    // index, then base.
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = src1->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    src1->Free(this->m_func);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    return instrPrev;
}
- IR::Opnd *
- Lowerer::GetForInEnumeratorFieldOpnd(IR::Opnd * forInEnumeratorOpnd, uint fieldOffset, IRType type)
- {
- if (forInEnumeratorOpnd->IsSymOpnd())
- {
- IR::SymOpnd * symOpnd = forInEnumeratorOpnd->AsSymOpnd();
- return IR::SymOpnd::New(symOpnd->GetStackSym(), symOpnd->m_offset + fieldOffset, type, this->m_func);
- }
- Assert(forInEnumeratorOpnd->IsIndirOpnd());
- IR::IndirOpnd * indirOpnd = forInEnumeratorOpnd->AsIndirOpnd();
- return IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), indirOpnd->GetOffset() + fieldOffset, type, this->m_func);
- }
void
Lowerer::GenerateFastBrBReturn(IR::Instr * instr)
{
    // Fast path for BrOnEmpty/BrOnNotEmpty over a ForInObjectEnumerator:
    // when the enumerator's cached data is still valid for the object's
    // current type, pull the next property string and object index straight
    // out of the cache and branch, falling through to $helper otherwise.
    // The original instr is left in place after $helper for slow-path lowering.
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc1();
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * loopBody = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // CMP forInEnumerator->canUseJitFastPath, 0
    // JEQ $helper
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, this->m_func), Js::OpCode::BrEq_A, labelHelper, instr);
    // MOV objectOpnd, forInEnumerator->enumerator.object
    // MOV cachedDataTypeOpnd, forInEnumerator->enumerator.cachedDataType
    // CMP cachedDataTypeOpnd, objectOpnd->type
    // JNE $helper
    IR::RegOpnd * objectOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(objectOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr), instr);
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(cachedDataTypeOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), instr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, this->m_func),
        Js::OpCode::BrNeq_A, labelHelper, instr);
    // MOV cachedDataOpnd, forInEnumeratorOpnd->enumerator.cachedData
    // MOV enumeratedCountOpnd, forInEnumeratorOpnd->enumerator.enumeratedCount
    // CMP enumeratedCountOpnd, cachedDataOpnd->cachedCount
    // JLT $loopBody
    IR::RegOpnd * cachedDataOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr), instr);
    IR::RegOpnd * enumeratedCountOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(enumeratedCountOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32), instr);
    InsertCompareBranch(enumeratedCountOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCachedCount(), TyUint32, this->m_func),
        Js::OpCode::BrLt_A, loopBody, instr);
    // Cache exhausted: if enumeration had completed, the branch outcome is
    // known (empty); otherwise fall back to the helper.
    // CMP cacheData.completed, 0
    // JNE $loopEnd
    // JMP $helper
    IR::LabelInstr * labelAfter = instr->GetOrCreateContinueLabel();
    InsertCompareBranch(
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCompleted(), TyInt8, this->m_func),
        IR::IntConstOpnd::New(0, TyInt8, this->m_func),
        Js::OpCode::BrNeq_A, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? labelAfter : instr->AsBranchInstr()->GetTarget(), instr);
    InsertBranch(Js::OpCode::Br, labelHelper, instr);
    // $loopBody:
    instr->InsertBefore(loopBody);
    IR::Opnd * opndDst = instr->GetDst(); // ForIn result propertyString
    Assert(opndDst->IsRegOpnd());
    // MOV stringsOpnd, cachedData->strings
    // MOV opndDst, stringsOpnd[enumeratedCount]
    IR::RegOpnd * stringsOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(stringsOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataStrings(), TyMachPtr, this->m_func), instr);
    InsertMove(opndDst,
        IR::IndirOpnd::New(stringsOpnd, enumeratedCountOpnd, m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), instr);
    // MOV indexesOpnd, cachedData->indexes
    // MOV objectIndexOpnd, indexesOpnd[enumeratedCount]
    // MOV forInEnumeratorOpnd->enumerator.objectIndex, objectIndexOpnd
    IR::RegOpnd * indexesOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(indexesOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataIndexes(), TyMachPtr, this->m_func), instr);
    IR::RegOpnd * objectIndexOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(objectIndexOpnd,
        IR::IndirOpnd::New(indexesOpnd, enumeratedCountOpnd, IndirScale4, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyUint32),
        objectIndexOpnd, instr);
    // INC enumeratedCountOpnd
    // MOV forInEnumeratorOpnd->enumerator.enumeratedCount, enumeratedCountOpnd
    InsertAdd(false, enumeratedCountOpnd, enumeratedCountOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32),
        enumeratedCountOpnd, instr);
    // We know result propertyString (opndDst) != NULL
    InsertBranch(Js::OpCode::Br, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? instr->AsBranchInstr()->GetTarget() : labelAfter, instr);
    // $helper
    instr->InsertBefore(labelHelper);
    // $after
}
- ///----------------------------------------------------------------------------
- ///
/// Lowerer::LowerBrBReturn - lower 1-operand (boolean) conditional branch
- ///
- ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrBReturn(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    // Slow path for BrOnEmpty/BrOnNotEmpty: calls the given helper with the
    // ForInEnumerator and converts the branch into BrTrue_A/BrFalse_A on the
    // helper's result.
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndDst;
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);
    IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc1(), instr);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, forInEnumeratorRegOpnd);
    // Generate helper call to convert the unknown operand to boolean
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    opndDst = instr->UnlinkDst();
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);
    // Branch on the result of the call
    instr->m_opcode = (instr->m_opcode == Js::OpCode::BrOnNotEmpty? Js::OpCode::BrTrue_A : Js::OpCode::BrFalse_A);
    instr->SetSrc1(opndDst);
    IR::Instr *loweredInstr;
    loweredInstr = this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, isHelper);
#if DBG
    if (isHelper)
    {
        // Mark the resulting branch as helper-to-non-helper for IR validation.
        if (!loweredInstr->IsBranchInstr())
        {
            loweredInstr = loweredInstr->GetNextBranchOrLabel();
        }
        if (loweredInstr->IsBranchInstr())
        {
            loweredInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
        }
    }
#endif
    return instrPrev;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerMultiBr
/// - Lowers the instruction for dictionary lookup (string case arms)
- ///
- ///----------------------------------------------------------------------------
IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    // Lowers a multi-branch with string case arms: calls a dictionary-lookup
    // helper with the switch string and the function's start/end labels, then
    // performs an indirect branch to the address the helper returns.
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
    // Push the args in reverse order.
    // The end and start labels for the function are used to guarantee
    // that the dictionary jump destinations haven't been tampered with, so we
    // will always jump to some location within this function
    IR::LabelOpnd * endFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncEndLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, endFuncOpnd);
    IR::LabelOpnd * startFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncStartLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, startFuncOpnd);
    //Load the address of the dictionary pair- Js::StringDictionaryWrapper
    auto dictionary = instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary();
    if (this->m_func->IsOOPJIT())
    {
        // OOP JIT: the dictionary lives in the native code data block; compute
        // its address from the data sym plus its recorded offset.
        auto dictionaryOffset = NativeCodeData::GetDataTotalOffset(dictionary);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertLea(addressRegOpnd,
            IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dictionaryOffset, TyMachPtr,
#if DBG
            NativeCodeData::GetDataDescription(dictionary, this->m_func->m_alloc),
#endif
            this->m_func, true), instr);
        this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        m_lowererMD.LoadHelperArgument(instr, addressRegOpnd);
    }
    else
    {
        IR::AddrOpnd* nativestringDictionaryOpnd = IR::AddrOpnd::New(dictionary, IR::AddrOpndKindDynamicMisc, this->m_func);
        m_lowererMD.LoadHelperArgument(instr, nativestringDictionaryOpnd);
    }
    //Load the String passed in the Switch expression for look up - JavascriptString
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);
    // Generate helper call for dictionary lookup.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachPtr,this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachPtr, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);
    // Branch indirectly through the returned target address.
    instr->SetSrc1(instrCall->GetDst());
    instr->m_opcode = LowererMD::MDMultiBranchOpcode;
    return instrPrev;
}
- void
- Lowerer::LowerJumpTableMultiBranch(IR::MultiBranchInstr * multiBrInstr, IR::RegOpnd * indexOpnd)
- {
- Func * func = this->m_func;
- IR::Opnd * opndDst = IR::RegOpnd::New(TyMachPtr, func);
- //Move the native address of the jump table to a register
- IR::LabelInstr * nativeJumpTableLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- nativeJumpTableLabel->m_isDataLabel = true;
- IR::LabelOpnd * nativeJumpTable = IR::LabelOpnd::New(nativeJumpTableLabel, m_func);
- IR::RegOpnd * nativeJumpTableReg = IR::RegOpnd::New(TyMachPtr, func);
- InsertMove(nativeJumpTableReg, nativeJumpTable, multiBrInstr);
- BranchJumpTableWrapper * branchJumpTable = multiBrInstr->GetBranchJumpTable();
- AssertMsg(branchJumpTable->labelInstr == nullptr, "Should not be already assigned");
- branchJumpTable->labelInstr = nativeJumpTableLabel;
- //Indirect addressing @ target location in the jump table.
- //MOV eax, [nativeJumpTableReg + (offset * indirScale)]
- BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
- IR::Opnd * opndSrc = IR::IndirOpnd::New(nativeJumpTableReg, indexOpnd, indirScale, TyMachReg, this->m_func);
- IR::Instr * indirInstr = InsertMove(opndDst, opndSrc, multiBrInstr);
- //MultiBr eax
- multiBrInstr->SetSrc1(indirInstr->GetDst());
- //Jump to the address at the target location in the jump table
- multiBrInstr->m_opcode = LowererMD::MDMultiBranchOpcode;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerMultiBr
- /// - Lowers the instruction for jump table(consecutive integer case arms)
- ///
- ///----------------------------------------------------------------------------
- IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr)
- {
- IR::Instr * instrPrev = instr->m_prev;
- AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
- AssertMsg(instr->IsBranchInstr() && instr->AsBranchInstr()->IsMultiBranch(), "Bad Instruction Lowering Call to LowerMultiBr()");
- IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
- IR::RegOpnd * offset = instr->UnlinkSrc1()->AsRegOpnd();
- LowerJumpTableMultiBranch(multiBrInstr, offset);
- return instrPrev;
- }
- IR::Instr* Lowerer::LowerBrBMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr * instrPrev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- StackSym * symDst;
- AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
- instrPrev = LoadScriptContext(instr);
- opndSrc = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, opndSrc);
- // Generate helper call to convert the unknown operand to boolean
- opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
- symDst = StackSym::New(TyVar, this->m_func);
- opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch on the result of the call
- instr->SetSrc1(opndDst);
- m_lowererMD.LowerCondBranch(instr);
- return instrPrev;
- }
- IR::Instr* Lowerer::LowerBrOnObject(IR::Instr * instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr * instrPrev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- StackSym * symDst;
- AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
- opndSrc = instr->UnlinkSrc1();
- instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
- // Generate helper call to check if the operand's type is object
- opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
- symDst = StackSym::New(TyVar, this->m_func);
- opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch on the result of the call
- instr->SetSrc1(opndDst);
- m_lowererMD.LowerCondBranch(instr);
- return instrPrev;
- }
- IR::Instr * Lowerer::LowerBrOnClassConstructor(IR::Instr * instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr * instrPrev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- StackSym * symDst;
- AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
- opndSrc = instr->UnlinkSrc1();
- instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
- // Generate helper call to check if the operand's type is object
- opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
- symDst = StackSym::New(TyVar, this->m_func);
- opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch on the result of the call
- instr->SetSrc1(opndDst);
- m_lowererMD.LowerCondBranch(instr);
- return instrPrev;
- }
IR::Instr *
Lowerer::LowerEqualityCompare(IR::Instr* instr, IR::JnHelperMethod helper)
{
    // Lowers CmEq/CmNeq (and their strict CmSrEq/CmSrNeq variants): tries the
    // available fast paths in priority order — float compare, typeof compare,
    // strict same-value, string compare, definite equality, likely equality,
    // tagged int — and falls back to the helper call whenever a fast path
    // still needs a slow-path tail.
    IR::Instr * instrPrev = instr->m_prev;
    bool needHelper = true;
    bool fNoLower = false;
    bool isStrictCompare = instr->m_opcode == Js::OpCode::CmSrEq_A || instr->m_opcode == Js::OpCode::CmSrNeq_A;
    if (instr->GetSrc1()->IsFloat())
    {
        // Native float compare: no helper needed.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths())
    {
        // Fast paths disabled: go straight to the helper.
        LowerBinaryHelperMem(instr, helper);
    }
    else if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
    {
        if (!fNoLower)
        {
            LowerBinaryHelperMem(instr, helper);
        }
    }
    else if (isStrictCompare && TryGenerateFastCmSrXx(instr))
    {
        // Fully handled by the strict-compare fast path.
    }
    else
    {
        if (GenerateFastBrOrCmString(instr))
        {
            LowerBinaryHelperMem(instr, helper);
        }
        else if (isStrictCompare && GenerateFastBrOrCmEqDefinite(instr, helper, &needHelper, false, false))
        {
            if (needHelper)
            {
                LowerBinaryHelperMem(instr, helper);
            }
        }
        else if(GenerateFastCmEqLikely(instr, &needHelper, false) || GenerateFastEqBoolInt(instr, &needHelper, false))
        {
            if (needHelper)
            {
                if (isStrictCompare)
                {
                    LowerStrictBrOrCm(instr, helper, false, false /* isBranch */, true);
                }
                else
                {
                    LowerBinaryHelperMem(instr, helper);
                }
            }
        }
        else if (!m_lowererMD.GenerateFastCmXxTaggedInt(instr, false))
        {
            if (isStrictCompare)
            {
                LowerStrictBrOrCm(instr, helper, false, false /* isBranch */, false);
            }
            else
            {
                LowerBinaryHelperMem(instr, helper);
            }
        }
    }
    if (!needHelper)
    {
        // A fast path proved the helper unnecessary; drop the original instr.
        instr->Remove();
    }
    return instrPrev;
}
IR::Instr *
Lowerer::LowerEqualityBranch(IR::Instr* instr, IR::JnHelperMethod helper)
{
    // Lowers equality branches (BrEq/BrNeq and strict BrSrEq/BrSrNeq
    // variants): float operands go straight to a float branch; otherwise the
    // fast paths are tried in priority order with helper-call fallbacks,
    // mirroring LowerEqualityCompare.
    IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
    IR::Instr * instrPrev = instr->m_prev;
    bool fNoLower = false;
    const bool noFastPath = PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths();
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
        return instrPrev;
    }
    if (instr->GetSrc2()->IsFloat())
    {
        // Put the float operand in src1 before lowering to a float branch.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->SwapOpnds();
        m_lowererMD.LowerToFloat(instr);
        return instrPrev;
    }
    if (noFastPath)
    {
        LowerBrCMem(instr, helper, true, false /*isHelper*/);
        return instrPrev;
    }
    if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
    {
        if (!fNoLower)
        {
            LowerBrCMem(instr, helper, false, false /*isHelper*/);
        }
        return instrPrev;
    }
    // Opcode-specific fast paths; 'done' means the branch is fully lowered.
    bool done = false;
    bool isStrictCompare = false;
    switch(instr->m_opcode)
    {
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        done = TryGenerateFastBrNeq(instr);
        break;
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        done = TryGenerateFastBrEq(instr);
        break;
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        isStrictCompare = true;
        done = TryGenerateFastBrSrXx(instr, srcReg1, srcReg2, &instrPrev, noFastPath);
        break;
    default:
        Assume(UNREACHED);
    }
    if (done)
    {
        return instrPrev;
    }
    bool needHelper = true;
    bool hasStrFastPath = false;
    if (GenerateFastBrOrCmString(instr))
    {
        hasStrFastPath = true;
        LowerBrCMem(instr, helper, false, true);
    }
    else if (isStrictCompare && GenerateFastBrOrCmEqDefinite(instr, helper, &needHelper, true, hasStrFastPath))
    {
        if (needHelper)
        {
            LowerBrCMem(instr, helper, true /*noMathFastPath*/, hasStrFastPath);
        }
    }
    else if (GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper, hasStrFastPath) || GenerateFastEqBoolInt(instr, &needHelper, hasStrFastPath))
    {
        if (needHelper)
        {
            if (isStrictCompare)
            {
                LowerStrictBrOrCm(instr, helper, false, true /* isBranch */, true);
            }
            else
            {
                LowerBrCMem(instr, helper, false, hasStrFastPath);
            }
        }
    }
    else if (needHelper)
    {
        if (isStrictCompare)
        {
            LowerStrictBrOrCm(instr, helper, false, true /* isBranch */, false);
        }
        else
        {
            LowerBrCMem(instr, helper, false, hasStrFastPath);
        }
    }
    if (!needHelper)
    {
        if (instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
        {
            // Loop-top targets still go through the helper lowering path.
            LowerBrCMem(instr, helper, false, hasStrFastPath);
        }
        else
        {
            instr->Remove();
        }
    }
    return instrPrev;
}
- // Generate fast path for StrictEquals for objects that are not GlobalObject, HostDispatch or External to be pointer comparison
- IR::Instr *
- Lowerer::LowerStrictBrOrCm(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isBranch, bool isHelper)
- {
- IR::Instr * instrPrev = instr->m_prev;
- IR::LabelInstr * labelHelper = nullptr;
- IR::LabelInstr * labelFallThrough = nullptr;
- IR::LabelInstr * labelBranchSuccess = nullptr;
- IR::LabelInstr * labelBranchFailure = nullptr;
- LibraryValue successValueType = ValueInvalid;
- LibraryValue failureValueType = ValueInvalid;
- bool isEqual = !instr->IsNeq();
- IR::Opnd * src1 = instr->GetSrc1();
- IR::Opnd * src2 = instr->GetSrc2();
- AssertMsg(src1 != nullptr && src2 != nullptr, "Expected 2 src opnds on BrC");
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- if (!noMathFastPath)
- {
- labelFallThrough = instr->GetOrCreateContinueLabel(isHelper);
- if (!isBranch)
- {
- labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- labelBranchFailure = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- successValueType = isEqual ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
- failureValueType = isEqual ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
- }
- else
- {
- labelBranchSuccess = isEqual ? instr->AsBranchInstr()->GetTarget() : labelFallThrough;
- labelBranchFailure = isEqual ? labelFallThrough : instr->AsBranchInstr()->GetTarget();
- }
- if (src1->IsEqual(src2))
- {
- if (instr->GetSrc1()->GetValueType().IsNotFloat())
- {
- if (!isBranch)
- {
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
- InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
- }
- else
- {
- IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchSuccess, this->m_func);
- instr->InsertBefore(branch);
- }
- instr->Remove();
- return instrPrev;
- }
- #if !FLOATVAR
- m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper);
- IR::RegOpnd *src1TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
- Lowerer::InsertMove(src1TypeReg, IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), instr);
- // MOV src1TypeIdReg, [src1TypeReg + offset(typeId)]
- IR::RegOpnd *src1TypeIdReg = IR::RegOpnd::New(TyInt32, this->m_func);
- Lowerer::InsertMove(src1TypeIdReg, IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), instr);
- // CMP src1TypeIdReg, TypeIds_Number
- // JEQ $helper
- IR::IntConstOpnd *numberTypeId = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, this->m_func, true);
- InsertCompareBranch(src1TypeIdReg, numberTypeId, Js::OpCode::BrEq_A, labelHelper, instr);
- #else
- m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper);
- #endif
- IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchSuccess, this->m_func);
- instr->InsertBefore(branch);
- }
- else
- {
- m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper);
- #if !FLOATVAR
- IR::RegOpnd *src1TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
- Lowerer::InsertMove(src1TypeReg, IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), instr);
- // MOV src1TypeIdReg, [src1TypeReg + offset(typeId)]
- IR::RegOpnd *src1TypeIdReg = IR::RegOpnd::New(TyInt32, this->m_func);
- Lowerer::InsertMove(src1TypeIdReg, IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), instr);
- // CMP src1TypeIdReg, TypeIds_Number
- // JEQ $helper
- IR::IntConstOpnd *numberTypeId = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, this->m_func, true);
- InsertCompareBranch(src1TypeIdReg, numberTypeId, Js::OpCode::BrEq_A, labelHelper, instr);
- #endif
- // CMP src1, src2 - Ptr comparison
- // JEQ $branchSuccess
- InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
- #if FLOATVAR
- IR::RegOpnd *src1TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
- Lowerer::InsertMove(src1TypeReg, IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), instr);
- // MOV src1TypeIdReg, [src1TypeReg + offset(typeId)]
- IR::RegOpnd *src1TypeIdReg = IR::RegOpnd::New(TyInt32, this->m_func);
- Lowerer::InsertMove(src1TypeIdReg, IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), instr);
- #endif
- // CMP src1TypeIdReg, TypeIds_HostDispatch
- // JLE $helper (le condition covers string, int64, uint64, hostdispatch, as well as undefined, null, boolean)
- IR::IntConstOpnd *hostDispatchTypeId = IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, this->m_func, true);
- InsertCompareBranch(src1TypeIdReg, hostDispatchTypeId, Js::OpCode::BrLe_A, labelHelper, instr);
- // CMP src1TypeIdReg, TypeIds_GlobalObject
- // JE $helper
- IR::IntConstOpnd *globalObjectTypeId = IR::IntConstOpnd::New(Js::TypeIds_GlobalObject, TyInt32, this->m_func, true);
- InsertCompareBranch(src1TypeIdReg, globalObjectTypeId, Js::OpCode::BrEq_A, labelHelper, instr);
- // TEST src1TypeReg->flags, TypeFlagMask_EngineExternal
- // JE $helper
- IR::Opnd *flags = IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfFlags(), TyInt8, this->m_func);
- InsertTestBranch(flags, IR::IntConstOpnd::New(TypeFlagMask_EngineExternal, TyInt8, this->m_func), Js::OpCode::BrNeq_A, labelHelper, instr);
- if (src2->IsRegOpnd())
- {
- m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper);
- // MOV src2TypeReg, [src2 + offset(type)]
- // TEST [src2TypeReg + offset(flags)], TypeFlagMask_EngineExternal
- // JE $helper
- IR::RegOpnd *src2TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::IndirOpnd *src2Type = IR::IndirOpnd::New(src2->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- Lowerer::InsertMove(src2TypeReg, src2Type, instr);
- IR::Opnd *src2Flags = IR::IndirOpnd::New(src2TypeReg, Js::Type::GetOffsetOfFlags(), TyInt8, this->m_func);
- InsertTestBranch(src2Flags, IR::IntConstOpnd::New(TypeFlagMask_EngineExternal, TyInt8, this->m_func), Js::OpCode::BrNeq_A, labelHelper, instr);
- }
- // JMP $done
- IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchFailure, this->m_func);
- instr->InsertBefore(branch);
- }
- if (!isBranch)
- {
- instr->InsertBefore(labelBranchSuccess);
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
- InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
- instr->InsertBefore(labelBranchFailure);
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
- InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
- }
- }
- instr->InsertBefore(labelHelper);
- if (isBranch)
- {
- LowerBrCMem(instr, helperMethod, true, true);
- }
- else
- {
- LowerBinaryHelperMem(instr, helperMethod);
- }
- return instrPrev;
- }
// Lowers a generic compare-and-branch (BrC*) by calling the appropriate
// runtime comparison helper and branching on its boolean result.
//   helperMethod   - runtime comparison helper to invoke.
//   noMathFastPath - when false, first try an inline fast path.
//   isHelper       - whether we are already emitting under a helper label.
// Returns the instruction at which lowering should resume.
IR::Instr *
Lowerer::LowerBrCMem(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isHelper)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    bool inverted = false;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() != nullptr, "Expected 2 src opnds on BrC");

    if (!noMathFastPath && !this->GenerateFastCondBranch(instr->AsBranchInstr(), &isHelper))
    {
        // The fast path took care of the branch entirely; nothing more to emit.
        return instrPrev;
    }

    // Push the args in reverse order.
    // The StrictEqualString helpers take a reduced argument list: no script
    // context, and the EmptyString variant also takes no second operand.
    const bool loadScriptContext = !(helperMethod == IR::HelperOp_StrictEqualString || helperMethod == IR::HelperOp_StrictEqualEmptyString);
    const bool loadArg2 = !(helperMethod == IR::HelperOp_StrictEqualEmptyString);

    if (helperMethod == IR::HelperOp_NotEqual)
    {
        // Op_NotEqual() returns !Op_Equal(). It is faster to call Op_Equal() directly.
        helperMethod = IR::HelperOp_Equal;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    else if(helperMethod == IR::HelperOp_NotStrictEqual)
    {
        // Op_NotStrictEqual() returns !Op_StrictEqual(). It is faster to call Op_StrictEqual() directly.
        helperMethod = IR::HelperOp_StrictEqual;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }

    if (loadScriptContext)
        LoadScriptContext(instr);

    opndSrc = instr->UnlinkSrc2();
    if (loadArg2)
        m_lowererMD.LoadHelperArgument(instr, opndSrc);
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to compare the source operands.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachReg, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        if (instr->HasBailOutInfo())
        {
            // The branch sense was flipped above; record that for bailout handling.
            instr->GetBailOutInfo()->isInvertedBranch = true;
        }
        break;

    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotLe_A:
    case Js::OpCode::BrNotLt_A:
        // The helper computes the non-negated relation; branch on "false" instead.
        inverted = true;
        break;
    }

    // Branch if the result is "true".
    instr->SetSrc1(opndDst);
    instr->m_opcode = (inverted ? Js::OpCode::BrFalse_A : Js::OpCode::BrTrue_A);
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, !noMathFastPath && isHelper);

    return instrPrev;
}
- IR::Instr *
- Lowerer::LowerBrFncApply(IR::Instr * instr, IR::JnHelperMethod helperMethod) {
- IR::Instr * instrPrev = instr->m_prev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- StackSym * symDst;
- AssertMsg(instr->GetSrc1() != nullptr, "Expected 1 src opnd on BrFncApply");
- LoadScriptContext(instr);
- opndSrc = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, opndSrc);
- // Generate helper call to compare the source operands.
- opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
- symDst = StackSym::New(TyMachReg, this->m_func);
- opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch if the result is "true".
- instr->SetSrc1(opndDst);
- instr->m_opcode = Js::OpCode::BrTrue_A;
- m_lowererMD.LowerCondBranch(instr);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerBrProperty - lower branch-on-has/no-property
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerBrProperty(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- IR::Instr * instrPrev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- opndSrc = instr->UnlinkSrc1();
- AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
- "Expected propertySym as src of BrProperty");
- instrPrev = LoadScriptContext(instr);
- this->LoadPropertySymAsArgument(instr, opndSrc);
- opndHelper = IR::HelperCallOpnd::New(helper, this->m_func);
- opndDst = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch on the result of the call
- instr->SetSrc1(opndDst);
- switch (instr->m_opcode)
- {
- case Js::OpCode::BrOnHasProperty:
- instr->m_opcode = Js::OpCode::BrTrue_A;
- break;
- case Js::OpCode::BrOnNoProperty:
- instr->m_opcode = Js::OpCode::BrFalse_A;
- break;
- default:
- AssertMsg(0, "Unknown opcode on BrProperty branch");
- break;
- }
- this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, false);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerElementUndefined
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerElementUndefined(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- IR::Opnd *dst = instr->UnlinkDst();
- AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined");
- // Pass the property sym to store to
- this->LoadPropertySymAsArgument(instr, dst);
- m_lowererMD.ChangeToHelperCall(instr, helper);
- return instr;
- }
- IR::Instr *
- Lowerer::LowerElementUndefinedMem(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- // Pass script context
- IR::Instr * instrPrev = LoadScriptContext(instr);
- this->LowerElementUndefined(instr, helper);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerLdElemUndef(IR::Instr * instr)
- {
- if (this->m_func->GetJITFunctionBody()->IsEval())
- {
- return LowerElementUndefinedMem(instr, IR::HelperOp_LdElemUndefDynamic);
- }
- else
- {
- return LowerElementUndefined(instr, IR::HelperOp_LdElemUndef);
- }
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerElementUndefinedScoped
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerElementUndefinedScoped(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- IR::Instr * instrPrev = instr->m_prev;
- // Pass the default instance
- IR::Opnd *src = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src);
- // Pass the property sym to store to
- IR::Opnd * dst = instr->UnlinkDst();
- AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined Scoped");
- this->LoadPropertySymAsArgument(instr, dst);
- m_lowererMD.ChangeToHelperCall(instr, helper);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerElementUndefinedScopedMem(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- // Pass script context
- IR::Instr * instrPrev = LoadScriptContext(instr);
- this->LowerElementUndefinedScoped(instr, helper);
- return instrPrev;
- }
- void
- Lowerer::LowerStLoopBodyCount(IR::Instr* instr)
- {
- intptr_t header = m_func->m_workItem->GetLoopHeaderAddr();
- IR::MemRefOpnd *loopBodyCounterOpnd = IR::MemRefOpnd::New((BYTE*)(header) + Js::LoopHeader::GetOffsetOfProfiledLoopCounter(), TyUint32, this->m_func);
- instr->SetDst(loopBodyCounterOpnd);
- instr->ReplaceSrc1(instr->GetSrc1()->AsRegOpnd()->UseWithNewType(TyUint32, this->m_func));
- IR::AutoReuseOpnd autoReuse(loopBodyCounterOpnd, this->m_func);
- m_lowererMD.ChangeToAssign(instr);
- return;
- }
#if !FLOATVAR
// Lowers a StSlot whose source may be a stack-allocated number (non-FLOATVAR
// builds only): boxes the value via the BoxStackNumber helper, then stores
// the boxed var through the regular StSlot path.
IR::Instr *
Lowerer::LowerStSlotBoxTemp(IR::Instr *stSlot)
{
    // regVar = BoxStackNumber(src, scriptContext)
    IR::RegOpnd * regSrc = stSlot->UnlinkSrc1()->AsRegOpnd();
    IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
    instr->SetDst(regVar);
    instr->SetSrc1(regSrc);
    stSlot->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperBoxStackNumber);
    // The boxed result replaces the original source of the store.
    stSlot->SetSrc1(regVar);
    return this->LowerStSlot(stSlot);
}
#endif
// Builds the concrete operand for a slot-array access described by a
// property-sym operand: either a stack sym opnd (for stack closure syms) or
// an indirect operand [slotArrayBase + scaled propertyId].
IR::Opnd *
Lowerer::CreateOpndForSlotAccess(IR::Opnd * opnd)
{
    IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
    PropertySym * dstSym = symOpnd->m_sym->AsPropertySym();

    if (!m_func->IsLoopBody() &&
        m_func->DoStackFrameDisplay() &&
        (dstSym->m_stackSym == m_func->GetLocalClosureSym() || dstSym->m_stackSym == m_func->GetLocalFrameDisplaySym()))
    {
        // Stack closure syms are made to look like slot accesses for the benefit of GlobOpt, so that it can do proper
        // copy prop and implicit call bailout. But what we really want is local stack load/store.
        // Don't do this for loop body, though, since we don't have the value saved on the stack.
        IR::SymOpnd * closureSym = IR::SymOpnd::New(dstSym->m_stackSym, 0, TyMachReg, this->m_func);
        closureSym->GetStackSym()->m_isClosureSym = true;
        return closureSym;
    }

    // Outside asm.js mode the property id is a slot index; scale it by the
    // element size to get a byte offset. (In asm.js mode it is used as-is.)
    int32 offset = dstSym->m_propertyId;
    if (!m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        offset = offset * TySize[opnd->GetType()];
    }
#ifdef ASMJS_PLAT
    if (m_func->IsTJLoopBody())
    {
        // TJ loop bodies rebase the offset below the total asm.js frame size.
        offset = offset - m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetTotalSizeInBytes();
    }
#endif
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(symOpnd->CreatePropertyOwnerOpnd(m_func),
        offset , opnd->GetType(), this->m_func);
    return indirOpnd;
}
- IR::Instr* Lowerer::AddSlotArrayCheck(PropertySym *propertySym, IR::Instr* instr)
- {
- if (propertySym->m_stackSym != m_func->GetLocalClosureSym() || PHASE_OFF(Js::ClosureRangeCheckPhase, m_func))
- {
- return instr->m_prev;
- }
- IR::Instr *instrDef = propertySym->m_stackSym->m_instrDef;
- bool doDynamicCheck = this->m_func->IsLoopBody();
- bool insertSlotArrayCheck = false;
- uint32 slotId = (uint32)propertySym->m_propertyId;
- if (instrDef)
- {
- switch (instrDef->m_opcode)
- {
- case Js::OpCode::NewScopeSlots:
- case Js::OpCode::NewStackScopeSlots:
- case Js::OpCode::NewScopeSlotsWithoutPropIds:
- {
- IR::Opnd *allocOpnd = allocOpnd = instrDef->GetSrc1();
- uint32 allocCount = allocOpnd->AsIntConstOpnd()->AsUint32();
- if (slotId >= allocCount)
- {
- Js::Throw::FatalInternalError();
- }
- break;
- }
- case Js::OpCode::ArgIn_A:
- break;
- case Js::OpCode::LdSlot:
- case Js::OpCode::LdSlotArr:
- {
- if (doDynamicCheck && slotId > Js::ScopeSlots::FirstSlotIndex)
- {
- insertSlotArrayCheck = true;
- }
- break;
- }
- case Js::OpCode::SlotArrayCheck:
- {
- uint32 currentSlotId = instrDef->GetSrc2()->AsIntConstOpnd()->AsInt32();
- if (slotId > currentSlotId)
- {
- instrDef->ReplaceSrc2(IR::IntConstOpnd::New(slotId, TyUint32, m_func));
- }
- break;
- }
- default:
- Js::Throw::FatalInternalError();
- }
- }
- if (insertSlotArrayCheck)
- {
- IR::Instr *insertInstr = instrDef->m_next;
- IR::RegOpnd *dstOpnd = instrDef->UnlinkDst()->AsRegOpnd();
- IR::Instr *checkInstr = IR::Instr::New(Js::OpCode::SlotArrayCheck, dstOpnd, m_func);
- dstOpnd = IR::RegOpnd::New(TyVar, m_func);
- instrDef->SetDst(dstOpnd);
- checkInstr->SetSrc1(dstOpnd);
- // Attach the slot ID to the check instruction.
- IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(slotId, TyUint32, m_func);
- checkInstr->SetSrc2(slotIdOpnd);
- insertInstr->InsertBefore(checkInstr);
- }
- return instr->m_prev;
- }
- IR::Instr *
- Lowerer::LowerStSlot(IR::Instr *instr)
- {
- // StSlot stores the nth Var in the buffer pointed to by the property sym's stack sym.
- IR::Opnd * dstOpnd = instr->UnlinkDst();
- AssertMsg(dstOpnd, "Expected dst opnd on StSlot");
- IR::Opnd * dstNew = this->CreateOpndForSlotAccess(dstOpnd);
- dstOpnd->Free(this->m_func);
- instr->SetDst(dstNew);
- instr = m_lowererMD.ChangeToWriteBarrierAssign(instr, this->m_func);
- return instr;
- }
- IR::Instr *
- Lowerer::LowerStSlotChkUndecl(IR::Instr *instrStSlot)
- {
- Assert(instrStSlot->GetSrc2() != nullptr);
- // Src2 is required only to avoid dead store false positives during GlobOpt.
- instrStSlot->FreeSrc2();
- IR::Opnd *dstOpnd = this->CreateOpndForSlotAccess(instrStSlot->GetDst());
- IR::Instr *instr = this->LowerStSlot(instrStSlot);
- this->GenUndeclChk(instr, dstOpnd);
- return instr;
- }
// Emits a call to the ProfileLdSlot helper to record a profiled slot load:
//     ProfileLdSlot(value, functionBody, profileId)
//   valueOpnd        - the loaded value being profiled.
//   ldSlotFunc       - function whose body owns the profile data.
//   profileId        - profile slot id (must be valid).
//   insertBeforeInstr - the call sequence is inserted before this instruction.
void Lowerer::LowerProfileLdSlot(IR::Opnd *const valueOpnd, Func *const ldSlotFunc, const Js::ProfileId profileId, IR::Instr *const insertBeforeInstr)
{
    Assert(valueOpnd);
    Assert(profileId != Js::Constants::NoProfileId);
    Assert(insertBeforeInstr);

    Func *const irFunc = insertBeforeInstr->m_func;

    // Helper arguments are pushed in reverse order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::Opnd::CreateProfileIdOpnd(profileId, irFunc));
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, CreateFunctionBodyOpnd(ldSlotFunc));
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, valueOpnd);

    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, irFunc);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfileLdSlot, irFunc));
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
- void
- Lowerer::LowerLdSlot(IR::Instr *instr)
- {
- IR::Opnd * srcOpnd = instr->UnlinkSrc1();
- AssertMsg(srcOpnd, "Expected src opnd on LdSlot");
- IR::Opnd * srcNew = this->CreateOpndForSlotAccess(srcOpnd);
- srcOpnd->Free(this->m_func);
- instr->SetSrc1(srcNew);
- m_lowererMD.ChangeToAssign(instr);
- }
- IR::Instr *
- Lowerer::LowerChkUndecl(IR::Instr *instr)
- {
- IR::Instr *instrPrev = instr->m_prev;
- this->GenUndeclChk(instr, instr->GetSrc1());
- instr->Remove();
- return instrPrev;
- }
- void
- Lowerer::GenUndeclChk(IR::Instr *instrInsert, IR::Opnd *opnd)
- {
- IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- InsertCompareBranch(
- opnd,
- LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndeclBlockVar),
- Js::OpCode::BrNeq_A, labelContinue, instrInsert);
- IR::LabelInstr *labelThrow = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- instrInsert->InsertBefore(labelThrow);
- IR::Instr *instr = IR::Instr::New(
- Js::OpCode::RuntimeReferenceError,
- IR::RegOpnd::New(TyMachReg, m_func),
- IR::IntConstOpnd::New(SCODE_CODE(JSERR_UseBeforeDeclaration), TyInt32, m_func),
- m_func);
- instrInsert->InsertBefore(instr);
- this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
- instrInsert->InsertBefore(labelContinue);
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerStElemC
- ///
- ///----------------------------------------------------------------------------
// Lowers StElemC/StArrSegElemC: a store to a constant array index. For known
// array layouts the store is folded into a direct [base + constOffset] write;
// likely-native arrays additionally get array-type guards with bailout.
IR::Instr *
Lowerer::LowerStElemC(IR::Instr * stElem)
{
    IR::Instr *instrPrev = stElem->m_prev;
    IR::IndirOpnd * indirOpnd = stElem->GetDst()->AsIndirOpnd();

    // The index is constant: either an int-const index sym or a literal offset.
    IR::RegOpnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    Assert(!indexOpnd || indexOpnd->m_sym->IsIntConst());
    IntConstType value;
    if (indexOpnd)
    {
        value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
        indexOpnd->Free(this->m_func);
    }
    else
    {
        value = (IntConstType)indirOpnd->GetOffset();
    }

    if (stElem->IsJitProfilingInstr())
    {
        // Profiling mode: rewrite the store as a call to the simple store helper.
        Assert(stElem->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
        m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
        const auto meth = stElem->m_opcode == Js::OpCode::StElemC ? IR::HelperSimpleStoreArrayHelper : IR::HelperSimpleStoreArraySegHelper;
        stElem->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
        m_lowererMD.LoadHelperArgument(stElem, IR::IntConstOpnd::New(value, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
        stElem->UnlinkDst()->Free(m_func);
        m_lowererMD.LowerCall(stElem, 0);
        return instrPrev;
    }

    IntConstType base;
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsLikelyNativeArray())
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        IR::LabelInstr *labelBailOut = nullptr;
        IR::Instr *instrBailOut = nullptr;

        if (stElem->HasBailOutInfo())
        {
            // Split the bailout off the store: clone the store onto a fresh
            // instruction and turn the original into the BailOut target,
            // jumped over on the success path.
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrBailOut = stElem;
            stElem = IR::Instr::New(instrBailOut->m_opcode, m_func);
            instrBailOut->TransferTo(stElem);
            instrBailOut->InsertBefore(stElem);

            IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertBranch(Js::OpCode::Br, labelDone, instrBailOut);
            instrBailOut->InsertBefore(labelBailOut);
            instrBailOut->InsertAfter(labelDone);

            instrBailOut->m_opcode = Js::OpCode::BailOut;
            GenerateBailOut(instrBailOut);
        }

        if (!baseValueType.IsObject())
        {
            // Likely native array: do a vtable check and bail if it fails.
            Assert(labelBailOut);
            GenerateArrayTest(baseOpnd, labelBailOut, labelBailOut, stElem, true);
        }

        if (stElem->GetSrc1()->GetType() == TyVar)
        {
            // Storing a non-specialized value. This may cause array conversion, which invalidates all the code
            // that depends on the array check we've already done.
            // Call a helper that returns the type ID of the resulting array, check it here against the one we
            // expect, and bail if it fails.
            Assert(labelBailOut);

            // Call a helper to (try and) unbox the var and store it.
            // If we had to convert the array to do the store, we'll bail.
            LoadScriptContext(stElem);
            m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());

            IR::IntConstOpnd * intConstIndexOpnd = IR::IntConstOpnd::New(value, TyUint32, m_func);
            m_lowererMD.LoadHelperArgument(stElem, intConstIndexOpnd);
            m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
            IR::JnHelperMethod helperMethod;
            if (baseValueType.HasIntElements())
            {
                helperMethod = IR::HelperScrArr_SetNativeIntElementC;
            }
            else
            {
                helperMethod = IR::HelperScrArr_SetNativeFloatElementC;
            }

            // The helper's return value is the resulting array's type id;
            // compare it against the expected native type and bail on mismatch.
            IR::Instr *instrInsertBranch = stElem->m_next;
            IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyUint32, m_func);
            stElem->ReplaceDst(typeIdOpnd);
            m_lowererMD.ChangeToHelperCall(stElem, helperMethod);
            InsertCompareBranch(
                typeIdOpnd,
                IR::IntConstOpnd::New(
                    baseValueType.HasIntElements() ?
                        Js::TypeIds_NativeIntArray : Js::TypeIds_NativeFloatArray, TyUint32, m_func),
                Js::OpCode::BrNeq_A,
                labelBailOut,
                instrInsertBranch);

            return instrPrev;
        }
        else if (baseValueType.HasIntElements() && labelBailOut)
        {
            // Storing the "missing item" sentinel into an int array is not
            // representable; guard against it (or bail outright if proven).
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
            IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func);
            if (!stElem->GetSrc1()->IsEqual(missingElementOpnd))
            {
                InsertMissingItemCompareBranch(stElem->GetSrc1(), Js::OpCode::BrEq_A, labelBailOut, stElem);
            }
            else
            {
                //Its a missing value store and data flow proves that src1 is always missing value. Array cannot be an int array at the first place
                //if this code was ever hit. Just bailout, this code path would be updated with the profile information next time around.
                InsertBranch(Js::OpCode::Br, labelBailOut, stElem);
#if DBG
                labelBailOut->m_noHelperAssert = true;
#endif
                stElem->Remove();
                return instrPrev;
            }
        }
        else
        {
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
        }

        stElem->GetDst()->SetType(stElem->GetSrc1()->GetType());

        // Native array elements live in the inline head segment right after
        // the array object; compute the base byte offset of element storage.
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        if(baseValueType.HasIntElements())
        {
            base = sizeof(Js::JavascriptNativeIntArray) + offsetof(Js::SparseArraySegment<int32>, elements);
        }
        else
        {
            base = sizeof(Js::JavascriptNativeFloatArray) + offsetof(Js::SparseArraySegment<double>, elements);
        }
    }
    else if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::Array)
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        base = sizeof(Js::JavascriptArray) + offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }
    else
    {
        // StArrSegElemC (or unknown base): base points directly at a segment.
        Assert(stElem->m_opcode == Js::OpCode::StElemC || stElem->m_opcode == Js::OpCode::StArrSegElemC);
        Assert(indirOpnd->GetBaseOpnd()->GetType() == TyVar);
        base = offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }

    Assert(value >= 0);
    // MOV [r3 + offset(element) + index], src
    const BYTE indirScale =
        baseValueType.IsLikelyAnyOptimizedArray() ? GetArrayIndirScale(baseValueType) : m_lowererMD.GetDefaultIndirScale();
    IntConstType offset = base + (value << indirScale);
    Assert(Math::FitsInDWord(offset));
    indirOpnd->SetOffset((int32)offset);
    m_lowererMD.ChangeToWriteBarrierAssign(stElem, this->m_func);

    return instrPrev;
}
- void Lowerer::LowerLdArrHead(IR::Instr *const instr)
- {
- IR::RegOpnd *array = instr->UnlinkSrc1()->AsRegOpnd();
- const ValueType arrayValueType(array->GetValueType());
- Assert(arrayValueType.IsAnyOptimizedArray());
- if(arrayValueType.GetObjectType() == ObjectType::ObjectWithArray)
- {
- array = LoadObjectArray(array, instr);
- }
- // mov arrayHeadSegment, [array + offset(headSegment)]
- instr->GetDst()->SetType(TyMachPtr);
- instr->SetSrc1(
- IR::IndirOpnd::New(
- array,
- GetArrayOffsetOfHeadSegment(arrayValueType),
- TyMachPtr,
- instr->m_func));
- LowererMD::ChangeToAssign(instr);
- }
- // Creates the rest parameter array.
- // Var JavascriptArray::OP_NewScArrayWithElements(
- // uint32 elementCount,
- // Var *elements,
- // ScriptContext* scriptContext)
- IR::Instr *Lowerer::LowerRestParameter(IR::Opnd *formalsOpnd, IR::Opnd *dstOpnd, IR::Opnd *excessOpnd, IR::Instr *instr, IR::RegOpnd *generatorArgsPtrOpnd)
- {
- IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, dstOpnd, instr->m_func);
- instr->InsertAfter(helperCallInstr);
- // Var JavascriptArray::OP_NewScArrayWithElements(
- // int32 elementCount,
- // Var *elements,
- // ScriptContext* scriptContext)
- IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArrayWithElements;
- LoadScriptContext(helperCallInstr);
- BOOL isGenerator = this->m_func->GetJITFunctionBody()->IsCoroutine();
- // Elements pointer = ebp + (formals count + formals offset + 1)*sizeof(Var)
- IR::RegOpnd *srcOpnd = isGenerator ? generatorArgsPtrOpnd : IR::Opnd::CreateFramePointerOpnd(this->m_func);
- uint16 actualOffset = isGenerator ? 0 : GetFormalParamOffset(); //4
- IR::RegOpnd *argPtrOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- InsertAdd(false, argPtrOpnd, srcOpnd, IR::IntConstOpnd::New((formalsOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr, TyMachPtr, this->m_func), helperCallInstr);
- m_lowererMD.LoadHelperArgument(helperCallInstr, argPtrOpnd);
- m_lowererMD.LoadHelperArgument(helperCallInstr, excessOpnd);
- m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
- return helperCallInstr;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerArgIn
- ///
- /// This function checks the passed-in argument count against the index of this
- /// argument and uses null for a param value if the caller didn't explicitly
- /// pass anything.
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerArgIn(IR::Instr *instrArgIn)
- {
- IR::LabelInstr * labelDone;
- IR::LabelInstr * labelUndef;
- IR::LabelInstr * labelNormal;
- IR::LabelInstr * labelInit;
- IR::LabelInstr * labelInitNext;
- IR::BranchInstr * instrBranch;
- IR::Instr * instrArgInNext;
- IR::Instr * instrInsert;
- IR::Instr * instrPrev;
- IR::Instr * instrResume = nullptr;
- IR::Opnd * dstOpnd;
- IR::Opnd * srcOpnd;
- IR::Opnd * opndUndef;
- Js::ArgSlot argIndex;
- StackSym * symParam;
- BOOLEAN isDuplicate;
- IR::RegOpnd * generatorArgsPtrOpnd = nullptr;
- // We start with:
- // s1 = ArgIn_A param1
- // s2 = ArgIn_A param2
- // ...
- // sn = ArgIn_A paramn
- //
- // We want to end up with:
- //
- // s1 = ArgIn_A param1 -- Note that this is unconditional
- // count = (load from param area)
- // BrLt_A $start, count, n -- Forward cbranch to the uncommon case
- // Br $Ln
- // $start:
- // sn = assign undef
- // BrGe_A $Ln-1, count, n-1
- // sn-1 = assign undef
- // ...
- // s2 = assign undef
- // Br $done
- // $Ln:
- // sn = assign paramn
- // $Ln-1:
- // sn-1 = assign paramn-1
- // ...
- // s2 = assign param2
- // $done:
- AnalysisAssert(instrArgIn);
- IR::Opnd *restDst = nullptr;
- bool hasRest = instrArgIn->m_opcode == Js::OpCode::ArgIn_Rest;
- if (hasRest)
- {
- IR::Instr *restInstr = instrArgIn;
- restDst = restInstr->UnlinkDst();
- if (m_func->GetJITFunctionBody()->HasImplicitArgIns() && m_func->argInsCount > 0)
- {
- while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
- {
- instrArgIn = instrArgIn->m_prev;
- if (instrResume == nullptr)
- {
- instrResume = instrArgIn;
- }
- }
- restInstr->Remove();
- }
- else
- {
- Assert(instrArgIn->m_func == this->m_func);
- IR::Instr * instrCount = m_lowererMD.LoadInputParamCount(instrArgIn, -this->m_func->GetInParamsCount());
- IR::Opnd * excessOpnd = instrCount->GetDst();
- IR::LabelInstr *createRestArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- // BrGe $createRestArray, excess, 0
- InsertCompareBranch(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), Js::OpCode::BrGe_A, createRestArrayLabel, instrArgIn);
- // MOV excess, 0
- InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), instrArgIn);
- // $createRestArray
- instrArgIn->InsertBefore(createRestArrayLabel);
- if (m_func->GetJITFunctionBody()->IsCoroutine())
- {
- generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
- }
- IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
- IR::Instr *prev = LowerRestParameter(formalsOpnd, restDst, excessOpnd, instrArgIn, generatorArgsPtrOpnd);
- instrArgIn->Remove();
- return prev;
- }
- }
- srcOpnd = instrArgIn->GetSrc1();
- symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
- argIndex = symParam->GetParamSlotNum();
- if (argIndex == 1)
- {
- // The "this" argument is not source-dependent and doesn't need to be checked.
- if (m_func->GetJITFunctionBody()->IsCoroutine())
- {
- generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
- ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
- }
- m_lowererMD.ChangeToAssign(instrArgIn);
- return instrResume == nullptr ? instrArgIn->m_prev : instrResume;
- }
- Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();
- AssertMsg(argIndex <= formalsCount, "Expect to see the ArgIn's within the range of the formals");
- // Because there may be instructions between the ArgIn's, such as saves to the frame object,
- // we find the top of the sequence of ArgIn's and insert everything there. This assumes that
- // ArgIn's use param symbols as src's and not the results of previous instructions.
- instrPrev = instrArgIn;
- Js::ArgSlot currArgInCount = 0;
- Assert(this->m_func->argInsCount > 0);
- while (currArgInCount < this->m_func->argInsCount - 1)
- {
- instrPrev = instrPrev->m_prev;
- if (instrPrev->m_opcode == Js::OpCode::ArgIn_A)
- {
- srcOpnd = instrPrev->GetSrc1();
- symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
- AssertMsg(symParam->GetParamSlotNum() < argIndex, "ArgIn's not in numerical order");
- argIndex = symParam->GetParamSlotNum();
- currArgInCount++;
- }
- else
- {
- // Make sure that this instruction gets lowered.
- if (instrResume == nullptr)
- {
- instrResume = instrPrev;
- }
- }
- }
- // The loading of parameters will be inserted above this instruction.
- instrInsert = instrPrev;
- AnalysisAssert(instrInsert);
- if (instrResume == nullptr)
- {
- // We found no intervening non-ArgIn's, so lowering can resume at the previous instruction.
- instrResume = instrInsert->m_prev;
- }
- // Now insert all the checks and undef-assigns.
- if (m_func->GetJITFunctionBody()->IsCoroutine())
- {
- generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrInsert);
- }
- // excessOpnd = (load from param area) - formalCounts
- IR::Instr * instrCount = this->m_lowererMD.LoadInputParamCount(instrInsert, -formalsCount, true);
- IR::Opnd * excessOpnd = instrCount->GetDst();
- labelUndef = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/ true);
- Lowerer::InsertBranch(Js::OpCode::BrLt_A, labelUndef, instrInsert);
- // Br $Ln
- labelNormal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- labelInit = labelNormal;
- instrBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelNormal, this->m_func);
- instrInsert->InsertBefore(instrBranch);
- // Insert the labels
- instrInsert->InsertBefore(labelUndef);
- instrInsert->InsertBefore(labelNormal);
- //Adjustment for deadstore of ArgIn_A
- Js::ArgSlot highestSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
- Js::ArgSlot missingSlotNums = this->m_func->GetInParamsCount() - highestSlotNum;
- Assert(missingSlotNums >= 0);
- while (missingSlotNums > 0)
- {
- InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
- Lowerer::InsertBranch(Js::OpCode::BrEq_A, labelNormal, labelNormal);
- missingSlotNums--;
- }
- // MOV undefReg, undefAddress
- IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(labelNormal, LibraryValue::ValueUndefined);
- opndUndef = IR::RegOpnd::New(TyMachPtr, this->m_func);
- Lowerer::InsertMove(opndUndef, opndUndefAddress, labelNormal);
- BVSparse<JitArenaAllocator> *formalsBv = JitAnew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
- while (currArgInCount > 0)
- {
- dstOpnd = instrArgIn->GetDst();
- Assert(dstOpnd->IsRegOpnd());
- isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);
- // Now insert the undef initialization before the "normal" label
- // sn = assign undef
- Lowerer::InsertMove(dstOpnd, opndUndef, labelNormal);
- // INC excessOpnd
- // BrEq_A $Ln-1
- currArgInCount--;
- labelInitNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- // And insert the "normal" initialization before the "done" label
- // sn = assign paramn
- // $Ln-1:
- labelInit->InsertAfter(labelInitNext);
- labelInit = labelInitNext;
- instrArgInNext = instrArgIn->m_prev;
- instrArgIn->Unlink();
- Js::ArgSlot prevParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
- // function foo(x, x) { use(x); }
- // This should refer to the second 'x'. Since we reverse the order here however, we need to skip
- // the initialization of the first 'x' to not override the one for the second. WOOB:1105504
- if (isDuplicate)
- {
- instrArgIn->Free();
- }
- else
- {
- ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
- labelInit->InsertBefore(instrArgIn);
- this->m_lowererMD.ChangeToAssign(instrArgIn);
- }
- instrArgIn = instrArgInNext;
- while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
- {
- instrArgIn = instrArgIn->m_prev;
- AssertMsg(instrArgIn, "???");
- }
- //Adjustment for deadstore of ArgIn_A
- Js::ArgSlot currParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
- Js::ArgSlot diffSlotsNum = prevParamSlotNum - currParamSlotNum;
- AssertMsg(diffSlotsNum > 0, "Argins are not in order?");
- while (diffSlotsNum > 0)
- {
- InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
- InsertBranch(Js::OpCode::BrEq_A, labelInitNext, labelNormal);
- diffSlotsNum--;
- }
- AssertMsg(instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() <= formalsCount,
- "Expect all ArgIn's to be in numerical order by param slot");
- }
- // Insert final undef and normal initializations, jumping unconditionally to the end
- // rather than checking against the decremented formals count as we did inside the loop above.
- // s2 = assign undef
- dstOpnd = instrArgIn->GetDst();
- Assert(dstOpnd->IsRegOpnd());
- isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);
- Lowerer::InsertMove(dstOpnd, opndUndef, labelNormal);
- if (hasRest)
- {
- InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), labelNormal);
- }
- // Br $done
- labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- instrBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
- labelNormal->InsertBefore(instrBranch);
- // s2 = assign param2
- // $done:
- labelInit->InsertAfter(labelDone);
- if (hasRest)
- {
- // The formals count has been tainted, so restore it before lowering rest
- IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
- LowerRestParameter(formalsOpnd, restDst, excessOpnd, labelDone, generatorArgsPtrOpnd);
- }
- instrArgIn->Unlink();
- if (isDuplicate)
- {
- instrArgIn->Free();
- }
- else
- {
- ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
- labelDone->InsertBefore(instrArgIn);
- this->m_lowererMD.ChangeToAssign(instrArgIn);
- }
- JitAdelete(this->m_alloc, formalsBv);
- return instrResume;
- }
- void
- Lowerer::ConvertArgOpndIfGeneratorFunction(IR::Instr *instrArgIn, IR::RegOpnd *generatorArgsPtrOpnd)
- {
- if (this->m_func->GetJITFunctionBody()->IsCoroutine())
- {
- // Replace stack param operand with offset into arguments array held by
- // the generator object.
- IR::Opnd * srcOpnd = instrArgIn->UnlinkSrc1();
- StackSym * symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
- Js::ArgSlot argIndex = symParam->GetParamSlotNum();
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorArgsPtrOpnd, (argIndex - 1) * MachPtr, TyMachPtr, this->m_func);
- srcOpnd->Free(this->m_func);
- instrArgIn->SetSrc1(indirOpnd);
- }
- }
- IR::RegOpnd *
- Lowerer::LoadGeneratorArgsPtr(IR::Instr *instrInsert)
- {
- IR::Instr * instr = LoadGeneratorObject(instrInsert);
- IR::RegOpnd * generatorRegOpnd = instr->GetDst()->AsRegOpnd();
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetArgsPtrOffset(), TyMachPtr, instrInsert->m_func);
- IR::RegOpnd * argsPtrOpnd = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
- Lowerer::InsertMove(argsPtrOpnd, indirOpnd, instrInsert);
- return argsPtrOpnd;
- }
- IR::Instr *
- Lowerer::LoadGeneratorObject(IR::Instr * instrInsert)
- {
- StackSym * generatorSym = StackSym::NewImplicitParamSym(3, instrInsert->m_func);
- instrInsert->m_func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
- IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, instrInsert->m_func);
- IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
- instrInsert->m_func->SetHasImplicitParamLoad();
- return Lowerer::InsertMove(generatorRegOpnd, generatorSymOpnd, instrInsert);
- }
- IR::Instr *
- Lowerer::LowerArgInAsmJs(IR::Instr * instr)
- {
- Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
- Assert(instr && instr->m_opcode == Js::OpCode::ArgIn_A);
- IR::Instr* instrPrev = instr->m_prev;
- m_lowererMD.ChangeToAssign(instr);
- return instrPrev;
- }
bool
Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
{
    // Attempts to emit an inline fast path for a call whose target was marked
    // as one of a small set of built-ins (charAt/charCodeAt, Math.abs,
    // Array.prototype.push, String.prototype.replace). Returns false if the
    // call shape doesn't qualify (leaving the call to normal lowering);
    // otherwise emits a target-identity guard plus the fast path and returns
    // the fast-path generator's success value.
    IR::Opnd *src1 = callInstr->GetSrc1();
    IR::Opnd *src2 = callInstr->GetSrc2();

    // Get the arg count by looking at the slot number of the last arg symbol.
    if (!src2->IsSymOpnd())
    {
        // No args? Not sure this is possible, but handle it.
        return false;
    }

    StackSym *argLinkSym = src2->AsSymOpnd()->m_sym->AsStackSym();
    // Subtract "this" from the arg count.
    IntConstType argCount = argLinkSym->GetArgSlotNum() - 1;

    // Find the callee's built-in index (if any).
    Js::BuiltinFunction index = Func::GetBuiltInIndex(src1);

    // Warning!
    // Don't add new built-in to following switch. Built-ins needs to be inlined in call direct way.
    // Following is only for prejit scenarios where we don't get inlining always and generate fast path in lowerer.
    // Generating fastpath here misses fixed functions and globopt optimizations.
    switch(index)
    {
    case Js::BuiltinFunction::JavascriptString_CharAt:
    case Js::BuiltinFunction::JavascriptString_CharCodeAt:
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Char[Code]At assumes result is used.
            return false;
        }
        break;

    case Js::BuiltinFunction::Math_Abs:
#ifdef _M_IX86
        // The fast-path codegen for Abs requires SSE2 on x86.
        if (!AutoSystemInfo::Data.SSE2Available())
        {
            return false;
        }
#endif
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Abs assumes result is used.
            return false;
        }
        break;

    case Js::BuiltinFunction::JavascriptArray_Push:
    {
        if (argCount != 1)
        {
            return false;
        }
        if (callInstr->GetDst())
        {
            // Optimization of push assumes result is unused.
            return false;
        }
        // Walk two links up the arg chain to reach the ArgOut holding the
        // array ("this") operand.
        StackSym *linkSym = callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        linkSym = linkSym->m_instrDef->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Opnd *const arrayOpnd = linkSym->m_instrDef->GetSrc1();
        if(!arrayOpnd->IsRegOpnd())
        {
            // This should be rare, but needs to be handled.
            // By now, we've already started some of the inlining. Simply jmp to the helper.
            // The branch will get peeped later.
            return false;
        }
        if(!ShouldGenerateArrayFastPath(arrayOpnd, false, false, false) ||
            arrayOpnd->GetValueType().IsLikelyNativeArray())
        {
            // Rejecting native array for now, since we have to do a FromVar at the call site and bail out.
            return false;
        }
        break;
    }

    case Js::BuiltinFunction::JavascriptString_Replace:
    {
        if(argCount != 2)
        {
            return false;
        }
        if(!ShouldGenerateStringReplaceFastPath(callInstr, argCount))
        {
            return false;
        }
        break;
    }

    default:
        return false;
    }

    Assert(Func::IsBuiltInInlinedInLowerer(callInstr->GetSrc1()));

    // Guard: if the runtime call target is not the expected built-in function
    // object, fall through to the helper (original) call path at labelHelper.
    IR::Opnd *callTargetOpnd = callInstr->GetSrc1();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Opnd *objRefOpnd = IR::MemRefOpnd::New((void*)this->GetObjRefForBuiltInTarget(callTargetOpnd->AsRegOpnd()), TyMachReg, this->m_func);

    InsertCompareBranch(callTargetOpnd, objRefOpnd, Js::OpCode::BrNeq_A, labelHelper, callInstr);
    callInstr->InsertBefore(labelHelper);

    Assert(argCount <= 2);

    // Walk the ArgOut chain, capturing each argument operand (argsOpnd[0] is
    // "this") and hoisting the ArgOut instructions above labelHelper so they
    // execute on both the fast and helper paths.
    IR::Opnd *argsOpnd[3];
    IR::Opnd *linkOpnd = callInstr->GetSrc2();
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd * symOpnd = linkOpnd->AsSymOpnd();
        StackSym *sym = symOpnd->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;

        Assert(argCount >= 0);
        argsOpnd[argCount] = argInstr->GetSrc1();
        argCount--;

        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);

        linkOpnd = argInstr->GetSrc2();
    }
    // All slots (including "this") must have been filled.
    AnalysisAssert(argCount == -1);

    // Move startcall above the helper label as well.
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);

    // $doneLabel:
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    callInstr->InsertAfter(doneLabel);

    // Emit the built-in-specific fast path between the guard and the call.
    bool success = true;
    switch(index)
    {
    case Js::BuiltinFunction::Math_Abs:
        this->m_lowererMD.GenerateFastAbs(callInstr->GetDst(), argsOpnd[1], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::JavascriptString_CharCodeAt:
    case Js::BuiltinFunction::JavascriptString_CharAt:
        success = GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
            callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::JavascriptArray_Push:
        success = GenerateFastPush(argsOpnd[0], argsOpnd[1], callInstr, labelHelper, labelHelper, nullptr, doneLabel);
        break;

    case Js::BuiltinFunction::JavascriptString_Replace:
        success = GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    default:
        Assert(UNREACHED);
    }

    // Fast path falls through here: jump over the helper call to $doneLabel.
    IR::Instr *instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, this->m_func);
    labelHelper->InsertBefore(instr);

    return success;
}
- // Perform lowerer part of inlining built-in function.
- // For details, see inline.cpp.
- //
- // Description of changes here (note that taking care of Argouts are similar to InlineeStart):
- // - Move ArgOut_A_InlineBuiltIn next to the call instr -- used by bailout processing in register allocator.
- // - Remove StartCall and InlineBuiltInStart for this call.
- // Before:
- // StartCall fn
- // d1 = BIA s1, link1
- // ...
- // InlineBuiltInStart fn, link0
- // After:
- // ...
- // d1 = BIA s1, NULL
- void Lowerer::LowerInlineBuiltIn(IR::Instr* builtInEndInstr)
- {
- Assert(builtInEndInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || builtInEndInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd);
- IR::Instr* startCallInstr = nullptr;
- builtInEndInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
- startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
- return false;
- });
- // Keep the startCall around as bailout refers to it. Just unlink it for now - do not delete it.
- startCallInstr->Unlink();
- builtInEndInstr->Remove();
- }
intptr_t
Lowerer::GetObjRefForBuiltInTarget(IR::RegOpnd * regOpnd)
{
    // Computes the address at which the expected built-in function object for
    // this call target lives, so callers can emit an identity check of the
    // runtime target against it (see InlineBuiltInLibraryCall).
    intptr_t mathFns = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
    Js::BuiltinFunction index = regOpnd->m_sym->m_builtInIndex;
    AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
    // NOTE(review): the index is added to the base address without scaling by
    // an element size -- confirm the base is meant to be indexed in bytes (or
    // that callers account for the scaling) rather than in pointer-sized slots.
    return mathFns + index;
}
IR::Instr *
Lowerer::LowerNewRegEx(IR::Instr * instr)
{
    // Lowers NewRegEx: allocates a Js::JavascriptRegExp (recycler or mark-temp
    // stack allocation) and initializes its fields inline. src1 holds the
    // address of the compiled pattern. Returns the instruction at which
    // lowering should resume.
    IR::Opnd *src1 = instr->UnlinkSrc1();

    Assert(src1->IsAddrOpnd());

#if ENABLE_REGEX_CONFIG_OPTIONS
    if (REGEX_CONFIG_FLAG(RegexTracing))
    {
        // Regex tracing build: call the helper so the trace hooks run instead
        // of initializing the object inline.
        Assert(!instr->GetDst()->CanStoreTemp());
        IR::Instr * instrPrev = LoadScriptContext(instr);
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperScrRegEx_OP_NewRegEx);
        return instrPrev;
    }
#endif

    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd, IR::HelperAllocMemForJavascriptRegExp, sizeof(Js::JavascriptRegExp), &tempObjectSymOpnd);

    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable and pattern init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
                   LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableJavascriptRegExp),
                   this->outerMostLoopLabel, false);
    }
    else
    {
        // Otherwise write the vtable at offset 0 of the new object.
        GenerateMemInit(dstOpnd, 0, LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp), instr, isZeroed);
    }
    // Type, aux slots, and object array are initialized the same way whether
    // or not the vtable init was hoisted.
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfType(),
                    this->LoadLibraryValueOpnd(instr, LibraryValue::ValueRegexType), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfAuxSlots(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfObjectArray(), instr, isZeroed);

    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // The pattern is loop-invariant too; hoist its store alongside the
        // vtable init.
        InsertMove(IR::SymOpnd::New(tempObjectSymOpnd->m_sym,
                   tempObjectSymOpnd->m_offset + Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, this->m_func),
                   src1, this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfPattern(), src1, instr, isZeroed);
    }
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfSplitPattern(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), instr, isZeroed);
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), 0, instr, isZeroed);
    instr->Remove();

    return instrPrev;
}
- IR::Instr *
- Lowerer::GenerateRuntimeError(IR::Instr * insertBeforeInstr, Js::MessageId errorCode, IR::JnHelperMethod helper /*= IR::JnHelperMethod::HelperOp_RuntimeTypeError*/)
- {
- IR::Instr * runtimeErrorInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, this->m_func);
- runtimeErrorInstr->SetSrc1(IR::IntConstOpnd::New(errorCode, TyInt32, this->m_func, true));
- insertBeforeInstr->InsertBefore(runtimeErrorInstr);
- return this->LowerUnaryHelperMem(runtimeErrorInstr, helper);
- }
- bool Lowerer::IsNullOrUndefRegOpnd(IR::RegOpnd *opnd) const
- {
- StackSym *sym = opnd->m_sym;
- if (sym->IsIntConst() || sym->IsFloatConst())
- {
- return false;
- }
- return opnd->GetValueType().IsUndefined() || opnd->GetValueType().IsNull();
- }
- bool Lowerer::IsConstRegOpnd(IR::RegOpnd *opnd) const
- {
- StackSym *sym = opnd->m_sym;
- if (sym->IsIntConst() || sym->IsFloatConst())
- {
- return false;
- }
- const auto& vt = opnd->GetValueType();
- return vt.IsUndefined() || vt.IsNull() || (sym->m_isConst && vt.IsBoolean());
- }
- IR::Opnd * Lowerer::GetConstRegOpnd(IR::RegOpnd *opnd, IR::Instr * instr)
- {
- if (opnd->GetValueType().IsUndefined())
- {
- return this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
- }
- if (opnd->GetValueType().IsNull())
- {
- return this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull);
- }
- Assert(opnd->GetValueType().IsBoolean());
- return opnd->GetStackSym()->GetInstrDef()->GetSrc1()->AsAddrOpnd();
- }
- bool
- Lowerer::HasSideEffects(IR::Instr *instr)
- {
- if (LowererMD::IsCall(instr))
- {
- #ifdef _M_IX86
- IR::Opnd *src1 = instr->GetSrc1();
- if (src1->IsHelperCallOpnd())
- {
- IR::HelperCallOpnd * helper = src1->AsHelperCallOpnd();
- switch(helper->m_fnHelper)
- {
- case IR::HelperOp_Int32ToAtomInPlace:
- case IR::HelperOp_Int32ToAtom:
- case IR::HelperOp_UInt32ToAtom:
- return false;
- }
- }
- #endif
- return true;
- }
- return instr->HasAnySideEffects();
- }
- bool Lowerer::IsArgSaveRequired(Func *func) {
- return (!func->IsTrueLeaf() || func->IsJitInDebugMode() ||
- // GetHasImplicitParamLoad covers generators, asmjs,
- // and other javascript functions that implicitly read from the arg stack slots
- func->GetHasThrow() || func->GetHasImplicitParamLoad() || func->HasThis() || func->argInsCount > 0);
- }
IR::Instr*
Lowerer::GenerateFastInlineBuiltInMathRandom(IR::Instr* instr)
{
    // Inlines Math.random(). On x64 with a seeded PRNG it emits the generator
    // inline (the shift constants 23/17/26 and the s0+s1 combine match the
    // xorshift128+ algorithm); otherwise it falls back to the
    // HelperDirectMath_Random helper call. Returns the instruction at which
    // lowering should resume.
    AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
    IR::Instr* retInstr = instr->m_prev;
    IR::Opnd* dst = instr->GetDst();

#if defined(_M_X64)
    if (m_func->GetScriptContextInfo()->IsPRNGSeeded())
    {
        // IEEE-754 double bit masks: exponent bits for 1.0, and the 52
        // mantissa bits. Used below to map random bits into [1.0, 2.0).
        const uint64 mExp = 0x3FF0000000000000;
        const uint64 mMant = 0x000FFFFFFFFFFFFF;

        IR::RegOpnd* r0 = IR::RegOpnd::New(TyUint64, m_func);  // s0
        IR::RegOpnd* r1 = IR::RegOpnd::New(TyUint64, m_func);  // s1
        IR::RegOpnd* r3 = IR::RegOpnd::New(TyUint64, m_func);  // helper uint64 reg
        IR::RegOpnd* r4 = IR::RegOpnd::New(TyFloat64, m_func); // helper float64 reg

        // ===========================================================
        //  s0 = scriptContext->GetLibrary()->GetRandSeed1();
        //  s1 = scriptContext->GetLibrary()->GetRandSeed0();
        // ===========================================================
        this->InsertMove(r0,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, instr->m_func), instr);
        this->InsertMove(r1,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, instr->m_func), instr);

        // ===========================================================
        //  s1 ^= s1 << 23;
        // ===========================================================
        this->InsertMove(r3, r1, instr);
        this->InsertShift(Js::OpCode::Shl_A, false, r3, r3, IR::IntConstOpnd::New(23, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        //  s1 ^= s1 >> 17;
        // ===========================================================
        this->InsertMove(r3, r1, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(17, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        //  s1 ^= s0;
        // ===========================================================
        this->InsertXor(r1, r1, r0, instr);

        // ===========================================================
        //  s1 ^= s0 >> 26;
        // ===========================================================
        this->InsertMove(r3, r0, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(26, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        //  scriptContext->GetLibrary()->SetRandSeed0(s0);
        //  scriptContext->GetLibrary()->SetRandSeed1(s1);
        // ===========================================================
        this->InsertMove(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, m_func), r0, instr);
        this->InsertMove(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, m_func), r1, instr);

        // ===========================================================
        //  dst = bit_cast<float64>(((s0 + s1) & mMant) | mExp);
        // ===========================================================
        this->InsertAdd(false, r1, r1, r0, instr);
        this->InsertMove(r3, IR::IntConstOpnd::New(mMant, TyInt64, m_func, true), instr);
        this->InsertAnd(r1, r1, r3, instr);
        this->InsertMove(r3, IR::IntConstOpnd::New(mExp, TyInt64, m_func, true), instr);
        this->InsertOr(r1, r1, r3, instr);
        this->InsertMoveBitCast(dst, r1, instr);

        // ===================================================================
        //  dst -= 1.0;
        // ===================================================================
        // Subtracting 1.0 maps the [1.0, 2.0) value built above into [0.0, 1.0).
        this->InsertMove(r4, IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleOnePointZeroAddr(), TyFloat64, m_func, IR::AddrOpndKindDynamicDoubleRef), instr);
        this->InsertSub(false, dst, dst, r4, instr);
    }
    else
#endif
    {
        // Helper fallback. The helper call needs a register destination; use
        // a temp and copy out afterwards if dst isn't one.
        IR::Opnd* tmpdst = dst;
        if (!dst->IsRegOpnd())
        {
            tmpdst = IR::RegOpnd::New(dst->GetType(), instr->m_func);
        }

        LoadScriptContext(instr);
        IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, tmpdst, instr->m_func);
        instr->InsertBefore(helperCallInstr);
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperDirectMath_Random);

        if (tmpdst != dst)
        {
            InsertMove(dst, tmpdst, instr);
        }
    }

    instr->Remove();
    return retInstr;
}
IR::Instr *
Lowerer::LowerCallDirect(IR::Instr * instr)
{
    // Lowers a direct call: peels off the ArgOut_A_InlineSpecialized that
    // carries the function object, splices that arg's link into the call,
    // splits out an implicit-call bailout check when required, and emits the
    // call itself. Returns the instruction at which lowering should resume.
    IR::Opnd* linkOpnd = instr->UnlinkSrc2();
    StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
    IR::Instr* argInstr = linkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized);

    // Take the function object from the specialized ArgOut and re-link the
    // call to the rest of the arg chain, then drop the ArgOut.
    IR::Opnd* funcObj = argInstr->UnlinkSrc1();
    instr->SetSrc2(argInstr->UnlinkSrc2());
    argInstr->Remove();

    if (instr->HasBailOutInfo() && !instr->HasLazyBailOut())
    {
        // Split off and lower the implicit-call bailout check around the call.
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(instr, instr->m_next, instr->m_next);
        this->LowerBailOnEqualOrNotEqual(bailOutInstr);
    }

    // Pass CallFlags_Value only when the call's result is consumed.
    Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
    return this->GenerateDirectCall(instr, funcObj, (ushort)flags);
}
- IR::Instr *
- Lowerer::GenerateDirectCall(IR::Instr* inlineInstr, IR::Opnd* funcObj, ushort callflags)
- {
- int32 argCount = m_lowererMD.LowerCallArgs(inlineInstr, callflags);
- m_lowererMD.LoadHelperArgument(inlineInstr, funcObj);
- m_lowererMD.LowerCall(inlineInstr, (Js::ArgSlot)argCount); //to account for function object and callinfo
- return inlineInstr->m_prev;
- }
- /*
- * GenerateHelperToArrayPushFastPath
- * Generates Helper Call and pushes arguments to the Push HelperCall
- */
- IR::Instr *
- Lowerer::GenerateHelperToArrayPushFastPath(IR::Instr * instr, IR::LabelInstr * bailOutLabelHelper)
- {
- IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
- IR::Opnd * elementHelperOpnd = instr->UnlinkSrc2();
- IR::JnHelperMethod helperMethod;
- if(elementHelperOpnd->IsInt32())
- {
- Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeIntArray());
- helperMethod = IR::HelperArray_NativeIntPush;
- m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
- }
- else if(elementHelperOpnd->IsFloat())
- {
- Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeFloatArray());
- helperMethod = IR::HelperArray_NativeFloatPush;
- m_lowererMD.LoadDoubleHelperArgument(instr, elementHelperOpnd);
- }
- else
- {
- helperMethod = IR::HelperArray_VarPush;
- m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
- }
- m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
- LoadScriptContext(instr);
- return m_lowererMD.ChangeToHelperCall(instr, helperMethod);
- }
- /*
- * GenerateHelperToArrayPopFastPath
- * Generates Helper Call and pushes arguments to the Pop HelperCall
- */
- IR::Instr *
- Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * doneLabel, IR::LabelInstr * bailOutLabelHelper)
- {
- IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
- ValueType arrayValueType = arrayHelperOpnd->GetValueType();
- IR::JnHelperMethod helperMethod;
- //Decide the helperMethod based on dst availability and nativity of the array.
- if(arrayValueType.IsLikelyNativeArray() && !instr->GetDst())
- {
- helperMethod = IR::HelperArray_NativePopWithNoDst;
- }
- else if(arrayValueType.IsLikelyNativeIntArray())
- {
- helperMethod = IR::HelperArray_NativeIntPop;
- }
- else if(arrayValueType.IsLikelyNativeFloatArray())
- {
- helperMethod = IR::HelperArray_NativeFloatPop;
- }
- else
- {
- helperMethod = IR::HelperArray_VarPop;
- }
- m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
- //We do not need scriptContext for HelperArray_NativePopWithNoDst call.
- if(helperMethod != IR::HelperArray_NativePopWithNoDst)
- {
- LoadScriptContext(instr);
- }
- IR::Instr * retInstr = m_lowererMD.ChangeToHelperCall(instr, helperMethod, bailOutLabelHelper);
- //We don't need missing item check for var arrays, as there it is taken care by the helper.
- if(arrayValueType.IsLikelyNativeArray())
- {
- if(retInstr->GetDst())
- {
- //Do this check only for native arrays with Dst. For Var arrays, this is taken care in the Runtime helper itself.
- InsertMissingItemCompareBranch(retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper);
- }
- else
- {
- //We need unconditional jump to doneLabel, if there is no dst in Pop instr.
- InsertBranch(Js::OpCode::Br, true, doneLabel, bailOutLabelHelper);
- }
- }
- return retInstr;
- }
IR::Instr *
Lowerer::LowerCondBranchCheckBailOut(IR::BranchInstr * branchInstr, IR::Instr * helperCall, bool isHelper)
{
    // Lowers a BrTrue/BrFalse that may carry bailout info: splits out the
    // implicit-call bailout (and, when present, a shared debugger bailout)
    // before handing the branch to the MD lowerer.
    Assert(branchInstr->m_opcode == Js::OpCode::BrTrue_A || branchInstr->m_opcode == Js::OpCode::BrFalse_A);

    if (branchInstr->HasBailOutInfo())
    {
#ifdef ENABLE_SCRIPT_DEBUGGING
        IR::BailOutKind debuggerBailOutKind = IR::BailOutInvalid;
        if (branchInstr->HasAuxBailOut())
        {
            // We have shared debugger bailout. For branches we lower it here, not in SplitBailForDebugger.
            // See SplitBailForDebugger for details.
            AssertMsg(!(branchInstr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
            debuggerBailOutKind = branchInstr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;
            AssertMsg((debuggerBailOutKind & ~(IR::BailOutIgnoreException | IR::BailOutForceByFlag)) == 0, "Only IR::BailOutIgnoreException|ForceByFlag supported here.");
        }
#endif
        // Split off and lower the implicit-call bailout around the helper call.
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(branchInstr, helperCall, branchInstr);
        IR::Instr* prevInstr = this->LowerBailOnEqualOrNotEqual(bailOutInstr, branchInstr, nullptr, nullptr, isHelper);

#ifdef ENABLE_SCRIPT_DEBUGGING
        if (debuggerBailOutKind != IR::BailOutInvalid)
        {
            // Note that by this time implicit calls bailout is already lowered.
            // What we do here is use same bailout info and lower debugger bailout which would be shared bailout.
            BailOutInfo* bailOutInfo = bailOutInstr->GetBailOutInfo();
            IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
                Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
            prevInstr->InsertAfter(debuggerBailoutInstr);

            // The result of that is:
            // original helper op_* instr, then debugger bailout, then implicit calls bailout/etc with the branch instr.
            // Example:
            //    s35(eax).i32    =  CALL           Op_GreaterEqual.u32  # -- original op_* helper
            //    s34.i32         =  MOV            s35(eax).i32         #
            //                       BailForDebugger                     # Bailout: #0042 (BailOutIgnoreException) -- the debugger bailout
            //                       CMP            [0x0003BDE0].i8, 1 (0x1).i8 # -- implicit calls check
            //                       JEQ            $L10                 #
            //$L11: [helper]                                             #
            //                       CALL           SaveAllRegistersAndBranchBailOut.u32 # Bailout: #0042 (BailOutOnImplicitCalls)
            //                       JMP            $L5                  #
            //$L10: [helper]                                             #
            //                       BrFalse_A      $L3, s34.i32         #0034 -- The BrTrue/BrFalse branch (branch instr)
            //$L6: [helper]                                              #0042
            this->LowerBailForDebugger(debuggerBailoutInstr, isHelper);
            // After lowering this we will have a check which on bailout condition will JMP to $L11.
        }
#else
        // Reference prevInstr to avoid an unused-variable warning in
        // non-debugging builds.
        (prevInstr);
#endif
    }
    return m_lowererMD.LowerCondBranch(branchInstr);
}
- IR::SymOpnd *
- Lowerer::LoadCallInfo(IR::Instr * instrInsert)
- {
- IR::SymOpnd * srcOpnd;
- Func * func = instrInsert->m_func;
- if (func->GetJITFunctionBody()->IsCoroutine())
- {
- // Generator function arguments and ArgumentsInfo are not on the stack. Instead they
- // are accessed off the generator object (which is prm1).
- IR::Instr *genLoadInstr = LoadGeneratorObject(instrInsert);
- IR::RegOpnd * generatorRegOpnd = genLoadInstr->GetDst()->AsRegOpnd();
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetCallInfoOffset(), TyMachPtr, func);
- IR::Instr * instr = Lowerer::InsertMove(IR::RegOpnd::New(TyMachPtr, func), indirOpnd, instrInsert);
- StackSym * callInfoSym = StackSym::New(TyMachReg, func);
- IR::SymOpnd * callInfoSymOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
- Lowerer::InsertMove(callInfoSymOpnd, instr->GetDst(), instrInsert);
- srcOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
- }
- else
- {
- // Otherwise callInfo is always the "second" argument.
- // The stack looks like this:
- //
- // script param N
- // ...
- // script param 1
- // callinfo
- // function object
- // return addr
- // FP -> FP chain
- StackSym * srcSym = LowererMD::GetImplicitParamSlotSym(1, func);
- srcOpnd = IR::SymOpnd::New(srcSym, TyMachReg, func);
- }
- return srcOpnd;
- }
IR::Instr *
Lowerer::LowerBailOnNotStackArgs(IR::Instr * instr)
{
    // Lowers BailOnNotStackArgs: for non-inlinees, emits a bailout when the
    // actual argument count reaches Js::InlineeCallInfo::MaxInlineeArgoutCount;
    // for inlinees the check is statically unnecessary and the instruction is
    // simply removed. Throws a rejit exception when stack args optimization
    // isn't in effect at all.
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test

    // Label to skip Bailout and continue: reuse the following label if there
    // is one, else create a fresh continuation label after the instruction.
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
        instr->InsertAfter(continueLabelInstr);
    }

    if (!instr->m_func->IsInlinee())
    {
        //BailOut if the number of actuals (except "this" argument) is greater than or equal to 15.
        IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, instr->m_func);
        const IR::AutoReuseOpnd autoReuseldLenDstOpnd(ldLenDstOpnd, instr->m_func);
        IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, instr->m_func);
        ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); //LdLen_A works only on stack arguments
        instr->InsertBefore(ldLen);
        this->GenerateFastRealStackArgumentsLdLen(ldLen);
        // Branch past the bailout when the count is in range; otherwise fall
        // through into the bailout.
        this->InsertCompareBranch(ldLenDstOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount, TyUint32, m_func, true), Js::OpCode::BrLt_A, true, continueLabelInstr, instr);
        this->GenerateBailOut(instr, nullptr, nullptr);
    }
    else
    {
        //For Inlined functions, we are sure actuals can't exceed Js::InlineeCallInfo::MaxInlineeArgoutCount (15).
        //No need to bail out.
        instr->Remove();
    }

    return prevInstr;
}
- // Lowers BailOnNotSpreadable for inlined spread calls.
- // When profile/value-type data already rules out a simple, non-optimized,
- // non-object array (or we're in an inlinee), throws a rejit exception so the
- // function is recompiled without inline spread. Otherwise emits runtime
- // checks (array test, no-missing-values flag, length limit) that branch to a
- // shared bailout block when the array is not spreadable inline.
- IR::Instr *
- Lowerer::LowerBailOnNotSpreadable(IR::Instr *instr)
- {
- // We only avoid bailing out / throwing a rejit exception when the array operand is a simple, non-optimized, non-object array.
- IR::Instr * prevInstr = instr->m_prev;
- Func *func = instr->m_func;
- IR::Opnd *arraySrcOpnd = instr->UnlinkSrc1();
- IR::RegOpnd *arrayOpnd = GetRegOpnd(arraySrcOpnd, instr, func, TyMachPtr);
- const ValueType baseValueType(arrayOpnd->GetValueType());
- // Check if we can just throw a rejit exception based on valuetype alone instead of bailing out.
- if (!baseValueType.IsLikelyArray()
- || baseValueType.IsLikelyAnyOptimizedArray()
- || (baseValueType.IsLikelyObject() && (baseValueType.GetObjectType() == ObjectType::ObjectWithArray))
- // Validate that GenerateArrayTest will not fail.
- || !(baseValueType.IsUninitialized() || baseValueType.HasBeenObject())
- || m_func->IsInlinee())
- {
- throw Js::RejitException(RejitReason::InlineSpreadDisabled);
- }
- // Past this point, we will need to use a bailout.
- IR::LabelInstr *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
- // See if we can skip various array checks on value type alone
- if (!baseValueType.IsArray())
- {
- GenerateArrayTest(arrayOpnd, bailOutLabel, bailOutLabel, instr, false);
- }
- if (!(baseValueType.IsArray() && baseValueType.HasNoMissingValues()))
- {
- // Bail out if the HasNoMissingValues flag is clear (array may contain holes).
- InsertTestBranch(
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
- IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
- Js::OpCode::BrEq_A,
- bailOutLabel,
- instr);
- }
- // Bail out if the array length exceeds the inlinee arg-out limit.
- IR::IndirOpnd *arrayLenPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
- InsertCompareBranch(arrayLenPtrOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount - 1, TyUint8, func), Js::OpCode::BrGt_A, true, bailOutLabel, instr);
- IR::LabelInstr *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- InsertBranch(Js::OpCode::Br, skipBailOutLabel, instr);
- instr->InsertBefore(bailOutLabel);
- instr->InsertAfter(skipBailOutLabel);
- GenerateBailOut(instr);
- return prevInstr;
- }
- // Lowers a bailout used by polymorphic inlining: an unconditional bailout
- // taken when the polymorphic inline type check or function check failed.
- // The guard branches themselves are generated elsewhere; this only emits
- // the bailout call.
- IR::Instr *
- Lowerer::LowerBailOnNotPolymorphicInlinee(IR::Instr * instr)
- {
- Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnFailedPolymorphicInlineTypeCheck || instr->GetBailOutKind() == IR::BailOutOnPolymorphicInlineFunction));
- IR::Instr* instrPrev = instr->m_prev;
- this->GenerateBailOut(instr, nullptr, nullptr);
- return instrPrev;
- }
- // Emits the compare-and-branch that skips the bailout when the condition
- // does not hold, plus the labels around the bailout path.
- // 'onEqual' selects whether equality (true) or inequality triggers the
- // bailout; the branch emitted is the inverted condition targeting the
- // continue label. When not already inside a helper block, a helper label is
- - // inserted so the fall-through bailout code is marked as a helper path.
- void
- Lowerer::LowerBailoutCheckAndLabel(IR::Instr *instr, bool onEqual, bool isHelper)
- {
- // Label to skip Bailout and continue
- IR::LabelInstr * continueLabelInstr;
- IR::Instr *instrNext = instr->m_next;
- if (instrNext->IsLabelInstr())
- {
- continueLabelInstr = instrNext->AsLabelInstr();
- }
- else
- {
- continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
- instr->InsertAfter(continueLabelInstr);
- }
- if(instr->GetBailOutKind() == IR::BailOutInjected)
- {
- // BailOnEqual 0, 0
- Assert(onEqual);
- Assert(instr->GetSrc1()->IsEqual(instr->GetSrc2()));
- Assert(instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 0);
- // The operands cannot be equal when generating a compare (assert) but since this is for testing purposes, hoist a src.
- // Ideally, we would just create a BailOut instruction that generates a guaranteed bailout, but there seem to be issues
- // with doing this in a non-helper path. So finally, it would generate:
- // xor s0, s0
- // test s0, s0
- // jnz $continue
- // $bailout:
- // // bailout
- // $continue:
- instr->HoistSrc1(LowererMD::GetLoadOp(instr->GetSrc1()->GetType()));
- }
- InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(),
- onEqual ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, continueLabelInstr, instr);
- if (!isHelper)
- {
- IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- instr->InsertBefore(helperLabelInstr);
- #if DBG
- helperLabelInstr->m_noLazyHelperAssert = true;
- #endif
- }
- }
- // Lowers BailOnEqual / BailOnNotEqual.
- // Emits the skip-bailout compare via LowerBailoutCheckAndLabel, then, for
- // BailOutOnImplicitCalls with a valid inline-cache index, updates the
- // profiled field info on the bailout path when the implicit call was an
- // accessor, so the next JIT sees the accessor/polymorphic flags.
- IR::Instr *
- Lowerer::LowerBailOnEqualOrNotEqual(IR::Instr * instr,
- IR::BranchInstr *branchInstr, // = nullptr
- IR::LabelInstr *labelBailOut, // = nullptr
- IR::PropertySymOpnd * propSymOpnd, // = nullptr
- bool isHelper) // = false
- {
- IR::Instr * prevInstr = instr->m_prev;
- // Bail out test
- bool onEqual = instr->m_opcode == Js::OpCode::BailOnEqual;
- LowerBailoutCheckAndLabel(instr, onEqual, isHelper);
- // BailOutOnImplicitCalls is a post-op bailout. Since we look at the profile info for LdFld/StFld to decide whether the instruction may or may not call an accessor,
- // we need to update this profile information on the bailout path for BailOutOnImplicitCalls if the implicit call was an accessor call.
- if(propSymOpnd && ((instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls) && (propSymOpnd->m_inlineCacheIndex != -1) &&
- instr->m_func->HasProfileInfo())
- {
- // result = AND implCallFlags, ~ImplicitCall_None
- // TST result, ImplicitCall_Accessor
- // JEQ $bail
- // OR profiledFlags, ( FldInfo_FromAccessor | FldInfo_Polymorphic )
- // $bail
- IR::Opnd * implicitCallFlags = GetImplicitCallFlagsOpnd();
- IR::Opnd * accessorImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_Accessor & ~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
- IR::Opnd * maskNoImplicitCall = IR::IntConstOpnd::New((Js::ImplicitCallFlags)~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
- IR::Opnd * fldInfoAccessor = IR::IntConstOpnd::New(Js::FldInfo_FromAccessor | Js::FldInfo_Polymorphic, GetFldInfoFlagsType(), instr->m_func, true);
- IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
- IR::Instr * andInstr = InsertAnd(IR::RegOpnd::New(GetImplicitCallFlagsType(), instr->m_func), implicitCallFlags, maskNoImplicitCall, instr);
- InsertTestBranch(andInstr->GetDst(), accessorImplicitCall, Js::OpCode::BrEq_A, label, instr);
- intptr_t infoAddr = instr->m_func->GetReadOnlyProfileInfo()->GetFldInfoAddr(propSymOpnd->m_inlineCacheIndex);
- IR::Opnd * profiledFlags = IR::MemRefOpnd::New(infoAddr + Js::FldInfo::GetOffsetOfFlags(), TyInt8, instr->m_func);
- InsertOr(profiledFlags, profiledFlags, fldInfoAccessor, instr);
- instr->InsertBefore(label);
- }
- this->GenerateBailOut(instr, branchInstr, labelBailOut);
- return prevInstr;
- }
- // Lowers BailOnNegative: bails out when the int32/uint32 src is negative.
- // Splits off the bailout kind into a shared bailout block via
- // LowerOneBailOutKind, then emits a signed compare-against-zero that
- // branches past the bailout when src >= 0.
- void Lowerer::LowerBailOnNegative(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::BailOnNegative);
- Assert(instr->HasBailOutInfo());
- Assert(!instr->GetDst());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->GetType() == TyInt32 || instr->GetSrc1()->GetType() == TyUint32);
- Assert(!instr->GetSrc2());
- IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
- LowerOneBailOutKind(instr, instr->GetBailOutKind(), false);
- Assert(!instr->HasBailOutInfo());
- IR::Instr *insertBeforeInstr = instr->m_next;
- Func *const func = instr->m_func;
- // test src, src
- // jns $skipBailOut
- InsertCompareBranch(
- instr->UnlinkSrc1(),
- IR::IntConstOpnd::New(0, TyInt32, func, true),
- Js::OpCode::BrGe_A,
- skipBailOutLabel,
- insertBeforeInstr);
- instr->Remove();
- }
- // Lowers BailOnNotObject: emits an object test on src1 that jumps to the
- // continue label when src1 is an object (fContinueLabel = true); otherwise
- // execution falls into the generated bailout.
- IR::Instr *
- Lowerer::LowerBailOnNotObject(IR::Instr *instr,
- IR::BranchInstr *branchInstr /* = nullptr */,
- IR::LabelInstr *labelBailOut /* = nullptr */)
- {
- IR::Instr *prevInstr = instr->m_prev;
- IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label,
- m_func);
- instr->InsertAfter(continueLabelInstr);
- this->m_lowererMD.GenerateObjectTest(instr->UnlinkSrc1(),
- instr,
- continueLabelInstr,
- /* fContinueLabel = */ true);
- this->GenerateBailOut(instr, branchInstr, labelBailOut);
- return prevInstr;
- }
- IR::Instr*
- Lowerer::LowerCheckIsFuncObj(IR::Instr *instr, bool checkFuncInfo)
- {
- // The CheckIsFuncObj instr and CheckFuncInfo instr (checkFuncInfo = true) are used to
- // generate bailout instrs that type check a function (and can also check the func info).
- // Rather than creating these bailout instrs in Inline, they are created in Lower because
- // CheckIsFuncObj and CheckFuncInfo instrs can be hoisted outside of loops and thus the
- // bailout instrs created can exist outside of loops.
- IR::RegOpnd *funcOpnd = instr->GetSrc1()->AsRegOpnd();
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- BailOutInfo *bailOutInfo = instr->GetBailOutInfo();
- // Check that the property is an object.
- InsertObjectCheck(funcOpnd, instr, bailOutKind, bailOutInfo);
- // Check that the object is a function with the correct type ID.
- IR::Instr *lastInstr = InsertFunctionTypeIdCheck(funcOpnd, instr, bailOutKind, bailOutInfo);
- if (checkFuncInfo)
- {
- // Check that the function body matches the func info.
- lastInstr = InsertFunctionInfoCheck(
- funcOpnd, instr, instr->GetSrc2()->AsAddrOpnd(), bailOutKind, bailOutInfo);
- lastInstr->SetByteCodeOffset(instr);
- }
- if (bailOutInfo->bailOutInstr == instr)
- {
- // bailOutInstr is currently instr. By changing bailOutInstr to point to lastInstr, the next
- // instruction to be lowered (lastInstr) will create the bailout target. This is necessary in
- // cases where instr does not have a shared bailout (ex: instr was not hoisted outside of a loop).
- bailOutInfo->bailOutInstr = lastInstr;
- }
- // the CheckFunctionEntryPoint instr exists in order to create the instrs above. It does not have
- // any other purpose and thus it is removed. The instr's BailOutInfo continues to be used and thus
- // must not be deleted. Flags are turned off to stop Remove() from deleting instr's BailOutInfo.
- instr->hasBailOutInfo = false;
- instr->hasAuxBailOut = false;
- instr->Remove();
- return lastInstr;
- }
- // Lowers BailOnTrue: copies src1 into a fresh register, tests it against
- // itself, and skips the bailout when the value is zero (falsy); a non-zero
- // value falls through into the generated bailout.
- IR::Instr*
- Lowerer::LowerBailOnTrue(IR::Instr* instr, IR::LabelInstr* labelBailOut /*nullptr*/)
- {
- IR::Instr* instrPrev = instr->m_prev;
- IR::LabelInstr* continueLabel = instr->GetOrCreateContinueLabel();
- IR::RegOpnd * regSrc1 = IR::RegOpnd::New(instr->GetSrc1()->GetType(), this->m_func);
- InsertMove(regSrc1, instr->UnlinkSrc1(), instr);
- InsertTestBranch(regSrc1, regSrc1, Js::OpCode::BrEq_A, continueLabel, instr);
- GenerateBailOut(instr, nullptr, labelBailOut);
- return instrPrev;
- }
- // Lowers BailOnNotBuiltIn: compares src1 against the cached built-in
- // function at index src2 in the script context's built-in functions table,
- // and bails out when they differ. Under TESTBUILTINFORNULL, first emits a
- // debug-break if the table entry is null.
- IR::Instr *
- Lowerer::LowerBailOnNotBuiltIn(IR::Instr *instr,
- IR::BranchInstr *branchInstr /* = nullptr */,
- IR::LabelInstr *labelBailOut /* = nullptr */)
- {
- Assert(instr->GetSrc2()->IsIntConstOpnd());
- IR::Instr *prevInstr = instr->m_prev;
- intptr_t builtInFuncs = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
- Js::BuiltinFunction builtInIndex = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();
- IR::Opnd *builtIn = IR::MemRefOpnd::New((void*)(builtInFuncs + builtInIndex * MachPtr), TyMachReg, instr->m_func);
- #if TESTBUILTINFORNULL
- IR::LabelInstr * continueAfterTestLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
- InsertTestBranch(builtIn, builtIn, Js::OpCode::BrNeq_A, continueAfterTestLabel, instr);
- this->m_lowererMD.GenerateDebugBreak(instr);
- instr->InsertBefore(continueAfterTestLabel);
- #endif
- IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
- instr->InsertAfter(continueLabel);
- InsertCompareBranch(instr->UnlinkSrc1(), builtIn, Js::OpCode::BrEq_A, continueLabel, instr);
- GenerateBailOut(instr, branchInstr, labelBailOut);
- return prevInstr;
- }
- #ifdef ENABLE_SCRIPT_DEBUGGING
- // Lowers BailForDebugger.
- // For an explicit (unconditional) debugger bailout, only a non-helper label
- // is created and the bailout is generated directly. Otherwise, one runtime
- - // check per set bit in the bailout kind is emitted (force-interpreter flag,
- // ignore-exception offset, breakpoint probe count, stepping, frame base,
- // local value changed), each branching to the shared bailout label; every
- // processed bit is cleared and all bits must be consumed by the end.
- IR::Instr *
- Lowerer::LowerBailForDebugger(IR::Instr* instr, bool isInsideHelper /* = false */)
- {
- IR::Instr * prevInstr = instr->m_prev;
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- AssertMsg(bailOutKind, "bailOutKind should not be zero at this time.");
- AssertMsg(!(bailOutKind & IR::BailOutExplicit) || bailOutKind == IR::BailOutExplicit,
- "BailOutExplicit cannot be combined with any other bailout flags.");
- IR::LabelInstr* explicitBailOutLabel = nullptr;
- if (!(bailOutKind & IR::BailOutExplicit))
- {
- intptr_t flags = m_func->GetScriptContextInfo()->GetDebuggingFlagsAddr();
- // Check 1 (do we need to bail out?)
- // JXX bailoutLabel
- // Check 2 (do we need to bail out?)
- // JXX bailoutLabel
- // ...
- // JMP continueLabel
- // bailoutDocumentLabel:
- // (determine if document boundary reached - if not, JMP to continueLabel)
- // NOTE: THIS BLOCK IS CONDITIONALLY GENERATED BASED ON doGenerateBailOutDocumentBlock
- // bailoutLabel:
- // bail out
- // continueLabel:
- // ...
- IR::LabelInstr* bailOutDocumentLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
- instr->InsertBefore(bailOutDocumentLabel);
- IR::LabelInstr* bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
- instr->InsertBefore(bailOutLabel);
- IR::LabelInstr* continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ isInsideHelper);
- instr->InsertAfter(continueLabel);
- IR::BranchInstr* continueBranchInstr = this->InsertBranch(Js::OpCode::Br, continueLabel, bailOutDocumentLabel); // JMP continueLabel.
- bool doGenerateBailOutDocumentBlock = false;
- const IR::BailOutKind c_forceAndIgnoreEx = IR::BailOutForceByFlag | IR::BailOutIgnoreException;
- if ((bailOutKind & c_forceAndIgnoreEx) == c_forceAndIgnoreEx)
- {
- // It's faster to check these together in 1 check rather than 2 separate checks at run time.
- // CMP [&(flags->m_forceInterpreter, flags->m_isIgnoreException)], 0
- // BNE bailout
- IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt16, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt16, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= c_forceAndIgnoreEx;
- }
- else
- {
- if (bailOutKind & IR::BailOutForceByFlag)
- {
- // CMP [&flags->m_forceInterpreter], 0
- // BNE bailout
- IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= IR::BailOutForceByFlag;
- }
- if (bailOutKind & IR::BailOutIgnoreException)
- {
- // CMP [&flags->m_byteCodeOffsetAfterIgnoreException], DebuggingFlags::InvalidByteCodeOffset
- // BNE bailout
- IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetByteCodeOffsetAfterIgnoreExceptionOffset(), TyInt32, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(DebuggingFlags::InvalidByteCodeOffset, TyInt32, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= IR::BailOutIgnoreException;
- }
- }
- if (bailOutKind & IR::BailOutBreakPointInFunction)
- {
- // CMP [&functionBody->m_sourceInfo.m_probeCount], 0
- // BNE bailout
- IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetProbeCountAddr(), TyInt32, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt32, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= IR::BailOutBreakPointInFunction;
- }
- // on method entry
- if(bailOutKind & IR::BailOutStep)
- {
- // TEST STEP_BAILOUT, [&stepController->StepType]
- // BNE BailoutLabel
- IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(Js::STEP_BAILOUT, TyInt8, this->m_func, /*dontEncode*/ true);
- InsertTestBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- // CMP STEP_DOCUMENT, [&stepController->StepType]
- // BEQ BailoutDocumentLabel
- opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
- opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
- doGenerateBailOutDocumentBlock = true;
- bailOutKind ^= IR::BailOutStep;
- }
- // on method exit
- if (bailOutKind & IR::BailOutStackFrameBase)
- {
- // CMP EffectiveFrameBase, [&stepController->frameAddrWhenSet]
- // BA bailoutLabel
- RegNum effectiveFrameBaseReg;
- #ifdef _M_X64
- effectiveFrameBaseReg = m_lowererMD.GetRegStackPointer();
- #else
- effectiveFrameBaseReg = m_lowererMD.GetRegFramePointer();
- #endif
- IR::Opnd* opnd1 = IR::RegOpnd::New(nullptr, effectiveFrameBaseReg, TyMachReg, m_func);
- IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugFrameAddressAddr(), TyMachReg, m_func);
- this->InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrGt_A, /*isUnsigned*/ true, bailOutLabel, continueBranchInstr);
- // CMP STEP_DOCUMENT, [&stepController->StepType]
- // BEQ BailoutDocumentLabel
- opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
- opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
- doGenerateBailOutDocumentBlock = true;
- bailOutKind ^= IR::BailOutStackFrameBase;
- }
- if (bailOutKind & IR::BailOutLocalValueChanged)
- {
- int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
- if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
- {
- // CMP [EBP + hasLocalVarChangedStackOffset], 0
- // BNE bailout
- StackSym* sym = StackSym::New(TyInt8, m_func);
- sym->m_offset = hasLocalVarChangedOffset;
- sym->m_allocated = true;
- IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- }
- bailOutKind ^= IR::BailOutLocalValueChanged;
- }
- if (doGenerateBailOutDocumentBlock)
- {
- // GENERATE the BailoutDocumentLabel
- // bailOutDocumentLabel:
- // CMP CurrentScriptId, [&stepController->ScriptIdWhenSet]
- // BEQ ContinueLabel
- // bailOutLabel: // (fallthrough bailOutLabel)
- IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetScriptIdAddr(), TyInt32, m_func);
- IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugScriptIdWhenSetAddr(), TyInt32, m_func);
- IR::RegOpnd* reg1 = IR::RegOpnd::New(TyInt32, m_func);
- InsertMove(reg1, opnd2, bailOutLabel);
- InsertCompareBranch(opnd1, reg1, Js::OpCode::BrEq_A, /*isUnsigned*/ true, continueLabel, bailOutLabel);
- }
- AssertMsg(bailOutKind == (IR::BailOutKind)0, "Some of the bits in BailOutKind were not processed!");
- // Note: at this time the 'instr' is in between bailoutLabel and continueLabel.
- }
- else
- {
- // For explicit/unconditional bailout use label which is not a helper, otherwise we would get a helper in main code path
- // which breaks helper label consistency (you can only get to helper from a conditional branch in main code), see DbCheckPostLower.
- explicitBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- }
- this->GenerateBailOut(instr, nullptr, explicitBailOutLabel);
- return prevInstr;
- }
- #endif
- // Lowers BailOnException: unconditional bailout taken on the exception
- // path; no guard is emitted, only the bailout call itself.
- IR::Instr*
- Lowerer::LowerBailOnException(IR::Instr * instr)
- {
- Assert(instr->HasBailOutInfo());
- IR::Instr * instrPrev = instr->m_prev;
- this->GenerateBailOut(instr, nullptr, nullptr);
- return instrPrev;
- }
- // Lowers BailOnEarlyExit: unconditional bailout, structurally identical to
- // LowerBailOnException — only the bailout call is generated.
- IR::Instr*
- Lowerer::LowerBailOnEarlyExit(IR::Instr * instr)
- {
- Assert(instr->HasBailOutInfo());
- IR::Instr * instrPrev = instr->m_prev;
- this->GenerateBailOut(instr, nullptr, nullptr);
- return instrPrev;
- }
- // Generate BailOut Lowerer Instruction if the value is INT_MIN.
- // It it's not INT_MIN, we continue without bailout.
- // For a constant src1 the check is resolved at JIT time: either an
- // unconditional bailout (value == INT_MIN) or no code at all. If the
- // instruction has no bailout info it is simply removed.
- // Returns the continue label so lowering resumes after the inserted code.
- IR::Instr *
- Lowerer::LowerBailOnIntMin(IR::Instr *instr, IR::BranchInstr *branchInstr /* = nullptr */, IR::LabelInstr *labelBailOut /* = nullptr */)
- {
- Assert(instr);
- Assert(instr->GetSrc1());
- IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- instr->InsertAfter(continueLabelInstr);
- if(!instr->HasBailOutInfo())
- {
- instr->Remove();
- }
- else
- {
- Assert(instr->GetBailOutKind() == IR::BailOnIntMin);
- // Note: src1 must be int32 at this point.
- if (instr->GetSrc1()->IsIntConstOpnd())
- {
- // For consts we can check the value at JIT time. Note: without this check we'll have to legalize the CMP instr.
- IR::IntConstOpnd* intConst = instr->UnlinkSrc1()->AsIntConstOpnd();
- if (intConst->GetValue() == INT_MIN)
- {
- this->GenerateBailOut(instr, branchInstr, labelBailOut);
- intConst->Free(instr->m_func);
- }
- else
- {
- instr->Remove();
- }
- }
- else
- {
- InsertCompareBranch(instr->UnlinkSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, this->m_func), Js::OpCode::BrNeq_A, continueLabelInstr, instr);
- this->GenerateBailOut(instr, branchInstr, labelBailOut);
- }
- }
- return continueLabelInstr;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerBailOnNotString
- /// Generate BailOut Lowerer Instruction if not a String
- ///
- /// If the value type already proves src1 is a string, the bailout info is
- /// simply cleared. Otherwise src1 is copied via a MOV, a string test is
- /// generated (helper label on failure), and the bailout is emitted on the
- /// helper path.
- ///----------------------------------------------------------------------------
- void Lowerer::LowerBailOnNotString(IR::Instr *instr)
- {
- if (!instr->GetSrc1()->GetValueType().IsString())
- {
- /*Creating a MOV instruction*/
- IR::Instr * movInstr = IR::Instr::New(instr->m_opcode, instr->UnlinkDst(), instr->UnlinkSrc1(), instr->m_func);
- instr->InsertBefore(movInstr);
- IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- IR::LabelInstr *helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- instr->InsertAfter(continueLabelInstr);
- IR::RegOpnd *srcReg = movInstr->GetSrc1()->IsRegOpnd() ? movInstr->GetSrc1()->AsRegOpnd() : nullptr;
- this->GenerateStringTest(srcReg, instr, helperLabelInstr, continueLabelInstr);
- this->GenerateBailOut(instr, nullptr, helperLabelInstr);
- }
- else
- {
- instr->ClearBailOutInfo();
- }
- }
- // Splits out a single bailout kind from 'instr' into its own BailOut
- // instruction inserted after 'instr', sharing the same BailOutInfo.
- // Remaining kinds stay on 'instr' (with a shared bailout point created if
- // needed); when none remain, the bailout info is unlinked from 'instr'.
- // The caller is responsible for emitting the branch that decides whether
- // the new bailout is taken.
- void Lowerer::LowerOneBailOutKind(
- IR::Instr *const instr,
- const IR::BailOutKind bailOutKindToLower,
- const bool isInHelperBlock,
- const bool preserveBailOutKindInInstr)
- {
- Assert(instr);
- Assert(bailOutKindToLower);
- Assert(!(bailOutKindToLower & IR::BailOutKindBits) || !(bailOutKindToLower & bailOutKindToLower - 1u));
- Func *const func = instr->m_func;
- // Split bailouts other than the one being lowered here
- BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- Assert(
- bailOutKindToLower & IR::BailOutKindBits
- ? bailOutKind & bailOutKindToLower
- : (bailOutKind & ~IR::BailOutKindBits) == bailOutKindToLower);
- if(!preserveBailOutKindInInstr)
- {
- bailOutKind -= bailOutKindToLower;
- }
- if(bailOutKind)
- {
- if(bailOutInfo->bailOutInstr == instr)
- {
- // Create a shared bailout point for the split bailout checks
- IR::Instr *const sharedBail = instr->ShareBailOut();
- Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
- GenerateBailOut(sharedBail);
- }
- instr->SetBailOutKind(bailOutKind);
- }
- else
- {
- instr->UnlinkBailOutInfo();
- if(bailOutInfo->bailOutInstr == instr)
- {
- bailOutInfo->bailOutInstr = nullptr;
- }
- }
- IR::Instr *const insertBeforeInstr = instr->m_next;
- // (Bail out with the requested bail out kind)
- IR::BailOutInstr *const bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOut, bailOutKindToLower, bailOutInfo, func);
- bailOutInstr->SetByteCodeOffset(instr);
- insertBeforeInstr->InsertBefore(bailOutInstr);
- GenerateBailOut(bailOutInstr);
- // The caller is expected to generate code to decide whether to bail out
- }
- // Splits a combined not-array + missing-value bailout into two bailout
- // instructions sharing one BailOutInfo: 'instr' keeps the not-array kind
- // (returned via bailOnNotArrayRef) and a new BailOnNotArray instruction
- // with BailOutOnMissingValue is inserted after it (bailOnMissingValueRef).
- // When 'instr' carries only the not-array kind, no split is performed and
- // bailOnMissingValueRef is set to nullptr.
- void Lowerer::SplitBailOnNotArray(
- IR::Instr *const instr,
- IR::Instr * *const bailOnNotArrayRef,
- IR::Instr * *const bailOnMissingValueRef)
- {
- Assert(instr);
- Assert(!instr->GetDst());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->IsRegOpnd());
- Assert(!instr->GetSrc2());
- Assert(bailOnNotArrayRef);
- Assert(bailOnMissingValueRef);
- IR::Instr *&bailOnNotArray = *bailOnNotArrayRef;
- IR::Instr *&bailOnMissingValue = *bailOnMissingValueRef;
- bailOnNotArray = instr;
- bailOnMissingValue = nullptr;
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if(bailOutKind == IR::BailOutOnNotArray ||
- bailOutKind == IR::BailOutOnNotNativeArray)
- {
- return;
- }
- // Split array checks
- BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
- if(bailOutInfo->bailOutInstr == instr)
- {
- // Create a shared bailout point for the split bailout checks
- IR::Instr *const sharedBail = instr->ShareBailOut();
- Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
- LowerBailTarget(sharedBail);
- }
- bailOutKind -= IR::BailOutOnMissingValue;
- Assert(bailOutKind == IR::BailOutOnNotArray ||
- bailOutKind == IR::BailOutOnNotNativeArray);
- instr->SetBailOutKind(bailOutKind);
- Func *const func = bailOutInfo->bailOutFunc;
- IR::Instr *const insertBeforeInstr = instr->m_next;
- // Split missing value checks
- bailOnMissingValue = IR::BailOutInstr::New(Js::OpCode::BailOnNotArray, IR::BailOutOnMissingValue, bailOutInfo, func);
- bailOnMissingValue->SetByteCodeOffset(instr);
- insertBeforeInstr->InsertBefore(bailOnMissingValue);
- }
- // Lowers BailOnNotArray: runs the array test on src1, falling through to a
- // skip label when it is an array and branching into the bailout otherwise.
- // Returns the array-typed register operand produced by GenerateArrayTest so
- // a following missing-value check can reuse it.
- IR::RegOpnd *Lowerer::LowerBailOnNotArray(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(!instr->GetDst());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->IsRegOpnd());
- Assert(!instr->GetSrc2());
- Func *const func = instr->m_func;
- // Label to jump to (or fall through to) when bailing out
- const auto bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
- instr->InsertBefore(bailOutLabel);
- // Label to jump to when not bailing out
- const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- instr->InsertAfter(skipBailOutLabel);
- // Do the array tests and jump to bailOutLabel if it's not an array. Fall through if it is an array.
- IR::RegOpnd *const arrayOpnd =
- GenerateArrayTest(instr->UnlinkSrc1()->AsRegOpnd(), bailOutLabel, bailOutLabel, bailOutLabel, true);
- // Skip bail-out when it is an array
- InsertBranch(Js::OpCode::Br, skipBailOutLabel, bailOutLabel);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- GenerateBailOut(instr);
- return arrayOpnd;
- }
- // Lowers the missing-value bailout split off by SplitBailOnNotArray:
- // tests the array's HasNoMissingValues flag and skips the bailout when it
- // is set; otherwise execution falls through into the generated bailout.
- // 'arrayOpnd' is the array register produced by LowerBailOnNotArray.
- void Lowerer::LowerBailOnMissingValue(IR::Instr *const instr, IR::RegOpnd *const arrayOpnd)
- {
- Assert(instr);
- Assert(!instr->GetDst());
- Assert(!instr->GetSrc1());
- Assert(!instr->GetSrc2());
- Assert(arrayOpnd);
- Assert(arrayOpnd->GetValueType().IsArrayOrObjectWithArray());
- Func *const func = instr->m_func;
- // Label to jump to when not bailing out
- const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- instr->InsertAfter(skipBailOutLabel);
- // Skip bail-out when the array has no missing values
- //
- // test [array + offsetOf(objectArrayOrFlags)], Js::DynamicObjectFlags::HasNoMissingValues
- // jnz $skipBailOut
- const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func);
- CompileAssert(
- static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
- Js::DynamicObjectFlags::HasNoMissingValues);
- InsertTestBranch(
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
- IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
- Js::OpCode::BrNeq_A,
- skipBailOutLabel,
- instr);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- GenerateBailOut(instr);
- }
- void Lowerer::LowerBailOnInvalidatedArrayHeadSegment(IR::Instr *const instr, const bool isInHelperBlock)
- {
- /*
- // Generate checks for whether the head segment or the head segment length changed during the helper call
- if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym()))
- {
- // Record the array head segment before the helper call
- headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
- }
- if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentLengthSym()))
- {
- // Record the array head segment length before the helper call
- if(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym())
- {
- mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
- }
- else
- {
- headSegmentLengthBeforeHelperCall =
- Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
- }
- }
- helperCall:
- (Helper call and other bailout checks)
- // If the array has a different head segment or head segment length after the helper call, then this store needs to bail
- // out
- invalidatedHeadSegment =
- JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
- headSegmentBeforeHelperCall,
- headSegmentLengthBeforeHelperCall,
- base)
- test invalidatedHeadSegment, invalidatedHeadSegment
- jz $skipBailOut
- (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
- $skipBailOut:
- */
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
- Assert(instr->GetDst());
- Assert(instr->GetDst()->IsIndirOpnd());
- Func *const func = instr->m_func;
- IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
- const ValueType baseValueType(baseOpnd->GetValueType());
- Assert(!baseValueType.IsNotArrayOrObjectWithArray());
- const bool isArrayOrObjectWithArray = baseValueType.IsArrayOrObjectWithArray();
- IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;
- IR::RegOpnd *headSegmentBeforeHelperCallOpnd;
- IR::AutoReuseOpnd autoReuseHeadSegmentBeforeHelperCallOpnd;
- if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
- {
- headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentSym(), TyMachPtr, func);
- autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
- }
- else
- {
- // Record the array head segment before the helper call
- // headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
- m_lowererMD.LoadHelperArgument(instr, baseOpnd);
- IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
- headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
- autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
- callInstr->SetDst(headSegmentBeforeHelperCallOpnd);
- instr->InsertBefore(callInstr);
- m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentForArrayOrObjectWithArray);
- }
- IR::RegOpnd *headSegmentLengthBeforeHelperCallOpnd;
- IR::AutoReuseOpnd autoReuseHeadSegmentLengthBeforeHelperCallOpnd;
- if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
- {
- headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentLengthSym(), TyUint32, func);
- autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
- }
- else
- {
- headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
- autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
- if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
- {
- // Record the array head segment length before the helper call
- // mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
- InsertMove(
- headSegmentLengthBeforeHelperCallOpnd,
- IR::IndirOpnd::New(
- headSegmentBeforeHelperCallOpnd,
- Js::SparseArraySegmentBase::GetOffsetOfLength(),
- TyUint32,
- func),
- instr);
- }
- else
- {
- // Record the array head segment length before the helper call
- // headSegmentLengthBeforeHelperCall =
- // Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
- m_lowererMD.LoadHelperArgument(instr, headSegmentBeforeHelperCallOpnd);
- IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
- callInstr->SetDst(headSegmentLengthBeforeHelperCallOpnd);
- instr->InsertBefore(callInstr);
- m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentLength);
- }
- }
- IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
- LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayHeadSegment, isInHelperBlock);
- IR::Instr *const insertBeforeInstr = instr->m_next;
- // If the array has a different head segment or head segment length after the helper call, then this store needs to bail out
- // invalidatedHeadSegment =
- // JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
- // headSegmentBeforeHelperCall,
- // headSegmentLengthBeforeHelperCall,
- // base)
- m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
- m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentLengthBeforeHelperCallOpnd);
- m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentBeforeHelperCallOpnd);
- IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
- IR::RegOpnd *const invalidatedHeadSegmentOpnd = IR::RegOpnd::New(TyUint8, func);
- const IR::AutoReuseOpnd autoReuseInvalidatedHeadSegmentOpnd(invalidatedHeadSegmentOpnd, func);
- callInstr->SetDst(invalidatedHeadSegmentOpnd);
- insertBeforeInstr->InsertBefore(callInstr);
- m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayHeadSegment);
- // test invalidatedHeadSegment, invalidatedHeadSegment
- // jz $skipBailOut
- InsertTestBranch(
- invalidatedHeadSegmentOpnd,
- invalidatedHeadSegmentOpnd,
- Js::OpCode::BrEq_A,
- skipBailOutLabel,
- insertBeforeInstr);
- // (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
- // $skipBailOut:
- }
// Lowers the IR::BailOutOnInvalidatedArrayLength bailout of a store-element / memop instruction that makes a
// helper call. Before the helper call, records the array's length (either by reusing the length sym tracked on
// the ArrayRegOpnd, or by calling a runtime helper). After the helper call, calls
// Jit_OperationInvalidatedArrayLength and bails out if the length changed.
// instr           - StElemI_A / StElemI_A_Strict / Memset / Memcopy with an IndirOpnd dst
// isInHelperBlock - whether this code is emitted inside a helper block (affects label creation)
void Lowerer::LowerBailOnInvalidatedArrayLength(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the length changed during the helper call
        if(!(arrayOpnd && arrayOpnd.LengthSym() && arrayOpnd.LengthSym() != arrayOpnd.HeadSegmentLengthSym()))
        {
            // Record the array length before the helper call
            lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        }
        helperCall:
        (Helper call and other bailout checks)
        // If the array has a different length after the helper call, then this store needs to bail out
        invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
        test invalidatedLength, invalidatedLength
        jz $skipBailOut
        (Bail out with IR::BailOutOnInvalidatedArrayLength)
        $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArray());
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *lengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseLengthBeforeHelperCallOpnd;
    if(arrayOpnd && arrayOpnd->LengthSym() && arrayOpnd->LengthSym() != arrayOpnd->HeadSegmentLengthSym())
    {
        // The JIT already tracks a distinct length sym for this array; reuse it rather than reloading
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->LengthSym(), arrayOpnd->LengthSym()->GetType(), func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array length before the helper call
        //     lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(TyUint32, func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
        callInstr->SetDst(lengthBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayLength);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayLength, isInHelperBlock);
    // The post-helper check code is inserted after 'instr' (i.e. after the helper call it guards)
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different length after the helper call, then this store needs to bail out
    //     invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, lengthBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedLengthOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedLengthOpnd(invalidatedLengthOpnd, func);
    callInstr->SetDst(invalidatedLengthOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayLength);

    //     test invalidatedLength, invalidatedLength
    //     jz $skipBailOut
    InsertTestBranch(
        invalidatedLengthOpnd,
        invalidatedLengthOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnInvalidatedArrayLength)
    // $skipBailOut:
}
// Lowers the IR::BailOutOnMissingValue bailout of a store-element / memop instruction that makes a helper call.
// Before the helper call, captures the array's flags (unless the value type already proves the array has no
// missing values, in which case the known flag constant is used). After the helper call, calls
// Jit_OperationCreatedFirstMissingValue and bails out if this store created the array's first missing value.
// instr           - StElemI_A / StElemI_A_Strict / Memset / Memcopy with an IndirOpnd dst
// isInHelperBlock - whether this code is emitted inside a helper block (affects label creation)
void Lowerer::LowerBailOnCreatedMissingValue(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the first missing value was created during the helper call
        if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
        {
            // Record whether the array has missing values before the helper call
            arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        }
        helperCall:
        (Helper call and other bailout checks)
        // If the array had no missing values before the helper call, and the array has missing values after the helper
        // call, then this store created the first missing value in the array and needs to bail out
        if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
            (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
        test createdFirstMissingValue, createdFirstMissingValue
        jz $skipBailOut
        (Bail out with IR::BailOutOnMissingValue)
        $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());

    IR::Opnd *arrayFlagsBeforeHelperCallOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayFlagsBeforeHelperCallOpnd;
    // The flags are pointer-sized in the runtime object, so pick the matching integer type
    const IRType arrayFlagsType = sizeof(uintptr_t) == sizeof(uint32) ? TyUint32 : TyUint64;
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        //     arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        arrayFlagsBeforeHelperCallOpnd = IR::RegOpnd::New(arrayFlagsType, func);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
        callInstr->SetDst(arrayFlagsBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayFlagsForArrayOrObjectWithArray);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnMissingValue, isInHelperBlock);
    // The post-helper check code is inserted after 'instr' (i.e. after the helper call it guards)
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
    {
        // (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        // The value type proves there were no missing values, so pass the known flag constant instead of a load.
        // A 64-bit immediate must be materialized through an AddrOpnd; a 32-bit one fits in an IntConstOpnd.
        Assert(!arrayFlagsBeforeHelperCallOpnd);
        arrayFlagsBeforeHelperCallOpnd =
            arrayFlagsType == TyUint32
                ? static_cast<IR::Opnd *>(
                    IR::IntConstOpnd::New(
                        static_cast<uintptr_t>(Js::DynamicObjectFlags::HasNoMissingValues),
                        arrayFlagsType,
                        func,
                        true))
                : IR::AddrOpnd::New(
                    reinterpret_cast<void *>(Js::DynamicObjectFlags::HasNoMissingValues),
                    IR::AddrOpndKindConstantVar,
                    func,
                    true);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
    }
    else
    {
        Assert(arrayFlagsBeforeHelperCallOpnd);
    }

    // createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayFlagsBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const createdFirstMissingValueOpnd = IR::RegOpnd::New(TyUint8, func);
    IR::AutoReuseOpnd autoReuseCreatedFirstMissingValueOpnd(createdFirstMissingValueOpnd, func);
    callInstr->SetDst(createdFirstMissingValueOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationCreatedFirstMissingValue);

    //     test createdFirstMissingValue, createdFirstMissingValue
    //     jz $skipBailOut
    InsertCompareBranch(
        createdFirstMissingValueOpnd,
        IR::IntConstOpnd::New(0, createdFirstMissingValueOpnd->GetType(), func, true),
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnMissingValue)
    // $skipBailOut:
}
// Returns an operand that reads the current function object.
// - For an inlinee, the function object comes from the inlinee's dedicated frame slot.
// - Otherwise it comes from the implicit parameter area of the stack frame (a slot sym on ARM, a
//   frame-relative offset of 2 * MachPtr elsewhere).
// - For coroutines the slot holds a GeneratorVirtualScriptFunction, so an extra load is emitted
//   (before insertBeforeInstr) to fetch the real function object from it.
IR::Opnd*
Lowerer::GetFuncObjectOpnd(IR::Instr* insertBeforeInstr)
{
    Func * func = insertBeforeInstr->m_func;
    IR::Opnd *paramOpnd = nullptr;
    if (func->IsInlinee())
    {
        paramOpnd = func->GetInlineeFunctionObjectSlotOpnd();
    }
    else
    {
#if defined(_M_ARM32_OR_ARM64)
        // ARM exposes the implicit parameters through dedicated slot syms
        StackSym * paramSym = this->m_lowererMD.GetImplicitParamSlotSym(0);
#else
        // Elsewhere, address the function object at a fixed offset from the frame
        StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
        this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
        this->m_func->SetHasImplicitParamLoad();
#endif
        paramOpnd = IR::SymOpnd::New(paramSym, TyMachReg, this->m_func);
    }

    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        // the function object for generator calls is a GeneratorVirtualScriptFunction object
        // and we need to return the real JavascriptGeneratorFunction object so grab it before
        // assigning to the dst
        Assert(!func->IsInlinee());
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
        Lowerer::InsertMove(tmpOpnd, paramOpnd, insertBeforeInstr);
        paramOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
    }
    return paramOpnd;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LoadFuncExpression
- ///
- /// Load the function expression to src1 from [ebp + 8]
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LoadFuncExpression(IR::Instr *instrFuncExpr)
- {
- ASSERT_INLINEE_FUNC(instrFuncExpr);
- IR::Opnd *paramOpnd = GetFuncObjectOpnd(instrFuncExpr);
- // mov dst, param
- instrFuncExpr->SetSrc1(paramOpnd);
- LowererMD::ChangeToAssign(instrFuncExpr);
- return instrFuncExpr;
- }
// Lowers a BoundCheck / UnsignedBoundCheck instruction, which asserts:
//     left <= right + offset   (src1 <= src2 + dst)
// into a compare-and-branch to the bailout path. The offset is folded into the comparison where
// possible; when an explicit (right + offset) addition is required, its overflow also bails out.
void Lowerer::LowerBoundCheck(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BoundCheck || instr->m_opcode == Js::OpCode::UnsignedBoundCheck);

#if DBG
    if(instr->m_opcode == Js::OpCode::UnsignedBoundCheck)
    {
        // UnsignedBoundCheck is currently only supported for the pattern:
        //     UnsignedBoundCheck s1 <= s2 + c, where c == 0 || c == -1
        Assert(instr->GetSrc1()->IsRegOpnd());
        Assert(instr->GetSrc1()->IsInt32());
        Assert(instr->GetSrc2());
        Assert(!instr->GetSrc2()->IsIntConstOpnd());
        if(instr->GetDst())
        {
            const int32 c = instr->GetDst()->AsIntConstOpnd()->AsInt32();
            Assert(c == 0 || c == -1);
        }
    }
#endif

    const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKind == IR::BailOutOnArrayAccessHelperCall ||
        bailOutKind == IR::BailOutOnInvalidatedArrayHeadSegment ||
        bailOutKind == IR::BailOutOnFailedHoistedBoundCheck ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
    LowerOneBailOutKind(instr, bailOutKind, false);
    Assert(!instr->HasBailOutInfo());
    // The generated check code goes where 'instr' currently is; 'instr' itself is removed below
    IR::Instr *insertBeforeInstr = instr->m_next;

#if DBG
    // Sanity-checks an operand of the bound check: left must be present; an IntConstOpnd right
    // must be nonzero (a zero constant would have been folded away); register operands are int32/uint32
    const auto VerifyLeftOrRightOpnd = [&](IR::Opnd *const opnd, const bool isRightOpnd)
    {
        if(!opnd)
        {
            Assert(isRightOpnd);
            return;
        }
        if(opnd->IsIntConstOpnd())
        {
            Assert(!isRightOpnd || opnd->AsIntConstOpnd()->GetValue() != 0);
            return;
        }
        Assert(opnd->GetType() == TyInt32 || opnd->GetType() == TyUint32);
    };
#endif

    // left <= right + offset (src1 <= src2 + dst)
    IR::Opnd *leftOpnd = instr->UnlinkSrc1();
    DebugOnly(VerifyLeftOrRightOpnd(leftOpnd, false));
    IR::Opnd *rightOpnd = instr->UnlinkSrc2();
    DebugOnly(VerifyLeftOrRightOpnd(rightOpnd, true));
    Assert(!leftOpnd->IsIntConstOpnd() || rightOpnd && !rightOpnd->IsIntConstOpnd());
    IR::IntConstOpnd *offsetOpnd = instr->GetDst() ? instr->UnlinkDst()->AsIntConstOpnd() : nullptr;
    Assert(!offsetOpnd || offsetOpnd->GetValue() != 0);
    const bool doUnsignedCompare = instr->m_opcode == Js::OpCode::UnsignedBoundCheck;
    instr->Remove();

    Func *const func = insertBeforeInstr->m_func;
    IntConstType offset = offsetOpnd ? offsetOpnd->GetValue() : 0;
    Js::OpCode compareOpCode = Js::OpCode::BrLe_A;
    // Note: negating offset == IntConstMin would overflow, hence the guard
    if(leftOpnd->IsIntConstOpnd() && rightOpnd->IsRegOpnd() && offset != IntConstMin)
    {
        // Put the constants together: swap the operands, negate the offset, and invert the branch
        IR::Opnd *const tempOpnd = leftOpnd;
        leftOpnd = rightOpnd;
        rightOpnd = tempOpnd;
        offset = -offset;
        compareOpCode = Js::OpCode::BrGe_A;
    }

    if(rightOpnd->IsIntConstOpnd())
    {
        // Try to aggregate right + offset into a constant offset
        IntConstType newOffset;
        if(!IntConstMath::Add(offset, rightOpnd->AsIntConstOpnd()->GetValue(), TyInt32, &newOffset))
        {
            // No overflow: the whole right-hand side is now the constant 'offset'
            offset = newOffset;
            rightOpnd = nullptr;
            offsetOpnd = nullptr;
        }
    }

    // Determine if the Add for (right + offset) is necessary, and the op code that will be used for the comparison
    IR::AutoReuseOpnd autoReuseAddResultOpnd;
    if(offset == -1 && compareOpCode == Js::OpCode::BrLe_A)
    {
        // left <= right - 1  ==>  left < right
        offset = 0;
        compareOpCode = Js::OpCode::BrLt_A;
    }
    else if(offset == 1 && compareOpCode == Js::OpCode::BrGe_A)
    {
        // left >= right + 1  ==>  left > right
        offset = 0;
        compareOpCode = Js::OpCode::BrGt_A;
    }
    else if(offset != 0 && rightOpnd)
    {
        // Need to Add (right + offset). If it overflows, bail out.
        IR::LabelInstr *const bailOutLabel = insertBeforeInstr->m_prev->GetOrCreateContinueLabel(true);
        insertBeforeInstr = bailOutLabel;

        //     mov temp, right
        //     add temp, offset
        //     jo $bailOut
        // $bailOut: (insertBeforeInstr)
        Assert(!offsetOpnd || offsetOpnd->GetValue() == offset);
        IR::RegOpnd *const addResultOpnd = IR::RegOpnd::New(TyInt32, func);
        autoReuseAddResultOpnd.Initialize(addResultOpnd, func);
        InsertAdd(
            true,
            addResultOpnd,
            rightOpnd,
            offsetOpnd ? offsetOpnd->UseWithNewType(TyInt32, func) : IR::IntConstOpnd::New(offset, TyInt32, func),
            insertBeforeInstr);
        InsertBranch(LowererMD::MDOverflowBranchOpcode, bailOutLabel, insertBeforeInstr);

        rightOpnd = addResultOpnd;
    }

    //     cmp left, right
    //     jl[e] $skipBailOut
    // $bailOut:
    if(!rightOpnd)
    {
        rightOpnd = IR::IntConstOpnd::New(offset, TyInt32, func);
    }
    InsertCompareBranch(leftOpnd, rightOpnd, compareOpCode, doUnsignedCompare, skipBailOutLabel, insertBeforeInstr);
}
- IR::Instr *
- Lowerer::LowerBailTarget(IR::Instr * instr)
- {
- // this is just a bailout target, just skip over it and generate a label before so other bailout can jump here.
- IR::Instr * prevInstr = instr->m_prev;
- IR::LabelInstr * continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- instr->InsertAfter(continueLabelInstr);
- IR::BranchInstr * skipInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueLabelInstr, this->m_func);
- instr->InsertBefore(skipInstr);
- this->GenerateBailOut(instr);
- return prevInstr;
- }
// Splits an instruction that bails out on implicit calls into two instructions:
// the original operation (returned to the caller through the by-reference 'instr'), followed by a
// BailOnNotEqual that compares the thread's implicit-call flags against ImplicitCall_None. The flags
// are reset before the operation; for pre-op bailouts, implicit calls are additionally disabled around
// the operation and re-enabled afterwards. Returns the BailOnNotEqual instruction.
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr *& instr)
{
    Assert(instr->IsPlainInstr() || instr->IsProfiledInstr());
    const auto bailOutKind = instr->GetBailOutKind();
    Assert(BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind));

    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    Lowerer::InsertMove(implicitCallFlags, noImplicitCall, instr);

    IR::Instr *disableImplicitCallsInstr = nullptr, *enableImplicitCallsInstr = nullptr;
    if(BailOutInfo::WithoutLazyBailOut(bailOutKind) == IR::BailOutOnImplicitCallsPreOp)
    {
        const auto disableImplicitCallAddress =
            m_lowererMD.GenerateMemRef(
                instr->m_func->GetThreadContextInfo()->GetDisableImplicitFlagsAddr(),
                TyInt8,
                instr);

        // Disable implicit calls since they will be called after bailing out
        disableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitCallFlag, TyInt8, instr->m_func, true),
                instr->m_func);
        instr->InsertBefore(disableImplicitCallsInstr);

        // Create instruction for re-enabling implicit calls; it is inserted after the operation below
        enableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, instr->m_func, true),
                instr->m_func);
#if DBG
        enableImplicitCallsInstr->m_noLazyHelperAssert = true;
#endif
    }

    // Move the operation into a new instruction placed before the (reused) bailout instruction
    IR::Instr * bailOutInstr = instr;
    instr = IR::Instr::New(instr->m_opcode, instr->m_func);
    bailOutInstr->TransferTo(instr);
    bailOutInstr->InsertBefore(instr);

    if(disableImplicitCallsInstr)
    {
        // Re-enable implicit calls
        Assert(enableImplicitCallsInstr);
        bailOutInstr->InsertBefore(enableImplicitCallsInstr);

        // Lower both instructions. Lowering an instruction may free the instruction's original operands, so do that last.
        LowererMD::ChangeToAssign(disableImplicitCallsInstr);
        LowererMD::ChangeToAssign(enableImplicitCallsInstr);
    }

    // The original instruction now only performs the bailout check:
    //     BailOnNotEqual implicitCallFlags, ImplicitCall_None
    bailOutInstr->m_opcode = Js::OpCode::BailOnNotEqual;
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    return bailOutInstr;
}
// Splits out the implicit-call bailout of 'instr' into a separate BailOnNotEqual placed before
// 'insertBeforeInstr', resetting the implicit-call flags just before 'helperCall'. The bailout info is
// transferred from 'instr' (whose bailout info is cleared) to the new instruction. Returns the new
// BailOnNotEqual instruction.
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr * instr, IR::Instr * helperCall, IR::Instr * insertBeforeInstr)
{
    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    Lowerer::InsertMove(implicitCallFlags, noImplicitCall, helperCall->m_prev);

    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    if (bailOutInfo->bailOutInstr == instr)
    {
        // Mark the bailout as not yet generated; BailOutInstr::New below picks up the nullptr
        bailOutInfo->bailOutInstr = nullptr;
    }
    IR::Instr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, IR::BailOutOnImplicitCalls, bailOutInfo, bailOutInfo->bailOutFunc);
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    insertBeforeInstr->InsertBefore(bailOutInstr);

    // 'instr' keeps running as a plain instruction; the bailout now lives on bailOutInstr
    instr->ClearBailOutInfo();
    return bailOutInstr;
}
// Split out bailout for debugger into separate bailout instr out of real instr which has bailout for debugger.
// Returns the instr which needs to lower next, which would normally be last of splitted instr.
// IR on input:
// - Real instr with BailOutInfo but it's opcode is not BailForDebugger.
//   - debugger bailout is not shared. In this case we'll have debugger bailout in instr->GetBailOutKind().
//   - debugger bailout is shared. In this case we'll have debugger bailout in instr->GetAuxBailOutKind().
// IR on output:
// - Either of:
//   - real instr, then debuggerBailout -- in case we only had debugger bailout.
//   - real instr with BailOutInfo w/o debugger bailout, then debuggerBailout, then sharedBailout -- in case bailout for debugger was shared w/some other b.o.
IR::Instr* Lowerer::SplitBailForDebugger(IR::Instr* instr)
{
    Assert(m_func->IsJitInDebugMode() && instr->m_opcode != Js::OpCode::BailForDebugger);

    IR::BailOutKind debuggerBailOutKind; // Used for splitted instr.
    BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
    IR::Instr* sharedBailoutInstr = nullptr;

    if (instr->GetBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is not shared.
        Assert(!instr->HasAuxBailOut());
        AssertMsg(!(instr->GetBailOutKind() & ~IR::BailOutForDebuggerBits), "There should only be debugger bailout bits in the instr.");
        debuggerBailOutKind = instr->GetBailOutKind() & IR::BailOutForDebuggerBits;

        // There is no non-debugger bailout in the instr, still can't clear bailout info, as we use it for the splitted instr,
        // but we need to mark the bailout as hasn't been generated yet.
        if (bailOutInfo->bailOutInstr == instr)
        {
            // null will be picked up by following BailOutInstr::New which will change it to new bailout instr.
            bailOutInfo->bailOutInstr = nullptr;
        }

        // Remove bailout info from the original instr which from now on becomes just regular instr, w/o deallocating bailout info.
        instr->ClearBailOutInfo();
    }
    else if (instr->IsBranchInstr() && instr->HasBailOutInfo() && instr->HasAuxBailOut())
    {
        // Branches with shared bailout are lowered in LowerCondBranchCheckBailOut,
        // can't do here because we need to use BranchBailOutRecord but don't know which BrTrue/BrFalse to use for it.
        debuggerBailOutKind = IR::BailOutInvalid;
    }
    else if (instr->HasAuxBailOut() && instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is shared.
        AssertMsg(!(instr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
        debuggerBailOutKind = instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;

        // This will insert SharedBail instr after current instr and set bailOutInfo->bailOutInstr to the shared one.
        sharedBailoutInstr = instr->ShareBailOut();

        // As we extracted aux bail out, invalidate all tracks of it in the instr.
        instr->ResetAuxBailOut();
    }
    else
    {
        AssertMsg(FALSE, "shouldn't get here");
        debuggerBailOutKind = IR::BailOutInvalid;
    }

    if (debuggerBailOutKind != IR::BailOutInvalid)
    {
        // Emit the dedicated BailForDebugger instruction right after the (now plain) instr
        IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
            Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
        instr->InsertAfter(debuggerBailoutInstr);

        // Since we go backwards, we need to process extracted out bailout for debugger first.
        instr = sharedBailoutInstr ? sharedBailoutInstr : debuggerBailoutInstr;
    }

    return instr;
}
- IR::Instr *
- Lowerer::SplitBailOnResultCondition(IR::Instr *const instr) const
- {
- Assert(instr);
- Assert(!instr->IsLowered());
- Assert(
- instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
- instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
- const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
- instr->TransferTo(nonBailOutInstr);
- instr->InsertBefore(nonBailOutInstr);
- return nonBailOutInstr;
- }
- void
- Lowerer::LowerBailOnResultCondition(
- IR::Instr *const instr,
- IR::LabelInstr * *const bailOutLabel,
- IR::LabelInstr * *const skipBailOutLabel)
- {
- Assert(instr);
- Assert(
- instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
- instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
- Assert(bailOutLabel);
- Assert(skipBailOutLabel);
- // Label to jump to (or fall through to) when bailing out. The actual bailout label
- // (bailOutInfo->bailOutInstr->AsLabelInstr()) may be shared, and code may be added to restore values before the jump to the
- // actual bailout label in the cloned bailout case, so always create a new bailout label for this particular path.
- *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /* isOpHelper */);
- instr->InsertBefore(*bailOutLabel);
- // Label to jump to when not bailing out
- *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
- instr->InsertAfter(*skipBailOutLabel);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- GenerateBailOut(instr);
- }
- void
- Lowerer::PreserveSourcesForBailOnResultCondition(IR::Instr *const instr, IR::LabelInstr *const skipBailOutLabel) const
- {
- Assert(instr);
- Assert(!instr->IsLowered());
- Assert(!instr->HasBailOutInfo());
- // Since this instruction may bail out, writing to the destination cannot overwrite one of the sources, or we may lose one
- // of the sources needed to redo the equivalent byte code instruction. Determine if the sources need to be preserved.
- const auto dst = instr->GetDst();
- Assert(dst);
- const auto dstStackSym = dst->GetStackSym();
- if(!dstStackSym || !dstStackSym->HasByteCodeRegSlot())
- {
- // We only need to ensure that a byte-code source is not being overwritten
- return;
- }
- switch(instr->m_opcode)
- {
- // The sources of these instructions don't need restoring, or will be restored in the bailout path
- case Js::OpCode::Neg_I4:
- // In case of overflow or zero, the result is the same as the operand
- case Js::OpCode::Add_I4:
- case Js::OpCode::Sub_I4:
- // In case of overflow, there is always enough information to restore the operands
- return;
- }
- Assert(instr->GetSrc1());
- if(!dst->IsEqual(instr->GetSrc1()) && !(instr->GetSrc2() && dst->IsEqual(instr->GetSrc2())))
- {
- // The destination is different from the sources
- return;
- }
- // The destination is the same as one of the sources and the original sources cannot be restored after the instruction, so
- // use a temporary destination for the result and move it back to the original destination after deciding not to bail out
- LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
- }
- void
- Lowerer::LowerInstrWithBailOnResultCondition(
- IR::Instr *const instr,
- const IR::BailOutKind bailOutKind,
- IR::LabelInstr *const bailOutLabel,
- IR::LabelInstr *const skipBailOutLabel) const
- {
- Assert(instr);
- Assert(!instr->IsLowered());
- Assert(!instr->HasBailOutInfo());
- Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
- Assert(bailOutLabel);
- Assert(instr->m_next == bailOutLabel);
- Assert(skipBailOutLabel);
- // Preserve sources that are overwritten by the instruction if needed
- PreserveSourcesForBailOnResultCondition(instr, skipBailOutLabel);
- // Lower the instruction
- switch(instr->m_opcode)
- {
- case Js::OpCode::Neg_I4:
- LowererMD::LowerInt4NegWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
- break;
- case Js::OpCode::Add_I4:
- LowererMD::LowerInt4AddWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
- break;
- case Js::OpCode::Sub_I4:
- LowererMD::LowerInt4SubWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
- break;
- case Js::OpCode::Mul_I4:
- LowererMD::LowerInt4MulWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
- break;
- case Js::OpCode::Rem_I4:
- m_lowererMD.LowerInt4RemWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
- break;
- default:
- Assert(false); // not implemented
- __assume(false);
- }
- }
- void
- Lowerer::GenerateObjectTestAndTypeLoad(IR::Instr *instrLdSt, IR::RegOpnd *opndBase, IR::RegOpnd *opndType, IR::LabelInstr *labelHelper)
- {
- IR::IndirOpnd *opndIndir;
- if (!opndBase->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(opndBase, instrLdSt, labelHelper);
- }
- opndIndir = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- InsertMove(opndType, opndIndir, instrLdSt);
- }
IR::LabelInstr *
Lowerer::GenerateBailOut(IR::Instr * instr, IR::BranchInstr * branchInstr, IR::LabelInstr *bailOutLabel, IR::LabelInstr * collectRuntimeStatsLabel)
{
    // Lowers a bailout-carrying instruction into a call to the appropriate
    // bailout helper, creating (or reusing) the BailOutRecord and the labels
    // that shared bailout points jump to.
    //   instr                    - the instruction carrying the bailout info; it is turned into a helper Call (or removed/redirected).
    //   branchInstr              - non-null for a conditional-branch bailout; a BranchBailOutRecord is created.
    //   bailOutLabel             - optional pre-created label for the start of the bailout code.
    //   collectRuntimeStatsLabel - optional pre-created helper label placed before the bailout code.
    // Returns the label that begins the generated bailout sequence.
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
    if (instr->IsCloned())
    {
        // A cloned instruction's bailout was already generated; just branch to it.
        Assert(bailOutInstr != instr);

        // Jump to the cloned bail out label
        IR::LabelInstr * bailOutLabelInstr = bailOutInstr->AsLabelInstr();
        IR::BranchInstr * bailOutBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutLabelInstr, this->m_func);
        instr->InsertBefore(bailOutBranch);
        instr->Remove();
        return bailOutLabel;
    }

    // Add helper label to trigger layout.
    if (!collectRuntimeStatsLabel)
    {
        collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    Assert(!collectRuntimeStatsLabel->IsLinked());
    instr->InsertBefore(collectRuntimeStatsLabel);

    if (bailOutInstr != instr)
    {
        // this bailOutInfo is shared, just jump to the bailout target

        // Before jumping, record this call site's bailout kind into the shared
        // record. For OOP JIT the record lives in NativeCodeData and is
        // addressed off the native-code-data base register; for in-proc JIT it
        // can be addressed directly by pointer.
        IR::Opnd * indexOpndForBailOutKind = nullptr;

        int bailOutRecordOffset = 0;
        if (this->m_func->IsOOPJIT())
        {
            bailOutRecordOffset = NativeCodeData::GetDataTotalOffset(bailOutInfo->bailOutRecord);
            indexOpndForBailOutKind = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfBailOutKind()), TyUint32,
#if DBG
                NativeCodeData::GetDataDescription(bailOutInfo->bailOutRecord, this->m_func->m_alloc),
#endif
                m_func, true);

            // The native-code-data base sym must stay live across loop back edges.
            this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            indexOpndForBailOutKind =
                IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfBailOutKind(), TyUint32, this->m_func, IR::AddrOpndKindDynamicBailOutKindRef);
        }
        InsertMove(
            indexOpndForBailOutKind, IR::IntConstOpnd::New(instr->GetBailOutKind(), indexOpndForBailOutKind->GetType(), this->m_func), instr, false);

        // No point in doing this for BailOutFailedEquivalentTypeCheck or BailOutFailedEquivalentFixedFieldTypeCheck,
        // because the respective inline cache is already polymorphic, anyway.
        if (instr->GetBailOutKind() == IR::BailOutFailedTypeCheck || instr->GetBailOutKind() == IR::BailOutFailedFixedFieldTypeCheck)
        {
            // We have a type check bailout that shares a bailout record with other instructions.
            // Generate code to write the cache index into the bailout record before we jump to the call site.
            Assert(bailOutInfo->polymorphicCacheIndex != (uint)-1);
            Assert(bailOutInfo->bailOutRecord);
            IR::Opnd * indexOpnd = nullptr;
            if (this->m_func->IsOOPJIT())
            {
                indexOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfPolymorphicCacheIndex()), TyUint32, m_func);
            }
            else
            {
                indexOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfPolymorphicCacheIndex(), TyUint32, this->m_func);
            }
            InsertMove(
                indexOpnd, IR::IntConstOpnd::New(bailOutInfo->polymorphicCacheIndex, TyUint32, this->m_func), instr, false);
        }

        if (bailOutInfo->bailOutRecord->IsShared())
        {
            // A shared record is used by multiple functions; store which
            // function body is bailing out at this site.
            IR::Opnd *functionBodyOpnd;
            if (this->m_func->IsOOPJIT())
            {
                functionBodyOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + SharedBailOutRecord::GetOffsetOfFunctionBody()), TyMachPtr, m_func);
            }
            else
            {
                functionBodyOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + SharedBailOutRecord::GetOffsetOfFunctionBody(), TyMachPtr, this->m_func);
            }
            InsertMove(
                functionBodyOpnd, CreateFunctionBodyOpnd(instr->m_func), instr, false);
        }

        // GenerateBailOut should have replaced this as a label as we should have already lowered
        // the main bailOutInstr.
        IR::LabelInstr * bailOutTargetLabel = bailOutInstr->AsLabelInstr();
#if DBG
        if (bailOutTargetLabel->m_noHelperAssert)
        {
            collectRuntimeStatsLabel->m_noHelperAssert = true;
        }
#endif
        Assert(bailOutLabel == nullptr || bailOutLabel == bailOutTargetLabel);

        IR::BranchInstr * newBranchInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutTargetLabel, this->m_func);
        instr->InsertAfter(newBranchInstr);
        instr->Remove();
        return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
    }

    // The bailout hasn't been generated yet.
    Assert(!bailOutInstr->IsLabelInstr());

    // Capture the condition for this bailout
    if (bailOutLabel == nullptr)
    {
        // Create a label and place it in the bailout info so that shared bailout point can jump to this one
        if (instr->m_prev->IsLabelInstr())
        {
            // Reuse an immediately preceding helper label instead of stacking a new one.
            bailOutLabel = instr->m_prev->AsLabelInstr();
            Assert(bailOutLabel->isOpHelper);
        }
        else
        {
            bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr->InsertBefore(bailOutLabel);
        }
    }
    else
    {
        instr->InsertBefore(bailOutLabel);
    }

#if DBG
    bailOutLabel->m_noLazyHelperAssert = true;
#endif

#if DBG
    // These opcodes/kinds legitimately reach the bailout label from non-helper
    // paths, so suppress the helper-label assertion for them.
    const IR::BailOutKind bailOutKind = bailOutInstr->GetBailOutKind();
    if (bailOutInstr->m_opcode == Js::OpCode::BailOnNoSimdTypeSpec ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnNoProfile ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnException ||
        bailOutInstr->m_opcode == Js::OpCode::Yield ||
        bailOutKind & (IR::BailOutConventionalTypedArrayAccessOnly |
            IR::BailOutConventionalNativeArrayAccessOnly |
            IR::BailOutOnArrayAccessHelperCall))
    {
        bailOutLabel->m_noHelperAssert = true;
    }
#endif
    // Publish the label through bailOutInfo so later instructions sharing this
    // info take the "bailOutInstr != instr" path above and jump here.
    bailOutInfo->bailOutInstr = bailOutLabel;
    bailOutLabel->m_hasNonBranchRef = true;

    // Create the bail out record
    Assert(bailOutInfo->bailOutRecord == nullptr);
    BailOutRecord * bailOutRecord;
    IR::JnHelperMethod helperMethod;
    if (branchInstr != nullptr)
    {
        // Conditional-branch bailout: record both resume offsets (branch taken
        // vs. not taken) in a BranchBailOutRecord.
        Assert(branchInstr->GetSrc2() == nullptr);
        Assert(branchInstr->GetDst() == nullptr);

        IR::LabelInstr * targetLabel = branchInstr->GetTarget();
        Assert(targetLabel->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);

        uint32 trueOffset;
        uint32 falseOffset;
        IR::Opnd *condOpnd = branchInstr->GetSrc1();
        bool invertTarget = (branchInstr->m_opcode == Js::OpCode::BrFalse_A);

        if (bailOutInfo->isInvertedBranch)
        {
            // Flip the condition
            IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, condOpnd, condOpnd, IR::IntConstOpnd::New(1, TyMachReg, instr->m_func), instr->m_func);
            instr->InsertBefore(subInstr);
            this->m_lowererMD.EmitInt4Instr(subInstr);

            // We should really do a DEC/NEG for a full 2's complement flip from 0/1 to 1/0,
            // but DEC is sufficient to flip from 0/1 to -1/0, which is false/true to true/false...
            //
            // instr->InsertBefore(IR::Instr::New(Js::OpCode::Neg_I4, condOpnd, condOpnd, instr->m_func));

            invertTarget = invertTarget ? false : true;
        }

        if (!invertTarget)
        {
            trueOffset = targetLabel->GetByteCodeOffset();
            falseOffset = bailOutInfo->bailOutOffset;
        }
        else
        {
            falseOffset = targetLabel->GetByteCodeOffset();
            trueOffset = bailOutInfo->bailOutOffset;
        }

        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BranchBailOutRecord, trueOffset, falseOffset, branchInstr->GetByteCodeReg(), instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBranchBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBranchBailOut;
        }
#endif

        // Save the condition. The register allocator will generate arguments.
        bailOutInfo->branchConditionOpnd = branchInstr->GetSrc1()->Copy(branchInstr->m_func);
    }
    else
    {
        if (bailOutInstr->GetBailOutKind() == IR::BailOutShared)
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                SharedBailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
            if (bailOutInfo->isLoopTopBailOutInfo)
            {
                bailOutRecord->SetType(BailOutRecord::BailoutRecordType::SharedForLoopTop);
            }
        }
        else
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                BailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
        }

        helperMethod = IR::HelperSaveAllRegistersAndBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBailOut;
        }
#endif
    }

    // Save the bailout record. The register allocator will generate arguments.
    bailOutInfo->bailOutRecord = bailOutRecord;
#if ENABLE_DEBUG_CONFIG_OPTIONS
    bailOutRecord->bailOutOpcode = bailOutInfo->bailOutOpcode;
#endif

    if (instr->m_opcode == Js::OpCode::BailOnNotStackArgs && instr->GetSrc1())
    {
        // src1 on BailOnNotStackArgs is helping CSE
        instr->FreeSrc1();
    }

    if (instr->GetSrc2() != nullptr)
    {
        // Ideally we should never be in this situation but incase we reached a
        // condition where we didn't free src2, free it here.
        instr->FreeSrc2();
    }

    // We do not need lazybailout bit on SaveAllRegistersAndBailOut
    if (instr->HasLazyBailOut())
    {
        instr->ClearLazyBailOut();
        Assert(instr->HasBailOutInfo());
    }

    // Call the bail out wrapper
    instr->m_opcode = Js::OpCode::Call;
    if(instr->GetDst())
    {
        // To facilitate register allocation, don't assign a destination. The result will anyway go into the return register,
        // but the register allocator does not need to kill that register for the call.
        instr->FreeDst();
    }
    instr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
    m_lowererMD.LowerCall(instr, 0);

    if (bailOutInstr->GetBailOutKind() != IR::BailOutForGeneratorYield)
    {
        // Defer introducing the JMP to epilog until LowerPrologEpilog phase for Yield bailouts so
        // that Yield does not appear to have flow out of its containing block for the RegAlloc phase.
        // Yield is an unconditional bailout but we want to simulate the flow as if the Yield were
        // just like a call.
        GenerateJumpToEpilogForBailOut(bailOutInfo, instr);
    }

    return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
}
- void
- Lowerer::GenerateJumpToEpilogForBailOut(BailOutInfo * bailOutInfo, IR::Instr *instr)
- {
- IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
- // JMP to the epilog
- IR::LabelInstr * exitTargetInstr;
- if (exitPrevInstr->IsLabelInstr())
- {
- exitTargetInstr = exitPrevInstr->AsLabelInstr();
- }
- else
- {
- exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- exitPrevInstr->InsertAfter(exitTargetInstr);
- }
- exitTargetInstr = m_lowererMD.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
- IR::Instr * instrAfter = instr->m_next;
- IR::BranchInstr * exitInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, this->m_func);
- instrAfter->InsertBefore(exitInstr);
- }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastCondBranch
///
///     Emit an inline tagged-int compare-and-branch fast path for a
///     conditional branch. Returns true when the caller still needs to emit
///     the normal helper call sequence (setting *pIsHelper when the helper
///     label was emitted here); returns false when both sources are known
///     tagged ints and the branch was fully lowered inline.
///
///----------------------------------------------------------------------------
bool
Lowerer::GenerateFastCondBranch(IR::BranchInstr * instrBranch, bool *pIsHelper)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints
    //
    // Given:
    //
    //      Brxx_A $L, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    //      Jxx $L, src1, src2
    //      JMP $fallthru
    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:

    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru;
    IR::BranchInstr * instr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrBranch->GetSrc1();
    opndSrc2 = instrBranch->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "BrC expects 2 src operands");

    // Not tagged ints?
    // If either source is known not to be an int there is no fast path;
    // leave the branch for the caller's helper sequence.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints?
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        // Not provably tagged ints: emit the runtime pair test that jumps to
        // $helper when either operand is not a tagged int.
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        this->m_lowererMD.GenerateSmIntPairTest(instrBranch, opndSrc1, opndSrc2, labelHelper);
    }

    //      Jxx $L, src1, src2
    // Compare the operands as 32-bit ints and branch to the original target.
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    instr = IR::BranchInstr::New(instrBranch->m_opcode, instrBranch->GetTarget(), opndSrc1, opndSrc2, this->m_func);
    instrBranch->InsertBefore(instr);
    this->m_lowererMD.LowerCondBranch(instr);

    if (isTaggedInts)
    {
        instrBranch->Remove();

        // Skip lowering call to helper
        return false;
    }

    //      JMP $fallthru
    // Reuse an existing label after the branch as the fall-through target,
    // otherwise create one.
    IR::Instr *instrNext = instrBranch->GetNextRealInstrOrLabel();

    if (instrNext->IsLabelInstr())
    {
        labelFallThru = instrNext->AsLabelInstr();
    }
    else
    {
        labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /**pIsHelper*/FALSE);
        instrBranch->InsertAfter(labelFallThru);
    }
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallThru, this->m_func);
    instrBranch->InsertBefore(instr);

    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:

    AssertMsg(labelHelper, "Should not be NULL");
    instrBranch->InsertBefore(labelHelper);

    *pIsHelper = true;
    return true;
}
IR::Instr *
Lowerer::LowerInlineeStart(IR::Instr * inlineeStartInstr)
{
    // Lowers InlineeStart: converts the pending ArgOut chain into plain moves
    // and lays out the inlinee frame's meta arguments (function object, argc,
    // etc.) ahead of the inlined body. Returns the instruction preceding the
    // original InlineeStart so the caller can continue lowering from there.
    IR::Opnd *linkOpnd = inlineeStartInstr->GetSrc2();
    if (!linkOpnd)
    {
        // No arg link: the inline args were optimized away; nothing to lower here.
        Assert(inlineeStartInstr->m_func->m_hasInlineArgsOpt);
        return inlineeStartInstr->m_prev;
    }

    AssertMsg(inlineeStartInstr->m_func->firstActualStackOffset != -1, "This should have been already done in backward pass");

    IR::Instr *startCall;

    // Free the argOut links and lower them to MOVs
    inlineeStartInstr->IterateArgInstrs([&](IR::Instr* argInstr){
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        startCall = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        argInstr->FreeSrc2();
#pragma prefast(suppress:6235, "Non-Zero Constant in Condition")
        if (!PHASE_ON(Js::EliminateArgoutForInlineePhase, this->m_func) || inlineeStartInstr->m_func->GetJITFunctionBody()->HasOrParentHasArguments())
        {
            m_lowererMD.ChangeToAssign(argInstr);
        }
        else
        {
            // Elidable arg store: retag it rather than lowering it to a move.
            argInstr->m_opcode = Js::OpCode::ArgOut_A_InlineBuiltIn;
        }

        return false;
    });

    // Lay out the meta args in front of the InlineeStart, in order.
    IR::Instr *argInsertInstr = inlineeStartInstr;
    uint i = 0;
    inlineeStartInstr->IterateMetaArgs( [&] (IR::Instr* metaArg)
    {
        if(i == 0)
        {
            // First, null out the next inlinee frame's arg-count slot.
            Lowerer::InsertMove(metaArg->m_func->GetNextInlineeFrameArgCountSlotOpnd(),
                                IR::AddrOpnd::NewNull(metaArg->m_func),
                                argInsertInstr);
        }
        if (i == Js::Constants::InlineeMetaArgIndex_FunctionObject)
        {
            metaArg->SetSrc1(inlineeStartInstr->GetSrc1());
        }
        metaArg->Unlink();
        argInsertInstr->InsertBefore(metaArg);
        IR::Instr* prev = metaArg->m_prev;
        m_lowererMD.ChangeToAssign(metaArg);
        if (i == Js::Constants::InlineeMetaArgIndex_Argc)
        {
            // ChangeToAssign may expand the move (e.g. ARM emits an LDIMM first);
            // pick up the instruction that actually carries the argc constant.
#if defined(_M_IX86) || defined(_M_X64)
            Assert(metaArg == prev->m_next);
#else //defined(_M_ARM)
            Assert(prev->m_next->m_opcode == Js::OpCode::LDIMM);
#endif
            metaArg = prev->m_next;
            // The argc constant is marked dontEncode; flag the instruction as
            // the inlinee entry so later phases can identify it.
            Assert(metaArg->GetSrc1()->AsIntConstOpnd()->m_dontEncode == true);
            metaArg->isInlineeEntryInstr = true;
            LowererMD::Legalize(metaArg);
        }
        argInsertInstr = metaArg;
        i++;
        return false;
    });

    IR::Instr* prev = inlineeStartInstr->m_prev;
    if (inlineeStartInstr->m_func->m_hasInlineArgsOpt)
    {
        // Args were optimized: keep the InlineeStart itself but drop its operands.
        inlineeStartInstr->FreeSrc1();
        inlineeStartInstr->FreeSrc2();
        inlineeStartInstr->FreeDst();
    }
    else
    {
        inlineeStartInstr->Remove();
    }
    return prev;
}
- void
- Lowerer::LowerInlineeEnd(IR::Instr *instr)
- {
- Assert(instr->m_func->IsInlinee());
- Assert(m_func->IsTopFunc());
- // No need to emit code if the function wasn't marked as having implicit calls or bailout. Dead-Store should have removed inline overhead.
- if (instr->m_func->GetHasImplicitCalls() || PHASE_OFF(Js::DeadStorePhase, this->m_func))
- {
- Lowerer::InsertMove(instr->m_func->GetInlineeArgCountSlotOpnd(),
- IR::IntConstOpnd::New(0, TyMachReg, instr->m_func),
- instr);
- }
- // Keep InlineeEnd around as it is used by register allocator, if we have optimized the arguments stack
- if (instr->m_func->m_hasInlineArgsOpt)
- {
- instr->FreeSrc1();
- }
- else
- {
- instr->Remove();
- }
- }
- IR::Instr *
- Lowerer::LoadFloatFromNonReg(IR::Opnd * opndSrc, IR::Opnd * opndDst, IR::Instr * instrInsert)
- {
- double value;
- if (opndSrc->IsAddrOpnd())
- {
- Js::Var var = opndSrc->AsAddrOpnd()->m_address;
- if (Js::TaggedInt::Is(var))
- {
- value = Js::TaggedInt::ToDouble(var);
- }
- else
- {
- value = Js::JavascriptNumber::GetValue(var);
- }
- }
- else if (opndSrc->IsIntConstOpnd())
- {
- if (opndSrc->IsUInt32())
- {
- value = (double)(uint32)opndSrc->AsIntConstOpnd()->GetValue();
- }
- else
- {
- value = (double)opndSrc->AsIntConstOpnd()->GetValue();
- }
- }
- else if (opndSrc->IsFloatConstOpnd())
- {
- value = (double)opndSrc->AsFloatConstOpnd()->m_value;
- }
- else if (opndSrc->IsFloat32ConstOpnd())
- {
- float floatValue = opndSrc->AsFloat32ConstOpnd()->m_value;
- return LowererMD::LoadFloatValue(opndDst, floatValue, instrInsert);
- }
- else
- {
- AssertMsg(0, "Unexpected opnd type");
- value = 0;
- }
- return LowererMD::LoadFloatValue(opndDst, value, instrInsert);
- }
- void
- Lowerer::LoadInt32FromUntaggedVar(IR::Instr *const instrLoad)
- {
- Assert(instrLoad);
- Assert(instrLoad->GetDst());
- Assert(instrLoad->GetDst()->IsRegOpnd());
- Assert(instrLoad->GetDst()->IsInt32());
- Assert(instrLoad->GetSrc1());
- Assert(instrLoad->GetSrc1()->IsRegOpnd());
- Assert(instrLoad->GetSrc1()->IsVar());
- Assert(!instrLoad->GetSrc2());
- // push src
- // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
- // test int32Value, int32Value
- // jne $done
- // (fall through to 'instrLoad'; caller will generate code here)
- // $done:
- // (rest of program)
- Func *const func = instrLoad->m_func;
- IR::LabelInstr *const doneLabel = instrLoad->GetOrCreateContinueLabel();
- // push src
- // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
- StackSym *const int32ValueSym = instrLoad->GetDst()->AsRegOpnd()->m_sym;
- IR::Instr *const instr =
- IR::Instr::New(
- Js::OpCode::Call,
- IR::RegOpnd::New(int32ValueSym, TyInt32, func),
- instrLoad->GetSrc1()->AsRegOpnd(),
- func);
- instrLoad->InsertBefore(instr);
- LowerUnaryHelper(instr, IR::HelperGetNonzeroInt32Value_NoTaggedIntCheck);
- // test int32Value, int32Value
- // jne $done
- InsertCompareBranch(
- IR::RegOpnd::New(int32ValueSym, TyInt32, func),
- IR::IntConstOpnd::New(0, TyInt32, func, true),
- Js::OpCode::BrNeq_A,
- doneLabel,
- instrLoad);
- }
- bool
- Lowerer::GetValueFromIndirOpnd(IR::IndirOpnd *indirOpnd, IR::Opnd **pValueOpnd, IntConstType *pValue)
- {
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- IR::Opnd* valueOpnd = nullptr;
- IntConstType value = 0;
- if (!indexOpnd)
- {
- value = (IntConstType)indirOpnd->GetOffset();
- if (value < 0)
- {
- // Can't do fast path for negative index
- return false;
- }
- valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
- }
- else if (indexOpnd->m_sym->IsIntConst())
- {
- value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
- if (value < 0)
- {
- // Can't do fast path for negative index
- return false;
- }
- valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
- }
- *pValueOpnd = valueOpnd;
- *pValue = value;
- return true;
- }
- void
- Lowerer::GenerateFastBrOnObject(IR::Instr *instr)
- {
- Assert(instr->m_opcode == Js::OpCode::BrOnObject_A);
- IR::RegOpnd *object = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::LabelInstr *done = instr->GetOrCreateContinueLabel();
- IR::LabelInstr *target = instr->AsBranchInstr()->GetTarget();
- IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
- IR::IntConstOpnd *typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, instr->m_func);
- if (!object)
- {
- object = IR::RegOpnd::New(TyVar, m_func);
- Lowerer::InsertMove(object, instr->GetSrc1(), instr);
- }
- // TEST object, 1
- // JNE $done
- // MOV typeRegOpnd, [object + offset(Type)]
- // CMP [typeRegOpnd + offset(TypeId)], TypeIds_LastJavascriptPrimitiveType
- // JGT $target
- // $done:
- m_lowererMD.GenerateObjectTest(object, instr, done);
- InsertMove(typeRegOpnd,
- IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
- instr);
- InsertCompareBranch(
- IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
- typeIdOpnd, Js::OpCode::BrGt_A, target, instr);
- instr->Remove();
- }
void Lowerer::GenerateObjectHeaderInliningTest(IR::RegOpnd *baseOpnd, IR::LabelInstr * target,IR::Instr *insertBeforeInstr)
{
    // Emits a check that branches to 'target' when baseOpnd's type handler
    // reports its inline-slots offset as the object-header-inlined offset,
    // i.e. the object's slots are inlined into its header.
    Assert(baseOpnd);
    Assert(target);
    AssertMsg(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray,
        "Why are we here, when the object is already known not to have an ObjArray");
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    //     mov type, [base + offsetOf(type)]
    // Note: 'opnd' is reused below — it holds the type, then the type handler.
    IR::RegOpnd *const opnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(
        opnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfType(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    //     mov typeHandler, [type + offsetOf(typeHandler)]
    InsertMove(
        opnd,
        IR::IndirOpnd::New(
            opnd,
            Js::DynamicType::GetOffsetOfTypeHandler(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    // Inline-slots offset is a 16-bit field on the type handler.
    IR::IndirOpnd * offsetOfInlineSlotOpnd = IR::IndirOpnd::New(opnd,Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyInt16, func);
    IR::IntConstOpnd * objHeaderInlinedSlotOffset = IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyInt16, func);

    //     CMP [typeHandler + offsetOf(offsetOfInlineSlots)], objHeaderInlinedSlotOffset
    //     JEQ $target
    InsertCompareBranch(
        offsetOfInlineSlotOpnd,
        objHeaderInlinedSlotOffset,
        Js::OpCode::BrEq_A,
        target,
        insertBeforeInstr);
}
- void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper)
- {
- Assert(srcReg);
- if (!srcReg->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(srcReg, instrInsert, labelHelper);
- }
- // CMP [srcReg], Js::DynamicObject::`vtable'
- // JNE $helper
- IR::BranchInstr *branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
- LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
- Js::OpCode::BrNeq_A,
- labelHelper,
- instrInsert);
- InsertObjectPoison(srcReg, branchInstr, instrInsert, false);
- }
// Per-ObjectType vtable constant used when generating array/typed-array vtable
// checks (see GetArrayVtableAddress). VtableInvalid marks object types for
// which no such check applies.
const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ VTableValue::VtableInvalid,
    /* ObjectType::Object                    */ VTableValue::VtableInvalid,
    /* ObjectType::RegExp                    */ VTableValue::VtableInvalid,
    /* ObjectType::ObjectWithArray           */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Array                     */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Int8Array                 */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8Array                */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedArray         */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16Array                */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16Array               */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32Array                */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32Array               */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32Array              */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64Array              */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int8VirtualArray          */ VTableValue::VtableInt8VirtualArray,
    /* ObjectType::Uint8VirtualArray         */ VTableValue::VtableUint8VirtualArray,
    /* ObjectType::Uint8ClampedVirtualArray  */ VTableValue::VtableUint8ClampedVirtualArray,
    /* ObjectType::Int16VirtualArray         */ VTableValue::VtableInt16VirtualArray,
    /* ObjectType::Uint16VirtualArray        */ VTableValue::VtableUint16VirtualArray,
    /* ObjectType::Int32VirtualArray         */ VTableValue::VtableInt32VirtualArray,
    /* ObjectType::Uint32VirtualArray        */ VTableValue::VtableUint32VirtualArray,
    /* ObjectType::Float32VirtualArray       */ VTableValue::VtableFloat32VirtualArray,
    /* ObjectType::Float64VirtualArray       */ VTableValue::VtableFloat64VirtualArray,
    // Mixed typed-array types share the non-virtual typed-array vtables.
    /* ObjectType::Int8MixedArray            */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8MixedArray           */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedMixedArray    */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16MixedArray           */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16MixedArray          */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32MixedArray           */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32MixedArray          */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32MixedArray         */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64MixedArray         */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int64Array                */ VTableValue::VtableInt64Array,
    /* ObjectType::Uint64Array               */ VTableValue::VtableUint64Array,
    /* ObjectType::BoolArray                 */ VTableValue::VtableBoolArray,
    /* ObjectType::CharArray                 */ VTableValue::VtableCharArray
};
// Per-ObjectType offset of the head segment (JavascriptArray) or buffer
// pointer (typed arrays). static_cast<uint32>(-1) marks object types that have
// no such field.
const uint32 Lowerer::OffsetsOfHeadSegment[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ static_cast<uint32>(-1),
    /* ObjectType::Object                    */ static_cast<uint32>(-1),
    /* ObjectType::RegExp                    */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray           */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Array                     */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Int8Array                 */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8Array                */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedArray         */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16Array                */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16Array               */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32Array                */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32Array               */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32Array              */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64Array              */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int8VirtualArray          */ Js::Int8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8VirtualArray         */ Js::Uint8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedVirtualArray  */ Js::Uint8ClampedVirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16VirtualArray         */ Js::Int16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint16VirtualArray        */ Js::Uint16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int32VirtualArray         */ Js::Int32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint32VirtualArray        */ Js::Uint32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float32VirtualArray       */ Js::Float32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float64VirtualArray       */ Js::Float64VirtualArray::GetOffsetOfBuffer(),
    // Mixed typed-array types share the non-virtual typed-array layouts.
    /* ObjectType::Int8MixedArray            */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8MixedArray           */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedMixedArray    */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16MixedArray           */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16MixedArray          */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32MixedArray           */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32MixedArray          */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32MixedArray         */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64MixedArray         */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int64Array                */ Js::Int64Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint64Array               */ Js::Uint64Array::GetOffsetOfBuffer(),
    /* ObjectType::BoolArray                 */ Js::BoolArray::GetOffsetOfBuffer(),
    /* ObjectType::CharArray                 */ Js::CharArray::GetOffsetOfBuffer()
};
// Per-ObjectType offset of the length field. static_cast<uint32>(-1) marks
// object types that have no such field.
const uint32 Lowerer::OffsetsOfLength[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ static_cast<uint32>(-1),
    /* ObjectType::Object                    */ static_cast<uint32>(-1),
    /* ObjectType::RegExp                    */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray           */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Array                     */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Int8Array                 */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8Array                */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedArray         */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16Array                */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16Array               */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32Array                */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32Array               */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32Array              */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64Array              */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int8VirtualArray          */ Js::Int8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8VirtualArray         */ Js::Uint8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedVirtualArray  */ Js::Uint8ClampedVirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int16VirtualArray         */ Js::Int16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint16VirtualArray        */ Js::Uint16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int32VirtualArray         */ Js::Int32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint32VirtualArray        */ Js::Uint32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float32VirtualArray       */ Js::Float32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float64VirtualArray       */ Js::Float64VirtualArray::GetOffsetOfLength(),
    // Mixed typed-array types share the non-virtual typed-array layouts.
    /* ObjectType::Int8MixedArray            */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8MixedArray           */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedMixedArray    */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16MixedArray           */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16MixedArray          */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32MixedArray           */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32MixedArray          */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32MixedArray         */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64MixedArray         */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int64Array                */ Js::Int64Array::GetOffsetOfLength(),
    /* ObjectType::Uint64Array               */ Js::Uint64Array::GetOffsetOfLength(),
    /* ObjectType::BoolArray                 */ Js::BoolArray::GetOffsetOfLength(),
    /* ObjectType::CharArray                 */ Js::CharArray::GetOffsetOfLength()
};
// Per-ObjectType element type used for indirect (element) accesses.
// TyIllegal marks object types with no element accesses.
const IRType Lowerer::IndirTypes[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ TyIllegal,
    /* ObjectType::Object                    */ TyIllegal,
    /* ObjectType::RegExp                    */ TyIllegal,
    /* ObjectType::ObjectWithArray           */ TyVar,
    /* ObjectType::Array                     */ TyVar,
    /* ObjectType::Int8Array                 */ TyInt8,
    /* ObjectType::Uint8Array                */ TyUint8,
    /* ObjectType::Uint8ClampedArray         */ TyUint8,
    /* ObjectType::Int16Array                */ TyInt16,
    /* ObjectType::Uint16Array               */ TyUint16,
    /* ObjectType::Int32Array                */ TyInt32,
    /* ObjectType::Uint32Array               */ TyUint32,
    /* ObjectType::Float32Array              */ TyFloat32,
    /* ObjectType::Float64Array              */ TyFloat64,
    /* ObjectType::Int8VirtualArray          */ TyInt8,
    /* ObjectType::Uint8VirtualArray         */ TyUint8,
    /* ObjectType::Uint8ClampedVirtualArray  */ TyUint8,
    /* ObjectType::Int16VirtualArray         */ TyInt16,
    /* ObjectType::Uint16VirtualArray        */ TyUint16,
    /* ObjectType::Int32VirtualArray         */ TyInt32,
    /* ObjectType::Uint32VirtualArray        */ TyUint32,
    /* ObjectType::Float32VirtualArray       */ TyFloat32,
    /* ObjectType::Float64VirtualArray       */ TyFloat64,
    /* ObjectType::Int8MixedArray            */ TyInt8,
    /* ObjectType::Uint8MixedArray           */ TyUint8,
    /* ObjectType::Uint8ClampedMixedArray    */ TyUint8,
    /* ObjectType::Int16MixedArray           */ TyInt16,
    /* ObjectType::Uint16MixedArray          */ TyUint16,
    /* ObjectType::Int32MixedArray           */ TyInt32,
    /* ObjectType::Uint32MixedArray          */ TyUint32,
    /* ObjectType::Float32MixedArray         */ TyFloat32,
    /* ObjectType::Float64MixedArray         */ TyFloat64,
    /* ObjectType::Int64Array                */ TyInt64,
    /* ObjectType::Uint64Array               */ TyUint64,
    /* ObjectType::BoolArray                 */ TyUint8,
    /* ObjectType::CharArray                 */ TyUint16
};
// Per-ObjectType indir scale (log2 of the element size) used when computing
// element addresses. static_cast<BYTE>(-1) marks object types with no element
// accesses.
const BYTE Lowerer::IndirScales[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ static_cast<BYTE>(-1),
    /* ObjectType::Object                    */ static_cast<BYTE>(-1),
    /* ObjectType::RegExp                    */ static_cast<BYTE>(-1),
    /* ObjectType::ObjectWithArray           */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Array                     */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Int8Array                 */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8Array                */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedArray         */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16Array                */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16Array               */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32Array                */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32Array               */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32Array              */ 2, // log2(sizeof(float))
    /* ObjectType::Float64Array              */ 3, // log2(sizeof(double))
    /* ObjectType::Int8VirtualArray          */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8VirtualArray         */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedVirtualArray  */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16VirtualArray         */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16VirtualArray        */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32VirtualArray         */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32VirtualArray        */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32VirtualArray       */ 2, // log2(sizeof(float))
    /* ObjectType::Float64VirtualArray       */ 3, // log2(sizeof(double))
    /* ObjectType::Int8MixedArray            */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8MixedArray           */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedMixedArray    */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16MixedArray           */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16MixedArray          */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32MixedArray           */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32MixedArray          */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32MixedArray         */ 2, // log2(sizeof(float))
    /* ObjectType::Float64MixedArray         */ 3, // log2(sizeof(double))
    /* ObjectType::Int64Array                */ 3, // log2(sizeof(int64))
    /* ObjectType::Uint64Array               */ 3, // log2(sizeof(uint64))
    /* ObjectType::BoolArray                 */ 0, // log2(sizeof(bool))
    /* ObjectType::CharArray                 */ 1  // log2(sizeof(char16))
};
- VTableValue Lowerer::GetArrayVtableAddress(const ValueType valueType, bool getVirtual)
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- if(valueType.IsLikelyArrayOrObjectWithArray())
- {
- if(valueType.HasIntElements())
- {
- return VTableValue::VtableNativeIntArray;
- }
- else if(valueType.HasFloatElements())
- {
- return VTableValue::VtableNativeFloatArray;
- }
- }
- if (getVirtual && valueType.IsLikelyMixedTypedArrayType())
- {
- return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetMixedToVirtualTypedArrayObjectType())];
- }
- return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
- // Returns the byte offset of the head-segment field for the given (likely)
- // optimized array value type, via the per-ObjectType OffsetsOfHeadSegment table.
- uint32 Lowerer::GetArrayOffsetOfHeadSegment(const ValueType valueType)
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- return OffsetsOfHeadSegment[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
- // Returns the byte offset of the length field for the given (likely) optimized
- // array value type, via the per-ObjectType OffsetsOfLength table.
- uint32 Lowerer::GetArrayOffsetOfLength(const ValueType valueType)
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- return OffsetsOfLength[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
- IRType Lowerer::GetArrayIndirType(const ValueType valueType)
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- if(valueType.IsLikelyArrayOrObjectWithArray())
- {
- if(valueType.HasIntElements())
- {
- return TyInt32;
- }
- else if(valueType.HasFloatElements())
- {
- return TyFloat64;
- }
- }
- return IndirTypes[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
- BYTE Lowerer::GetArrayIndirScale(const ValueType valueType)
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- if(valueType.IsLikelyArrayOrObjectWithArray())
- {
- if(valueType.HasIntElements())
- {
- return 2; // log2(sizeof(int32))
- }
- else if(valueType.HasFloatElements())
- {
- return 3; // log2(sizeof(double))
- }
- }
- return IndirScales[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
- int Lowerer::SimdGetElementCountFromBytes(ValueType arrValueType, uint8 dataWidth)
- {
- Assert(dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
- Assert(arrValueType.IsTypedArray());
- BYTE bpe = 1 << Lowerer::GetArrayIndirScale(arrValueType);
- // round up
- return (int)::ceil(((float)dataWidth) / bpe);
- }
- // Decides whether an array fast path is worth emitting for arrayOpnd, based on its
- // profiled value type and the caller's capabilities (objects-with-arrays, typed
- // arrays, SSE2 dependence for float arrays on x86).
- bool Lowerer::ShouldGenerateArrayFastPath(
- const IR::Opnd *const arrayOpnd,
- const bool supportsObjectsWithArrays,
- const bool supportsTypedArrays,
- const bool requiresSse2ForFloatArrays) const
- {
- Assert(arrayOpnd);
- const ValueType arrayValueType(arrayOpnd->GetValueType());
- if(arrayValueType.IsUninitialized())
- {
- // Don't have info about the value type, better to generate the fast path anyway
- return true;
- }
- if (!arrayValueType.IsLikelyObject())
- {
- if (!arrayValueType.HasBeenObject() || arrayValueType.IsLikelyString())
- {
- return false;
- }
- //We have seen at least once there is an object in the code path. Generate fastpath hoping it to be array.
- //Its nice if we can get all the attributes set but valueType is only 16 bits. Consider expanding the same.
- return true;
- }
- if( (!supportsObjectsWithArrays && arrayValueType.GetObjectType() == ObjectType::ObjectWithArray) ||
- (!supportsTypedArrays && arrayValueType.IsLikelyTypedArray()) )
- {
- // The fast path likely would not hit
- return false;
- }
- if(arrayValueType.GetObjectType() == ObjectType::UninitializedObject)
- {
- // Don't have info about the object type, better to generate the fast path anyway
- return true;
- }
- #ifdef _M_IX86
- if(requiresSse2ForFloatArrays &&
- (
- arrayValueType.GetObjectType() == ObjectType::Float32Array ||
- arrayValueType.GetObjectType() == ObjectType::Float64Array
- ) &&
- !AutoSystemInfo::Data.SSE2Available())
- {
- // Fast paths for float arrays rely on SSE2
- return false;
- }
- #endif
- // Fall through: generate the fast path unless the type is a known un-optimized array.
- return !arrayValueType.IsLikelyAnyUnOptimizedArray();
- }
- // Loads the object-array slot of an ObjectWithArray-typed base object into a fresh
- // register operand whose value type is refined to the definite array form.
- // Emits: mov array, [base + offsetOf(objectArrayOrFlags)]
- IR::RegOpnd *Lowerer::LoadObjectArray(IR::RegOpnd *const baseOpnd, IR::Instr *const insertBeforeInstr)
- {
- Assert(baseOpnd);
- Assert(
- baseOpnd->GetValueType().IsLikelyObject() &&
- baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- // mov array, [base + offsetOf(objectArrayOrFlags)]
- // Preserve array-opnd-ness of the base when copying, but give the copy a new sym.
- IR::RegOpnd *const arrayOpnd =
- baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd()->CopyAsRegOpnd(func) : baseOpnd->Copy(func)->AsRegOpnd();
- arrayOpnd->m_sym = StackSym::New(TyVar, func);
- arrayOpnd->SetValueType(arrayOpnd->GetValueType().ToArray());
- const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func, false /* autoDelete */);
- InsertMove(
- arrayOpnd,
- IR::IndirOpnd::New(
- baseOpnd,
- Js::DynamicObject::GetOffsetOfObjectArray(),
- arrayOpnd->GetType(),
- func),
- insertBeforeInstr);
- return arrayOpnd;
- }
- void
- Lowerer::GenerateIsEnabledArraySetElementFastPathCheck(
- IR::LabelInstr * isDisabledLabel,
- IR::Instr * const insertBeforeInstr)
- {
- InsertCompareBranch(
- this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable),
- LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableInvalid),
- Js::OpCode::BrEq_A,
- isDisabledLabel,
- insertBeforeInstr);
- }
- // Emits the runtime checks proving that baseOpnd is the array kind its profiled
- // value type claims, branching to isNotObjectLabel / isNotArrayLabel on failure.
- // Returns a register operand holding the (now definite) array, with its value type
- // refined accordingly. forceFloat converts a native int array to float in place
- // (then bails); isStore selects the set-element-fast-path vtable checks.
- IR::RegOpnd *Lowerer::GenerateArrayTest(
- IR::RegOpnd *const baseOpnd,
- IR::LabelInstr *const isNotObjectLabel,
- IR::LabelInstr *const isNotArrayLabel,
- IR::Instr *const insertBeforeInstr,
- const bool forceFloat,
- const bool isStore,
- const bool allowDefiniteArray)
- {
- Assert(baseOpnd);
- const ValueType baseValueType(baseOpnd->GetValueType());
- // Shouldn't request to do an array test when it's already known to be an array, or if it's unlikely to be an array
- Assert(!baseValueType.IsAnyOptimizedArray() || allowDefiniteArray || baseValueType.IsNativeArray());
- Assert(baseValueType.IsUninitialized() || baseValueType.HasBeenObject());
- Assert(isNotObjectLabel);
- Assert(isNotArrayLabel);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::RegOpnd *arrayOpnd;
- IR::AutoReuseOpnd autoReuseArrayOpnd;
- if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
- {
- // Only DynamicObject is allowed (DynamicObject vtable is ensured) because some object types have special handling for
- // index properties - arguments object, string object, external object, etc.
- // JavascriptArray::Jit_TryGetArrayForObjectWithArray as well.
- GenerateObjectTypeTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
- GenerateObjectHeaderInliningTest(baseOpnd, isNotArrayLabel, insertBeforeInstr);
- arrayOpnd = LoadObjectArray(baseOpnd, insertBeforeInstr);
- autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
- // test array, array
- // je $isNotArrayLabel
- // test array, 1
- // jne $isNotArrayLabel
- InsertTestBranch(
- arrayOpnd,
- arrayOpnd,
- Js::OpCode::BrEq_A,
- isNotArrayLabel,
- insertBeforeInstr);
- InsertTestBranch(
- arrayOpnd,
- IR::IntConstOpnd::New(1, TyUint8, func, true),
- Js::OpCode::BrNeq_A,
- isNotArrayLabel,
- insertBeforeInstr);
- }
- else
- {
- if(!baseOpnd->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
- }
- arrayOpnd = baseOpnd->Copy(func)->AsRegOpnd();
- if(!baseValueType.IsLikelyAnyOptimizedArray())
- {
- // No useful profile info: assume a likely JS array with possible missing values.
- arrayOpnd->SetValueType(
- ValueType::GetObject(ObjectType::Array)
- .ToLikely()
- .SetHasNoMissingValues(false)
- .SetArrayTypeId(Js::TypeIds_Array));
- }
- autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
- }
- VTableValue vtableAddress = baseValueType.IsLikelyAnyOptimizedArray()
- ? GetArrayVtableAddress(baseValueType)
- : VTableValue::VtableJavascriptArray;
- VTableValue virtualVtableAddress = VTableValue::VtableInvalid;
- if (baseValueType.IsLikelyMixedTypedArrayType())
- {
- // Mixed typed arrays may present either the normal or the virtual-array vtable.
- virtualVtableAddress = GetArrayVtableAddress(baseValueType, true);
- }
- IR::Opnd * vtableOpnd;
- IR::Opnd * vtableVirtualOpnd = nullptr;
- if (isStore &&
- (vtableAddress == VTableValue::VtableJavascriptArray ||
- baseValueType.IsLikelyNativeArray()))
- {
- // For stores, compare against the set-element-fast-path vtable from the
- // optimization overrides so the check also fails when the fast path is disabled.
- vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
- if (baseValueType.IsLikelyNativeArray())
- {
- if (baseValueType.HasIntElements())
- {
- InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable), insertBeforeInstr);
- }
- else
- {
- Assert(baseValueType.HasFloatElements());
- InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable), insertBeforeInstr);
- }
- }
- else
- {
- InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable), insertBeforeInstr);
- }
- }
- else
- {
- vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, vtableAddress);
- }
- // cmp [array], vtableAddress
- // jne $isNotArrayLabel
- if (forceFloat && baseValueType.IsLikelyNativeFloatArray())
- {
- // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
- const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- IR::BranchInstr* branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
- vtableOpnd,
- Js::OpCode::BrEq_A,
- goodArrayLabel,
- insertBeforeInstr);
- InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
- IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- insertBeforeInstr->InsertBefore(notFloatArrayLabel);
- if (isStore)
- {
- vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
- InsertMove(vtableOpnd, IR::MemRefOpnd::New(
- func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(),
- TyMachPtr, func), insertBeforeInstr);
- }
- else
- {
- vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
- }
- branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
- vtableOpnd,
- Js::OpCode::BrNeq_A,
- isNotArrayLabel,
- insertBeforeInstr);
- InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
- m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
- IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
- insertBeforeInstr->InsertBefore(helperInstr);
- m_lowererMD.ChangeToHelperCall(helperInstr, IR::HelperIntArr_ToNativeFloatArray);
- // Branch to the (bailout) label, because converting the array may have made our array checks unsafe.
- InsertBranch(Js::OpCode::Br, isNotArrayLabel, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(goodArrayLabel);
- }
- else
- {
- IR::LabelInstr* goodArrayLabel = nullptr;
- if (baseValueType.IsLikelyMixedTypedArrayType())
- {
- // Accept either the normal or the virtual typed-array vtable.
- goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- InsertCompareBranch(
- IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
- vtableOpnd,
- Js::OpCode::BrEq_A,
- goodArrayLabel,
- insertBeforeInstr);
- Assert(virtualVtableAddress);
- vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
- Assert(vtableVirtualOpnd);
- IR::BranchInstr* branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
- vtableVirtualOpnd,
- Js::OpCode::BrNeq_A,
- isNotArrayLabel,
- insertBeforeInstr);
- InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
- insertBeforeInstr->InsertBefore(goodArrayLabel);
- }
- else
- {
- IR::BranchInstr *branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
- vtableOpnd,
- Js::OpCode::BrNeq_A,
- isNotArrayLabel,
- insertBeforeInstr);
- InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
- }
- }
- // The checks above prove the object type; refine the value type to definite.
- ValueType arrayValueType(arrayOpnd->GetValueType());
- if(arrayValueType.IsLikelyArrayOrObjectWithArray() && !arrayValueType.IsObject())
- {
- arrayValueType = arrayValueType.SetHasNoMissingValues(false);
- }
- arrayValueType = arrayValueType.ToDefiniteObject();
- arrayOpnd->SetValueType(arrayValueType);
- return arrayOpnd;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::HoistIndirOffset
- ///
- /// Replace the offset of the given indir with a new symbol, which becomes the indir index.
- /// Assign the new symbol by creating an assignment from the constant offset.
- ///
- ///----------------------------------------------------------------------------
- // Replaces the indir's constant offset with a register index holding that constant
- // (delegating to HoistIndirOffsetAsAdd when an index operand is already in use).
- // Returns the inserted assignment/add instruction.
- IR::Instr *Lowerer::HoistIndirOffset(IR::Instr* instr, IR::IndirOpnd *indirOpnd, RegNum regNum)
- {
- int32 offset = indirOpnd->GetOffset();
- if (indirOpnd->GetIndexOpnd())
- {
- // Index slot already taken: fold the offset into the base via an explicit add.
- Assert(indirOpnd->GetBaseOpnd());
- return Lowerer::HoistIndirOffsetAsAdd(instr, indirOpnd, indirOpnd->GetBaseOpnd(), offset, regNum);
- }
- IR::IntConstOpnd *offsetOpnd = IR::IntConstOpnd::New(offset, TyInt32, instr->m_func);
- IR::RegOpnd *indexOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, instr->m_func), regNum, TyMachReg, instr->m_func);
- #if defined(DBG) && defined(_M_ARM)
- // On ARM, verify the scratch register is not simultaneously needed by any operand.
- if (regNum == SCRATCH_REG)
- {
- AssertMsg(indirOpnd->GetBaseOpnd()->GetReg()!= SCRATCH_REG, "Why both are SCRATCH_REG");
- if (instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd())
- {
- Assert(instr->GetSrc1()->AsRegOpnd()->GetReg() != SCRATCH_REG);
- }
- if (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd())
- {
- Assert(instr->GetSrc2()->AsRegOpnd()->GetReg() != SCRATCH_REG);
- }
- if (instr->GetDst() && instr->GetDst()->IsRegOpnd())
- {
- Assert(instr->GetDst()->AsRegOpnd()->GetReg() != SCRATCH_REG);
- }
- }
- #endif
- // Clear the offset and add a new reg as the index.
- indirOpnd->SetOffset(0);
- indirOpnd->SetIndexOpnd(indexOpnd);
- IR::Instr *instrAssign = Lowerer::InsertMove(indexOpnd, offsetOpnd, instr);
- indexOpnd->m_sym->SetIsIntConst(offset);
- return instrAssign;
- }
- IR::Instr *Lowerer::HoistIndirOffsetAsAdd(IR::Instr* instr, IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum)
- {
- IR::RegOpnd *newBaseOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, instr->m_func), regNum, TyMachPtr, instr->m_func);
- IR::IntConstOpnd *src2 = IR::IntConstOpnd::New(offset, TyInt32, instr->m_func);
- IR::Instr * instrAdd = IR::Instr::New(Js::OpCode::Add_A, newBaseOpnd, baseOpnd, src2, instr->m_func);
- LowererMD::ChangeToAdd(instrAdd, false);
- instr->InsertBefore(instrAdd);
- orgOpnd->ReplaceBaseOpnd(newBaseOpnd);
- orgOpnd->SetOffset(0);
- return instrAdd;
- }
- IR::Instr *Lowerer::HoistIndirIndexOpndAsAdd(IR::Instr* instr, IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, IR::Opnd *indexOpnd, RegNum regNum)
- {
- IR::RegOpnd *newBaseOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, instr->m_func), regNum, TyMachPtr, instr->m_func);
- IR::Instr * instrAdd = IR::Instr::New(Js::OpCode::Add_A, newBaseOpnd, baseOpnd, indexOpnd->UseWithNewType(TyMachPtr, instr->m_func), instr->m_func);
- LowererMD::ChangeToAdd(instrAdd, false);
- instr->InsertBefore(instrAdd);
- orgOpnd->ReplaceBaseOpnd(newBaseOpnd);
- orgOpnd->SetIndexOpnd(nullptr);
- return instrAdd;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::HoistSymOffset
- ///
- /// Replace the given sym with an indir using the given base and offset.
- /// (This is used, for instance, to hoist a sym offset that is too large to encode.)
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *Lowerer::HoistSymOffset(IR::Instr *instr, IR::SymOpnd *symOpnd, RegNum baseReg, uint32 offset, RegNum regNum)
- {
- IR::RegOpnd *baseOpnd = IR::RegOpnd::New(nullptr, baseReg, TyMachPtr, instr->m_func);
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, offset, symOpnd->GetType(), instr->m_func);
- if (symOpnd == instr->GetDst())
- {
- instr->ReplaceDst(indirOpnd);
- }
- else
- {
- instr->ReplaceSrc(symOpnd, indirOpnd);
- }
- return Lowerer::HoistIndirOffset(instr, indirOpnd, regNum);
- }
- IR::Instr *Lowerer::HoistSymOffsetAsAdd(IR::Instr* instr, IR::SymOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum)
- {
- IR::IndirOpnd *newIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), 0, TyMachPtr, instr->m_func);
- instr->Replace(orgOpnd, newIndirOpnd); // Replace SymOpnd with IndirOpnd
- return Lowerer::HoistIndirOffsetAsAdd(instr, newIndirOpnd, baseOpnd, offset, regNum);
- }
- IR::LabelInstr *Lowerer::InsertLabel(const bool isHelper, IR::Instr *const insertBeforeInstr)
- {
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::LabelInstr *const instr = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
- insertBeforeInstr->InsertBefore(instr);
- return instr;
- }
- // Convenience wrapper: InsertMove with the software write barrier enabled.
- IR::Instr *Lowerer::InsertMoveWithBarrier(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
- {
- return Lowerer::InsertMove(dst, src, insertBeforeInstr, true);
- }
- // Inserts a lowered move of src into dst before insertBeforeInstr, handling float
- // constants, narrowing (including x86 int64 pair splitting), and the optional
- // write barrier. Returns the inserted (lowered) instruction.
- IR::Instr *Lowerer::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr, bool generateWriteBarrier)
- {
- Assert(dst);
- Assert(src);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- if(dst->IsFloat() && src->IsConstOpnd())
- {
- // Float constants can't be encoded as immediates; load them separately.
- return LoadFloatFromNonReg(src, dst, insertBeforeInstr);
- }
- if(TySize[dst->GetType()] < TySize[src->GetType()])
- {
- #if _M_IX86
- if (IRType_IsInt64(src->GetType()))
- {
- // On x86, if we are trying to move an int64 to a smaller type
- // Insert a move of the low bits into dst
- return InsertMove(dst, func->FindOrCreateInt64Pair(src).low, insertBeforeInstr, generateWriteBarrier);
- }
- else
- #endif
- {
- // Narrow the source to the destination's type.
- src = src->UseWithNewType(dst->GetType(), func);
- }
- }
- IR::Instr * instr = IR::Instr::New(Js::OpCode::Ld_A, dst, src, func);
- insertBeforeInstr->InsertBefore(instr);
- if (generateWriteBarrier)
- {
- instr = LowererMD::ChangeToWriteBarrierAssign(instr, func);
- }
- else
- {
- LowererMD::ChangeToAssignNoBarrierCheck(instr);
- }
- return instr;
- }
- // Convenience overload: inserts a signed-comparison branch (isUnsigned = false).
- IR::BranchInstr *Lowerer::InsertBranch(
- const Js::OpCode opCode,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr)
- {
- return InsertBranch(opCode, false /* isUnsigned */, target, insertBeforeInstr);
- }
- IR::BranchInstr *Lowerer::InsertBranch(
- const Js::OpCode opCode,
- const bool isUnsigned,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(target);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::BranchInstr *const instr = IR::BranchInstr::New(opCode, target, func);
- if(!instr->IsLowered())
- {
- if(opCode == Js::OpCode::Br)
- {
- instr->m_opcode = LowererMD::MDUncondBranchOpcode;
- }
- else if(isUnsigned)
- {
- instr->m_opcode = LowererMD::MDUnsignedBranchOpcode(opCode);
- }
- else
- {
- instr->m_opcode = LowererMD::MDBranchOpcode(opCode);
- }
- }
- insertBeforeInstr->InsertBefore(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertCompare(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
- {
- Assert(src1);
- Assert(!src1->IsFloat64()); // not implemented
- Assert(src2);
- Assert(!src2->IsFloat64()); // not implemented
- Assert(!src1->IsEqual(src2));
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::CMP, func);
- instr->SetSrc1(src1);
- instr->SetSrc2(src2);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- // Convenience overload: signed compare-and-branch (isUnsigned = false).
- IR::BranchInstr *Lowerer::InsertCompareBranch(
- IR::Opnd *const compareSrc1,
- IR::Opnd *const compareSrc2,
- Js::OpCode branchOpCode,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr,
- const bool ignoreNaN)
- {
- return InsertCompareBranch(compareSrc1, compareSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr, ignoreNaN);
- }
- // Inserts a compare followed by a conditional branch to target. Handles float and
- // (x86) int64 comparisons specially; for integer compares it may swap the sources
- // to let a constant fold into the CMP, and prefers TEST over CMP when comparing a
- // register against zero. Returns the branch instruction.
- IR::BranchInstr *Lowerer::InsertCompareBranch(
- IR::Opnd *compareSrc1,
- IR::Opnd *compareSrc2,
- Js::OpCode branchOpCode,
- const bool isUnsigned,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr,
- const bool ignoreNaN)
- {
- Assert(compareSrc1);
- Assert(compareSrc2);
- Func *const func = insertBeforeInstr->m_func;
- if(compareSrc1->IsFloat())
- {
- // Float compares lower to a dedicated float-conditional-branch sequence.
- Assert(compareSrc2->IsFloat());
- Assert(!isUnsigned);
- IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
- insertBeforeInstr->InsertBefore(instr);
- return LowererMD::LowerFloatCondBranch(instr, ignoreNaN);
- }
- #ifdef _M_IX86
- else if (compareSrc1->IsInt64())
- {
- // On x86, int64 compares are emitted as paired 32-bit instructions.
- Assert(compareSrc2->IsInt64());
- IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
- insertBeforeInstr->InsertBefore(instr);
- m_lowererMD.EmitInt64Instr(instr);
- return instr;
- }
- #endif
- Js::OpCode swapSrcsBranchOpCode;
- switch(branchOpCode)
- {
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrNeq_A:
- swapSrcsBranchOpCode = branchOpCode;
- goto Common_BrEqNeqGeGtLeLt;
- case Js::OpCode::BrGe_A:
- swapSrcsBranchOpCode = Js::OpCode::BrLe_A;
- goto Common_BrEqNeqGeGtLeLt;
- case Js::OpCode::BrGt_A:
- swapSrcsBranchOpCode = Js::OpCode::BrLt_A;
- goto Common_BrEqNeqGeGtLeLt;
- case Js::OpCode::BrLe_A:
- swapSrcsBranchOpCode = Js::OpCode::BrGe_A;
- goto Common_BrEqNeqGeGtLeLt;
- case Js::OpCode::BrLt_A:
- swapSrcsBranchOpCode = Js::OpCode::BrGt_A;
- // fall through
- Common_BrEqNeqGeGtLeLt:
- // Check if src1 is a constant and src2 is not, and facilitate folding the constant into the Cmp instruction
- if( (
- compareSrc1->IsIntConstOpnd() ||
- (
- compareSrc1->IsAddrOpnd() &&
- Math::FitsInDWord(reinterpret_cast<size_t>(compareSrc1->AsAddrOpnd()->m_address))
- )
- ) &&
- !compareSrc2->IsIntConstOpnd() &&
- !compareSrc2->IsAddrOpnd())
- {
- // Swap the sources and branch
- IR::Opnd *const tempSrc = compareSrc1;
- compareSrc1 = compareSrc2;
- compareSrc2 = tempSrc;
- branchOpCode = swapSrcsBranchOpCode;
- }
- // Check for compare with zero, to prefer using Test instead of Cmp
- if( !compareSrc1->IsRegOpnd() ||
- !(
- (compareSrc2->IsIntConstOpnd() && compareSrc2->AsIntConstOpnd()->GetValue() == 0) ||
- (compareSrc2->IsAddrOpnd() && !compareSrc2->AsAddrOpnd()->m_address)
- ) ||
- branchOpCode == Js::OpCode::BrGt_A || branchOpCode == Js::OpCode::BrLe_A)
- {
- goto Default;
- }
- if(branchOpCode == Js::OpCode::BrGe_A || branchOpCode == Js::OpCode::BrLt_A)
- {
- if(isUnsigned)
- {
- goto Default;
- }
- // Signed >= 0 / < 0 can key off the sign flag set by TEST.
- branchOpCode = LowererMD::MDCompareWithZeroBranchOpcode(branchOpCode);
- }
- if(!compareSrc2->IsInUse())
- {
- // The zero operand is no longer needed once TEST reg,reg is used.
- compareSrc2->Free(func);
- }
- InsertTest(compareSrc1, compareSrc1, insertBeforeInstr);
- break;
- default:
- Default:
- InsertCompare(compareSrc1, compareSrc2, insertBeforeInstr);
- break;
- }
- return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
- }
- IR::Instr *Lowerer::InsertTest(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
- {
- Assert(src1);
- Assert(!src1->IsFloat64()); // not implemented
- Assert(src2);
- Assert(!src2->IsFloat64()); // not implemented
- #if !TARGET_64
- Assert(!src1->IsInt64()); // not implemented
- Assert(!src2->IsInt64()); // not implemented
- #endif
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDTestOpcode, func);
- instr->SetSrc1(src1);
- instr->SetSrc2(src2);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- // Convenience overload: signed test-and-branch (isUnsigned = false).
- IR::BranchInstr *Lowerer::InsertTestBranch(
- IR::Opnd *const testSrc1,
- IR::Opnd *const testSrc2,
- const Js::OpCode branchOpCode,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr)
- {
- return InsertTestBranch(testSrc1, testSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
- }
- // Inserts a TEST of the two sources followed by a conditional branch to target.
- IR::BranchInstr *Lowerer::InsertTestBranch(
- IR::Opnd *const testSrc1,
- IR::Opnd *const testSrc2,
- const Js::OpCode branchOpCode,
- const bool isUnsigned,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr)
- {
- InsertTest(testSrc1, testSrc2, insertBeforeInstr);
- return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
- }
- /* Inserts add with an overflow check, if we overflow throw OOM
- * add dst, src
- * jno $continueLabel
- * overflow code
- * $continueLabel : fall through
- */
- // Inserts an add followed by a jump-if-no-overflow over the caller's overflow
- // handling; the continue label is returned via onOverflowInsertBeforeInstrRef so
- // the caller can insert the overflow code before it.
- void Lowerer::InsertAddWithOverflowCheck(
- const bool needFlags,
- IR::Opnd *const dst,
- IR::Opnd *src1,
- IR::Opnd *src2,
- IR::Instr *const insertBeforeInstr,
- IR::Instr **const onOverflowInsertBeforeInstrRef)
- {
- Func * func = insertBeforeInstr->m_func;
- InsertAdd(needFlags, dst, src1, src2, insertBeforeInstr);
- IR::LabelInstr *const continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- InsertBranch(LowererMD::MDNotOverflowBranchOpcode, continueLabel, insertBeforeInstr);
- // Caller inserts its overflow code before this label (i.e. on the fall-through
- // path of the not-overflow branch).
- *onOverflowInsertBeforeInstrRef = continueLabel;
- }
- // Inserts a lowered add of src1 + src2 into dst. Negative integer constants are
- // canonicalized into a subtraction of the positive value (except IntConstMin,
- // whose negation would overflow). Returns the inserted instruction.
- IR::Instr *Lowerer::InsertAdd(
- const bool needFlags,
- IR::Opnd *const dst,
- IR::Opnd *src1,
- IR::Opnd *src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- if(src2->IsIntConstOpnd())
- {
- IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
- const IntConstType value = intConstOpnd->GetValue();
- if(value < 0 && value != IntConstMin)
- {
- // Change (s1 = s1 + -5) into (s1 = s1 - 5)
- IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
- newSrc2->SetValue(-value);
- return InsertSub(needFlags, dst, src1, newSrc2, insertBeforeInstr);
- }
- }
- else if(src1->IsIntConstOpnd())
- {
- IR::IntConstOpnd *const intConstOpnd = src1->AsIntConstOpnd();
- const IntConstType value = intConstOpnd->GetValue();
- if(value < 0 && value != IntConstMin)
- {
- // Change (s1 = -5 + s1) into (s1 = s1 - 5)
- IR::Opnd *const newSrc1 = src2;
- IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
- newSrc2->SetValue(-value);
- return InsertSub(needFlags, dst, newSrc1, newSrc2, insertBeforeInstr);
- }
- }
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::Add_A, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::ChangeToAdd(instr, needFlags);
- LowererMD::Legalize(instr);
- return instr;
- }
- // Inserts a lowered subtract of src1 - src2 into dst. A negative constant src2 is
- // canonicalized into an addition of the positive value (except IntConstMin, whose
- // negation would overflow). Returns the inserted instruction.
- IR::Instr *Lowerer::InsertSub(
- const bool needFlags,
- IR::Opnd *const dst,
- IR::Opnd *src1,
- IR::Opnd *src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- if(src2->IsIntConstOpnd())
- {
- IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
- const IntConstType value = intConstOpnd->GetValue();
- if(value < 0 && value != IntConstMin)
- {
- // Change (s1 = s1 - -5) into (s1 = s1 + 5)
- IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
- newSrc2->SetValue(-value);
- return InsertAdd(needFlags, dst, src1, newSrc2, insertBeforeInstr);
- }
- }
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::Sub_A, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::ChangeToSub(instr, needFlags);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertLea(IR::RegOpnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src);
- Assert(src->IsIndirOpnd() || src->IsSymOpnd());
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDLea, dst, src, func);
- insertBeforeInstr->InsertBefore(instr);
- return ChangeToLea(instr);
- }
- // Converts an existing dst = indir/sym instruction in place into the machine LEA
- // form and legalizes it. Returns the same instruction.
- IR::Instr *
- Lowerer::ChangeToLea(IR::Instr * instr)
- {
- Assert(instr);
- Assert(instr->GetDst());
- Assert(instr->GetDst()->IsRegOpnd());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd());
- Assert(!instr->GetSrc2());
- instr->m_opcode = LowererMD::MDLea;
- LowererMD::Legalize(instr);
- return instr;
- }
- #if _M_X64
- // x64 only: inserts a bit-preserving move of a uint64 source into a float64
- // destination (reinterpretation, not numeric conversion).
- IR::Instr *Lowerer::InsertMoveBitCast(
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(dst->GetType() == TyFloat64);
- Assert(src1);
- Assert(src1->GetType() == TyUint64);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDMovUint64ToFloat64Opcode, dst, src1, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- #endif
- IR::Instr *Lowerer::InsertXor(
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDXorOpcode, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertAnd(
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertOr(
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDOrOpcode, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- // Inserts a lowered shift (opCode selects the kind) of src1 by src2 into dst.
- // Integer operands only. Returns the inserted instruction.
- IR::Instr *Lowerer::InsertShift(
- const Js::OpCode opCode,
- const bool needFlags,
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(!dst->IsFloat64()); // not implemented
- Assert(src1);
- Assert(!src1->IsFloat64()); // not implemented
- Assert(src2);
- Assert(!src2->IsFloat64()); // not implemented
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(opCode, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::ChangeToShift(instr, needFlags);
- LowererMD::Legalize(instr);
- return instr;
- }
- // Convenience overload: signed shift-and-branch (isUnsigned = false).
- IR::Instr *Lowerer::InsertShiftBranch(
- const Js::OpCode shiftOpCode,
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- const Js::OpCode branchOpCode,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr)
- {
- return InsertShiftBranch(shiftOpCode, dst, src1, src2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
- }
- IR::Instr *Lowerer::InsertShiftBranch(
- const Js::OpCode shiftOpCode,
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- const Js::OpCode branchOpCode,
- const bool isUnsigned,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr)
- {
- InsertShift(shiftOpCode, true /* needFlags */, dst, src1, src2, insertBeforeInstr);
- return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
- }
- IR::Instr *Lowerer::InsertConvertFloat32ToFloat64(
- IR::Opnd *const dst,
- IR::Opnd *const src,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(dst->IsFloat64());
- Assert(src);
- Assert(src->IsFloat32());
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, src, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertConvertFloat64ToFloat32(
- IR::Opnd *const dst,
- IR::Opnd *const src,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(dst->IsFloat32());
- Assert(src);
- Assert(src->IsFloat64());
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
// Emits dst = src - 1 saturating at zero: if the unsigned decrement would
// underflow (src == 0), dst ends up 0 instead of wrapping. If
// onOverflowInsertBeforeInstrRef is provided, it receives an insertion point
// ($continue) so the caller can add instructions that run only on the
// underflow path.
void Lowerer::InsertDecUInt32PreventOverflow(
    IR::Opnd *const dst,
    IR::Opnd *const src,
    IR::Instr *const insertBeforeInstr,
    IR::Instr * *const onOverflowInsertBeforeInstrRef)
{
    Assert(dst);
    Assert(dst->GetType() == TyUint32);
    Assert(src);
    Assert(src->GetType() == TyUint32);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    // Generate:
    //     subs temp, src, 1
    //     bcs $overflow
    //     mov dst, temp
    //     b $continue
    // $overflow:
    //     mov dst, 0
    // $continue:
    // Note: $overflow is created first; all instructions below are inserted
    // *before* it, so they precede the label in program order.
    IR::LabelInstr *const overflowLabel = Lowerer::InsertLabel(false, insertBeforeInstr);
    //     subs temp, src, 1
    IR::RegOpnd *const tempOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
    const IR::AutoReuseOpnd autoReuseTempOpnd(tempOpnd, func);
    Lowerer::InsertSub(true, tempOpnd, src, IR::IntConstOpnd::New(1, TyUint32, func, true), overflowLabel);
    //     bcs $overflow
    // Unsigned BrLt_A after the flag-setting subtract takes the branch exactly
    // when src < 1, i.e. when src was 0 and the decrement underflowed.
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, true, overflowLabel, overflowLabel);
    //     mov dst, temp
    Lowerer::InsertMove(dst, tempOpnd, overflowLabel);
    const bool dstEqualsSrc = dst->IsEqual(src);
    if(!dstEqualsSrc || onOverflowInsertBeforeInstrRef)
    {
        //     b $continue
        // $overflow:
        //     mov dst, 0
        // $continue:
        IR::LabelInstr *const continueLabel = Lowerer::InsertLabel(false, insertBeforeInstr);
        Lowerer::InsertBranch(Js::OpCode::Br, continueLabel, overflowLabel);
        if(!dstEqualsSrc)
        {
            // On underflow, clamp the result to zero.
            Lowerer::InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, func, true), continueLabel);
        }
        if(onOverflowInsertBeforeInstrRef)
        {
            // Hand the caller the $continue label as the insertion point for
            // extra underflow-path instructions.
            *onOverflowInsertBeforeInstrRef = continueLabel;
        }
    }
    else
    {
        // dst aliases src and no underflow hook was requested: dst already
        // holds 0 on the underflow path (src was 0), so only the label remains.
        // $overflow:
    }
}
// Branches to 'target' based on whether the float64 'src' is zero-or-NaN
// (branchOnZeroOrNan == true) or is neither zero nor NaN (== false).
// 'fallthroughLabel', if provided, names the label immediately following
// insertBeforeInstr; it is used on ARM to route the unordered (NaN) case.
void Lowerer::InsertFloatCheckForZeroOrNanBranch(
    IR::Opnd *const src,
    const bool branchOnZeroOrNan,
    IR::LabelInstr *const target,
    IR::LabelInstr *const fallthroughLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(src);
    Assert(src->IsFloat64());
    Assert(target);
    Assert(!fallthroughLabel || fallthroughLabel != target);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    // Compare src against the process-wide +0.0 constant, ignoring NaN
    // ordering (see the per-platform notes below).
    IR::BranchInstr *const branchOnEqualOrNotEqual =
        InsertCompareBranch(
            src,
            IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, func),
            branchOnZeroOrNan ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
            target,
            insertBeforeInstr,
            true /* ignoreNaN */);
    // x86/x64
    // When NaN is ignored, on x86 and x64, JE branches when equal or unordered since an unordered result sets the zero
    // flag, and JNE branches when not equal and not unordered. By comparing with zero, JE will branch when src is zero or
    // NaN, and JNE will branch when src is not zero and not NaN.
    //
    // ARM
    // When NaN is ignored, BEQ branches when equal and not unordered, and BNE branches when not equal or unordered. So,
    // when comparing src with zero, an unordered check needs to be added before the BEQ/BNE.
    branchOnEqualOrNotEqual; // satisfy the compiler
#ifdef _M_ARM32_OR_ARM64
    // The BVS (unordered) branch is inserted *before* the BEQ/BNE. It sends
    // NaN to 'target' when branching on zero-or-NaN; otherwise NaN must skip
    // the BNE (which would wrongly take it) by jumping to the fallthrough.
    InsertBranch(
        Js::OpCode::BVS,
        branchOnZeroOrNan
            ? target
            : fallthroughLabel ? fallthroughLabel : insertBeforeInstr->m_prev->GetOrCreateContinueLabel(),
        branchOnEqualOrNotEqual);
#endif
}
// Shared fast-path generator for element access (LdElem/StElem style).
// Dispatches on the likely value type of the index: string indices go
// through the PropertyString inline-cache path, symbol indices through the
// JavascriptSymbol path, everything else through the integer-index path.
// Returns the indir opnd to load/store through, or null when no fast path
// could be generated (caller then emits only the helper call).
IR::IndirOpnd*
Lowerer::GenerateFastElemICommon(
    _In_ IR::Instr* elemInstr,
    _In_ bool isStore,
    _In_ IR::IndirOpnd* indirOpnd,
    _In_ IR::LabelInstr* labelHelper,
    _In_ IR::LabelInstr* labelCantUseArray,
    _In_opt_ IR::LabelInstr* labelFallthrough,
    _Out_ bool* pIsTypedArrayElement,
    _Out_ bool* pIsStringIndex,
    _Out_opt_ bool* emitBailoutRef,
    _Outptr_opt_result_maybenull_ IR::Opnd** maskOpnd,
    _Outptr_opt_result_maybenull_ IR::LabelInstr** pLabelSegmentLengthIncreased, // = nullptr
    _In_ bool checkArrayLengthOverflow, // = true
    _In_ bool forceGenerateFastPath, // = false
    _In_ bool returnLength, // = false
    _In_opt_ IR::LabelInstr* bailOutLabelInstr, // = nullptr
    _Out_opt_ bool* indirOpndOverflowed, // = nullptr
    _In_ Js::FldInfoFlags flags) // = Js::FldInfo_NoInfo
{
    // Initialize all out-parameters up front so every early return leaves
    // them in a well-defined state.
    *pIsTypedArrayElement = false;
    *pIsStringIndex = false;
    if(pLabelSegmentLengthIncreased)
    {
        *pLabelSegmentLengthIncreased = nullptr;
    }
    if (maskOpnd)
    {
        *maskOpnd = nullptr;
    }
    if (indirOpndOverflowed)
    {
        *indirOpndOverflowed = false;
    }
    if (emitBailoutRef)
    {
        *emitBailoutRef = false;
    }
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    AssertMsg(baseOpnd, "This shouldn't be NULL");
    // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
    // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
    // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
    // bailouts.
    if (baseOpnd->IsTaggedInt())
    {
        // A tagged-int base can never be an array; no fast path possible.
        return NULL;
    }
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (indexOpnd)
    {
        // Decode the profile flags: only generate the string/symbol cache
        // fast path when the profile reports a normal property location
        // (local/proto/add-to-local) and normal slot storage (inline or aux).
        const bool normalLocation = (flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromProto | Js::FldInfo_FromLocalWithoutProperty)) != 0;
        const bool normalSlots = (flags & (Js::FldInfo_FromAuxSlots | Js::FldInfo_FromInlineSlots)) != 0;
        const bool generateFastpath = !baseOpnd->GetValueType().IsLikelyOptimizedTypedArray() && normalLocation && normalSlots && flags != Js::FldInfo_NoInfo;
        if (indexOpnd->GetValueType().IsLikelyString())
        {
            if (generateFastpath)
            {
                // If profile data says that it's a typed array - do not generate the property string fast path as the src. could be a temp and that would cause a bug.
                *pIsTypedArrayElement = false;
                *pIsStringIndex = true;
                return GenerateFastElemIStringIndexCommon(elemInstr, isStore, indirOpnd, labelHelper, flags);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a string value.
                return nullptr;
            }
        }
        else if (indexOpnd->GetValueType().IsLikelySymbol())
        {
            if (generateFastpath)
            {
                // If profile data says that it's a typed array - do not generate the symbol fast path as the src. could be a temp and that would cause a bug.
                return GenerateFastElemISymbolIndexCommon(elemInstr, isStore, indirOpnd, labelHelper, flags);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a symbol value.
                return nullptr;
            }
        }
    }
    // Default: integer-index fast path (typed arrays and JavaScript arrays).
    return
        GenerateFastElemIIntIndexCommon(
            elemInstr,
            isStore,
            indirOpnd,
            labelHelper,
            labelCantUseArray,
            labelFallthrough,
            pIsTypedArrayElement,
            emitBailoutRef,
            pLabelSegmentLengthIncreased,
            checkArrayLengthOverflow,
            maskOpnd,
            false,
            returnLength,
            bailOutLabelInstr,
            indirOpndOverflowed);
}
// Computes, at runtime, the address of the polymorphic inline-cache entry for
// a given object type: hashes the type pointer into a cache slot index and
// leaves the entry's address in inlineCacheOpnd.
void
Lowerer::GenerateDynamicLoadPolymorphicInlineCacheSlot(IR::Instr * instrInsert, IR::RegOpnd * inlineCacheOpnd, IR::Opnd * objectTypeOpnd)
{
    // Generates:
    //      MOV r1, objectTypeOpnd
    //      SHR r1, PolymorphicInlineCacheShift
    //      MOVZX r2, inlineCacheOpnd->size
    //      DEC r2
    //      AND r1, r2                                    ; (type >> shift) & (size - 1): size is a power of two
    //      SHL r1, log2(sizeof(Js::InlineCache))
    //      MOV inlineCacheOpnd, inlineCacheOpnd->inlineCaches
    //      LEA inlineCacheOpnd, [inlineCacheOpnd + r1]
    IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
    IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
    // The net right shift must be positive or the index math is wrong.
    Assert(rightShiftAmount > leftShiftAmount);
    IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertShift(Js::OpCode::ShrU_A, false, opndOffset, objectTypeOpnd, IR::IntConstOpnd::New(rightShiftAmount, TyUint8, m_func, true), instrInsert);
    IR::RegOpnd * cacheIndexOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    // The cache size field is a uint16; the load zero-extends into a
    // machine-width register.
    InsertMove(cacheIndexOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfSize(), TyUint16, m_func), instrInsert);
    InsertSub(false, cacheIndexOpnd, cacheIndexOpnd, IR::IntConstOpnd::New(1, TyMachPtr, m_func), instrInsert);
    InsertAnd(opndOffset, opndOffset, cacheIndexOpnd, instrInsert);
    // Scale the slot index into a byte offset.
    InsertShift(Js::OpCode::Shl_A, false, opndOffset, opndOffset, IR::IntConstOpnd::New(leftShiftAmount, TyUint8, m_func), instrInsert);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, m_func), instrInsert);
    InsertLea(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, opndOffset, TyMachPtr, m_func), instrInsert);
}
- // Test that the operand is a PropertyString, or bail to helper
- void
- Lowerer::GeneratePropertyStringTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper, bool isStore)
- {
- // Generates:
- // StringTest(srcReg, $helper) ; verify index is string type
- // CMP srcReg, PropertyString::`vtable' ; verify index is property string
- // JNE $helper
- GenerateStringTest(srcReg, instrInsert, labelHelper);
- IR::LabelInstr * notPropStrLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- IR::LabelInstr * propStrLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- IR::BranchInstr *branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
- LoadVTableValueOpnd(instrInsert, VTableValue::VtablePropertyString),
- Js::OpCode::BrNeq_A, notPropStrLabel, instrInsert);
- InsertObjectPoison(srcReg, branchInstr, instrInsert, isStore);
- InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);
- InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);
- instrInsert->InsertBefore(notPropStrLabel);
- branchInstr = InsertCompareBranch(
- IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
- LoadVTableValueOpnd(instrInsert, VTableValue::VtableLiteralStringWithPropertyStringPtr),
- Js::OpCode::BrNeq_A, labelHelper, instrInsert);
- InsertObjectPoison(srcReg, branchInstr, instrInsert, isStore);
- IR::IndirOpnd * propStrOpnd = IR::IndirOpnd::New(srcReg, Js::LiteralStringWithPropertyStringPtr::GetOffsetOfPropertyString(), TyMachPtr, m_func);
- InsertCompareBranch(propStrOpnd, IR::IntConstOpnd::New(NULL, TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, instrInsert);
- // We don't really own srcReg, but it is fine to update it to be the PropertyString, since that is better to have anyway
- InsertMove(srcReg, propStrOpnd, instrInsert);
- instrInsert->InsertBefore(propStrLoadedLabel);
- }
- IR::IndirOpnd*
- Lowerer::GenerateFastElemIStringIndexCommon(
- _In_ IR::Instr* elemInstr,
- _In_ bool isStore,
- _In_ IR::IndirOpnd* indirOpnd,
- _In_ IR::LabelInstr* labelHelper,
- _In_ Js::FldInfoFlags flags)
- {
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
- Assert(baseOpnd != nullptr);
- Assert(indexOpnd->GetValueType().IsLikelyString());
- // Generates:
- // PropertyStringTest(indexOpnd, $helper) ; verify index is string type
- // FastElemISymbolOrStringIndexCommon(indexOpnd, baseOpnd, $helper) ; shared code with JavascriptSymbol
- GeneratePropertyStringTest(indexOpnd, elemInstr, labelHelper, isStore);
- const uint32 inlineCacheOffset = isStore ? Js::PropertyString::GetOffsetOfStElemInlineCache() : Js::PropertyString::GetOffsetOfLdElemInlineCache();
- const uint32 hitRateOffset = Js::PropertyString::GetOffsetOfHitRate();
- return GenerateFastElemISymbolOrStringIndexCommon(elemInstr, indexOpnd, baseOpnd, inlineCacheOffset, hitRateOffset, labelHelper, flags);
- }
- IR::IndirOpnd*
- Lowerer::GenerateFastElemISymbolIndexCommon(
- _In_ IR::Instr* elemInstr,
- _In_ bool isStore,
- _In_ IR::IndirOpnd* indirOpnd,
- _In_ IR::LabelInstr* labelHelper,
- _In_ Js::FldInfoFlags flags)
- {
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
- Assert(baseOpnd != nullptr);
- Assert(indexOpnd->GetValueType().IsLikelySymbol());
- // Generates:
- // SymbolTest(indexOpnd, $helper) ; verify index is symbol type
- // FastElemISymbolOrStringIndexCommon(indexOpnd, baseOpnd, $helper) ; shared code with PropertyString
- GenerateSymbolTest(indexOpnd, elemInstr, labelHelper);
- const uint32 inlineCacheOffset = isStore ? Js::JavascriptSymbol::GetOffsetOfStElemInlineCache() : Js::JavascriptSymbol::GetOffsetOfLdElemInlineCache();
- const uint32 hitRateOffset = Js::JavascriptSymbol::GetOffsetOfHitRate();
- return GenerateFastElemISymbolOrStringIndexCommon(elemInstr, indexOpnd, baseOpnd, inlineCacheOffset, hitRateOffset, labelHelper, flags);
- }
// Fast path for the "in" operator when the index is a PropertyString or
// symbol: a hit in the element inline cache proves the property exists, so
// dest is set to true and control jumps to labelDone without calling the
// helper. A cache miss jumps to labelHelper (the branching happens inside
// GenerateLookUpInIndexCache).
void
Lowerer::GenerateFastIsInSymbolOrStringIndex(IR::Instr * instrInsert, IR::RegOpnd *indexOpnd, IR::RegOpnd *baseOpnd, IR::Opnd *dest, uint32 inlineCacheOffset, const uint32 hitRateOffset, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone)
{
    // Try to look up the property in the cache, or bail to helper.
    // The slot array/index outputs are not needed for "in" — existence is enough.
    GenerateLookUpInIndexCache(instrInsert, indexOpnd, baseOpnd, nullptr /*opndSlotArray*/, nullptr /*opndSlotIndex*/, inlineCacheOffset, hitRateOffset, labelHelper);
    // MOV dest, true
    InsertMove(dest, LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueTrue), instrInsert);
    // JMP labelDone
    InsertBranch(Js::OpCode::Br, labelDone, instrInsert);
}
- IR::IndirOpnd*
- Lowerer::GenerateFastElemISymbolOrStringIndexCommon(
- _In_ IR::Instr* instrInsert,
- _In_ IR::RegOpnd* indexOpnd,
- _In_ IR::RegOpnd* baseOpnd,
- _In_ const uint32 inlineCacheOffset,
- _In_ const uint32 hitRateOffset,
- _In_ IR::LabelInstr* labelHelper,
- _In_ Js::FldInfoFlags flags)
- {
- // Try to look up the property in the cache, or bail to helper
- IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
- IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
- GenerateLookUpInIndexCache(instrInsert, indexOpnd, baseOpnd, opndSlotArray, opndSlotIndex, inlineCacheOffset, hitRateOffset, labelHelper, flags);
- // return [opndSlotArray + opndSlotIndex * PtrSize]
- return IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, instrInsert->m_func);
- }
// Look up a value from the polymorphic inline cache on a PropertyString or Symbol. Offsets are relative to indexOpnd.
// Checks local and/or proto caches based on profile data. If the property is not found, jump to the helper.
// opndSlotArray is optional; if provided, it will receive the base address of the slot array that contains the property.
// opndSlotIndex is optional; if provided, it will receive the index of the match within the slot array.
void
Lowerer::GenerateLookUpInIndexCache(
    _In_ IR::Instr* instrInsert,
    _In_ IR::RegOpnd* indexOpnd,
    _In_ IR::RegOpnd* baseOpnd,
    _In_opt_ IR::RegOpnd* opndSlotArray,
    _In_opt_ IR::RegOpnd* opndSlotIndex,
    _In_ const uint32 inlineCacheOffset,
    _In_ const uint32 hitRateOffset,
    _In_ IR::LabelInstr* labelHelper,
    _In_ Js::FldInfoFlags flags) // = Js::FldInfo_NoInfo
{
    // Generates:
    //      MOV inlineCacheOpnd, index->inlineCache
    //      GenerateObjectTest(baseOpnd, $helper) ; verify base is an object
    //      MOV objectTypeOpnd, baseOpnd->type
    //      GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd) ; loads inline cache for given type
    //      if (checkLocalInlineSlots)
    //          GenerateLookUpInIndexCacheHelper<CheckLocal, CheckInlineSlot> // checks local inline slots, goes to next on failure
    //      if (checkLocalAuxSlots)
    //          GenerateLookUpInIndexCacheHelper<CheckLocal, CheckAuxSlot> // checks local aux slots, goes to next on failure
    //      if (fromProto && fromInlineSlots)
    //          GenerateLookUpInIndexCacheHelper<CheckProto, CheckInlineSlot> // checks proto inline slots, goes to next on failure
    //      if (fromProto && fromAuxSlots)
    //          GenerateLookUpInIndexCacheHelper<CheckProto, CheckAuxSlot> // checks proto aux slots, goes to next on failure
    //      if (doAdd && fromInlineSlots)
    //          GenerateLookUpInIndexCacheHelper<CheckLocal, CheckInlineSlot, DoAdd> // checks typeWithoutProperty inline slots, goes to next on failure
    //      if (doAdd && fromAuxSlots)
    //          GenerateLookUpInIndexCacheHelper<CheckLocal, CheckAuxSlot, DoAdd> // checks typeWithoutProperty aux slots, goes to helper on failure
    //  $slotIndexLoadedLabel
    //      INC indexOpnd->hitRate
    //
    // Decode the profile flags to decide which cache variants to check. With
    // no profile info (FldInfo_NoInfo) the local checks are emitted as the
    // safe default.
    const bool fromInlineSlots = (flags & Js::FldInfo_FromInlineSlots) == Js::FldInfo_FromInlineSlots;
    const bool fromAuxSlots = (flags & Js::FldInfo_FromAuxSlots) == Js::FldInfo_FromAuxSlots;
    const bool fromLocal = (flags & Js::FldInfo_FromLocal) == Js::FldInfo_FromLocal;
    const bool fromProto = (flags & Js::FldInfo_FromProto) == Js::FldInfo_FromProto;
    const bool doAdd = (flags & Js::FldInfo_FromLocalWithoutProperty) == Js::FldInfo_FromLocalWithoutProperty;
    const bool checkLocalInlineSlots = flags == Js::FldInfo_NoInfo || (fromInlineSlots && fromLocal);
    const bool checkLocalAuxSlots = flags == Js::FldInfo_NoInfo || (fromAuxSlots && fromLocal);
    m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelHelper);
    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), instrInsert);
    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(indexOpnd, inlineCacheOffset, TyMachPtr, m_func), instrInsert);
    GenerateDynamicLoadPolymorphicInlineCacheSlot(instrInsert, inlineCacheOpnd, objectTypeOpnd);
    IR::LabelInstr* slotIndexLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // Each helper call below emits one cache check that falls through to a
    // fresh nextLabel on a miss; the following helper emits its check under
    // that label. branchToPatch/nextLabel always track the most recent miss
    // branch and label, and taggedTypeOpnd caches the tagged type so it is
    // computed at most once across aux-slot checks.
    IR::BranchInstr* branchToPatch = nullptr;
    IR::LabelInstr* nextLabel = nullptr;
    IR::RegOpnd* taggedTypeOpnd = nullptr;
    if (checkLocalInlineSlots)
    {
        GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, true /* CheckInlineSlot */, false /* DoAdd */>(
            instrInsert,
            baseOpnd,
            opndSlotArray,
            opndSlotIndex,
            objectTypeOpnd,
            inlineCacheOpnd,
            slotIndexLoadedLabel,
            labelHelper,
            &nextLabel,
            &branchToPatch,
            &taggedTypeOpnd);
    }
    if (checkLocalAuxSlots)
    {
        GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, false /* CheckInlineSlot */, false /* DoAdd */>(
            instrInsert,
            baseOpnd,
            opndSlotArray,
            opndSlotIndex,
            objectTypeOpnd,
            inlineCacheOpnd,
            slotIndexLoadedLabel,
            labelHelper,
            &nextLabel,
            &branchToPatch,
            &taggedTypeOpnd);
    }
    if (fromProto)
    {
        if (fromInlineSlots)
        {
            GenerateLookUpInIndexCacheHelper<false /* CheckLocal */, true /* CheckInlineSlot */, false /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
        if (fromAuxSlots)
        {
            GenerateLookUpInIndexCacheHelper<false /* CheckLocal */, false /* CheckInlineSlot */, false /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
    }
    if (doAdd)
    {
        // The add path writes the new type onto the object, so the caller
        // must want the slot array to store into.
        Assert(opndSlotArray);
        if (fromInlineSlots)
        {
            GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, true /* CheckInlineSlot */, true /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
        if (fromAuxSlots)
        {
            GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, false /* CheckInlineSlot */, true /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
    }
    // The last check's miss branch has no subsequent check to fall into:
    // repoint it at the helper and delete its now-unreferenced label.
    Assert(branchToPatch);
    Assert(nextLabel);
    Assert(nextLabel->labelRefs.Count() == 1 && nextLabel->labelRefs.Head() == branchToPatch);
    branchToPatch->SetTarget(labelHelper);
    nextLabel->Remove();
    instrInsert->InsertBefore(slotIndexLoadedLabel);
    // On a cache hit, bump the hit-rate counter stored on the index object.
    IR::IndirOpnd * hitRateOpnd = IR::IndirOpnd::New(indexOpnd, hitRateOffset, TyInt32, m_func);
    IR::IntConstOpnd * incOpnd = IR::IntConstOpnd::New(1, TyInt32, m_func);
    // overflow check: not needed here, we don't allocate anything with hitrate
    InsertAdd(false, hitRateOpnd, hitRateOpnd, incOpnd, instrInsert);
}
// Emits one inline-cache check for GenerateLookUpInIndexCache.
// Template parameters select the cache variant:
//   CheckLocal      - check the cache's local (u.local) entry; otherwise proto (u.proto).
//   CheckInlineSlot - property lives in inline slots (untagged type compare);
//                     otherwise aux slots (tagged type compare).
//   DoAdd           - property-add path: match typeWithoutProperty and update
//                     the object's type from the cache (local only).
// On a hit, falls through to loading the slot array/index (if requested) and
// jumps to doneLabel. On a miss, branches to *nextLabel, which is emitted at
// the end so the caller's next check (or the final helper patch) follows it.
// *branchToPatch receives that miss branch; *taggedTypeOpnd caches the
// tagged-type register across calls so it is materialized at most once.
template <bool CheckLocal, bool CheckInlineSlot, bool DoAdd>
void
Lowerer::GenerateLookUpInIndexCacheHelper(
    _In_ IR::Instr* insertInstr,
    _In_ IR::RegOpnd* baseOpnd,
    _In_opt_ IR::RegOpnd* opndSlotArray,
    _In_opt_ IR::RegOpnd* opndSlotIndex,
    _In_ IR::RegOpnd* objectTypeOpnd,
    _In_ IR::RegOpnd* inlineCacheOpnd,
    _In_ IR::LabelInstr* doneLabel,
    _In_ IR::LabelInstr* helperLabel,
    _Outptr_ IR::LabelInstr** nextLabel,
    _Outptr_ IR::BranchInstr** branchToPatch,
    _Inout_ IR::RegOpnd** taggedTypeOpnd)
{
    // Adds only happen on the local cache entry.
    CompileAssert(!DoAdd || CheckLocal);
    AnalysisAssert(!opndSlotArray || opndSlotIndex);
    *nextLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // Aux-slot entries are cached with a tagged type pointer, so compare
    // against the tagged form; inline-slot entries use the raw type.
    IR::RegOpnd* typeOpnd = nullptr;
    if (CheckInlineSlot)
    {
        typeOpnd = objectTypeOpnd;
    }
    else
    {
        if (*taggedTypeOpnd == nullptr)
        {
            // Materialize the tagged type once; reused by later aux-slot checks.
            *taggedTypeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
            m_lowererMD.GenerateLoadTaggedType(insertInstr, objectTypeOpnd, *taggedTypeOpnd);
        }
        typeOpnd = *taggedTypeOpnd;
    }
    // objectOpnd is the object whose slots hold the property: the base itself
    // for local entries, or the prototype recorded in the cache for proto entries.
    IR::RegOpnd* objectOpnd = nullptr;
    if (CheckLocal)
    {
        *branchToPatch = GenerateLocalInlineCacheCheck(insertInstr, typeOpnd, inlineCacheOpnd, *nextLabel, DoAdd);
        if (DoAdd)
        {
            if (!CheckInlineSlot)
            {
                // Growing into aux slots may require reallocation; that can't
                // be done on the fast path, so bail to the helper if needed.
                GenerateAuxSlotAdjustmentRequiredCheck(insertInstr, inlineCacheOpnd, helperLabel);
            }
            // Commit the add by installing the cache's new type on the object.
            GenerateSetObjectTypeFromInlineCache(insertInstr, baseOpnd, inlineCacheOpnd, !CheckInlineSlot);
        }
        objectOpnd = baseOpnd;
    }
    else
    {
        *branchToPatch = GenerateProtoInlineCacheCheck(insertInstr, typeOpnd, inlineCacheOpnd, *nextLabel);
        IR::RegOpnd* protoOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        int32 protoObjOffset = (int32)offsetof(Js::InlineCache, u.proto.prototypeObject);
        IR::IndirOpnd* protoIndir = IR::IndirOpnd::New(inlineCacheOpnd, protoObjOffset, TyMachReg, m_func);
        InsertMove(protoOpnd, protoIndir, insertInstr);
        objectOpnd = protoOpnd;
    }
    if (opndSlotArray)
    {
        if (CheckInlineSlot)
        {
            // Inline slots are embedded in the object itself.
            InsertMove(opndSlotArray, objectOpnd, insertInstr);
        }
        else
        {
            // Aux slots live in a separately allocated array on the object.
            IR::IndirOpnd* auxIndir = IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, m_func);
            InsertMove(opndSlotArray, auxIndir, insertInstr);
        }
        size_t slotIndexOffset = CheckLocal ? offsetof(Js::InlineCache, u.local.slotIndex) : offsetof(Js::InlineCache, u.proto.slotIndex);
        IR::IndirOpnd* slotOffsetIndir = IR::IndirOpnd::New(inlineCacheOpnd, (int32)slotIndexOffset, TyUint16, m_func);
        // overflow check: not needed here, we don't allocate anything with hitrate
        InsertMove(opndSlotIndex, slotOffsetIndir, insertInstr);
    }
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);
    insertInstr->InsertBefore(*nextLabel);
}
- IR::IndirOpnd *
- Lowerer::GenerateFastElemIIntIndexCommon(
- IR::Instr * instr,
- bool isStore,
- IR::IndirOpnd * indirOpnd,
- IR::LabelInstr * labelHelper,
- IR::LabelInstr * labelCantUseArray,
- IR::LabelInstr *labelFallthrough,
- bool * pIsTypedArrayElement,
- bool *emitBailoutRef,
- IR::LabelInstr **pLabelSegmentLengthIncreased,
- bool checkArrayLengthOverflow /*= true*/,
- IR::Opnd** maskOpnd,
- bool forceGenerateFastPath /* = false */,
- bool returnLength,
- IR::LabelInstr *bailOutLabelInstr /* = nullptr*/,
- bool * indirOpndOverflowed /* = nullptr */)
- {
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
- Assert(!baseOpnd->IsTaggedInt() || (indexOpnd && indexOpnd->IsNotInt()));
- if (indirOpndOverflowed != nullptr)
- {
- *indirOpndOverflowed = false;
- }
- BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
- IRType indirType = TyVar;
- const ValueType baseValueType(baseOpnd->GetValueType());
- // TEST base, AtomTag -- check base not tagged int
- // JNE $helper
- // if (base.GetValueType() != Array) {
- // CMP [base], JavascriptArray::`vtable'
- // JNE $helper
- // }
- // TEST index, 1 -- index tagged int
- // JEQ $helper
- // if (inputIndex is not int const) {
- // MOV index, inputIndex
- // SAR index, Js::VarTag_Shift -- remote atom tag
- // JS $helper -- exclude negative index
- // }
- // MOV headSegment, [base + offset(head)]
- // CMP [headSegment + offset(length)], index -- bounds check
- // if (opcode == StElemI_A) {
- // JA $done (for typedarray, JA $toNumberHelper)
- // CMP [headSegment + offset(size)], index -- chunk has room?
- // JBE $helper
- // if (index is not int const) {
- // LEA newLength, [index + 1]
- // } else {
- // newLength = index + 1
- // }
- // if(BailOutOnInvalidatedArrayLength) {
- // CMP [base + offset(length)], newlength
- // JB $helper
- // }
- // MOV [headSegment + offset(length)], newLength -- update length on chunk
- // CMP [base + offset(length)], newLength
- // JAE $done
- // MOV [base + offset(length)], newLength -- update length on array
- // if(length to be returned){
- // SHL newLength, AtomTag
- // INC newLength
- // MOV dst, newLength
- // }
- // JMP $done
- //
- // $toNumberHelper: Call HelperOp_ConvNumber_Full
- // JMP $done
- // $done
- // } else {la
- // JBE $helper
- // }
- // return [headSegment + offset(elements) + index]
- // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
- // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
- // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
- // bailouts.
- bool isIndexNotInt = false;
- IntConstType value = 0;
- IR::Opnd * indexValueOpnd = nullptr;
- bool invertBoundCheckComparison = false;
- bool checkIndexConstOverflowed = false;
- if (indirOpnd->TryGetIntConstIndexValue(true, &value, &isIndexNotInt))
- {
- if (value >= 0)
- {
- indexValueOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
- invertBoundCheckComparison = true; // facilitate folding the constant index into the compare instruction
- checkIndexConstOverflowed = true;
- }
- else
- {
- // If the index is a negative int constant we go directly to helper.
- Assert(!forceGenerateFastPath);
- return nullptr;
- }
- }
- else if (isIndexNotInt)
- {
- // If we know the index is not an int we go directly to helper.
- Assert(!forceGenerateFastPath);
- return nullptr;
- }
- //At this point indexValueOpnd is either NULL or contains the valueOpnd
- if(!forceGenerateFastPath && !ShouldGenerateArrayFastPath(baseOpnd, true, true, true))
- {
- return nullptr;
- }
- if(baseValueType.IsLikelyAnyOptimizedArray())
- {
- indirScale = GetArrayIndirScale(baseValueType);
- indirType = GetArrayIndirType(baseValueType);
- }
- if (checkIndexConstOverflowed && (static_cast<uint64>(value) << indirScale) > INT32_MAX &&
- indirOpndOverflowed != nullptr)
- {
- *indirOpndOverflowed = true;
- return nullptr;
- }
- IRType elementType = TyIllegal;
- IR::Opnd * element = nullptr;
- if(instr->m_opcode == Js::OpCode::InlineArrayPush)
- {
- element = instr->GetSrc2();
- elementType = element->GetType();
- }
- else if(isStore && instr->GetSrc1())
- {
- element = instr->GetSrc1();
- elementType = element->GetType();
- }
- Assert(isStore || (element == nullptr && elementType == TyIllegal));
- if (isStore && baseValueType.IsLikelyNativeArray() && indirType != elementType)
- {
- // We're trying to write a value of the wrong type, which should force a conversion of the array.
- // Go to the helper for that.
- return nullptr;
- }
- IR::RegOpnd *arrayOpnd = baseOpnd;
- IR::RegOpnd *headSegmentOpnd = nullptr;
- IR::Opnd *headSegmentLengthOpnd = nullptr;
- IR::AutoReuseOpnd autoReuseHeadSegmentOpnd, autoReuseHeadSegmentLengthOpnd;
- bool indexIsNonnegative = indexValueOpnd || indexOpnd->GetType() == TyUint32 || !checkArrayLengthOverflow;
- bool indexIsLessThanHeadSegmentLength = false;
- if(!baseValueType.IsAnyOptimizedArray())
- {
- arrayOpnd = GenerateArrayTest(baseOpnd, labelCantUseArray, labelCantUseArray, instr, true, isStore);
- }
- else
- {
- if(arrayOpnd->IsArrayRegOpnd())
- {
- IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
- if(arrayRegOpnd->HeadSegmentSym())
- {
- headSegmentOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentSym(), TyMachPtr, m_func);
- DebugOnly(headSegmentOpnd->FreezeSymValue());
- autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
- }
- if(arrayRegOpnd->HeadSegmentLengthSym())
- {
- headSegmentLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentLengthSym(), TyUint32, m_func);
- // This value can change over the course of this function
- //DebugOnly(headSegmentLengthOpnd->AsRegOpnd()->FreezeSymValue());
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- if (arrayRegOpnd->EliminatedLowerBoundCheck())
- {
- indexIsNonnegative = true;
- }
- if(arrayRegOpnd->EliminatedUpperBoundCheck())
- {
- indexIsLessThanHeadSegmentLength = true;
- }
- }
- }
- IR::AutoReuseOpnd autoReuseArrayOpnd;
- if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
- {
- autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
- }
- const auto EnsureObjectArrayLoaded = [&]()
- {
- if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
- {
- return;
- }
- arrayOpnd = LoadObjectArray(arrayOpnd, instr);
- autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
- };
- const bool doUpperBoundCheck = checkArrayLengthOverflow && !indexIsLessThanHeadSegmentLength;
- if(!indexValueOpnd)
- {
- indexValueOpnd =
- m_lowererMD.LoadNonnegativeIndex(
- indexOpnd,
- (
- indexIsNonnegative
- #if !INT32VAR
- ||
- // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
- doUpperBoundCheck
- #endif
- ),
- labelCantUseArray,
- labelHelper,
- instr);
- }
- const IR::AutoReuseOpnd autoReuseIndexValueOpnd(indexValueOpnd, m_func);
- if (baseValueType.IsLikelyTypedArray())
- {
- *pIsTypedArrayElement = true;
- if(doUpperBoundCheck)
- {
- if(!headSegmentLengthOpnd)
- {
- // (headSegmentLength = [base + offset(length)])
- int lengthOffset;
- lengthOffset = Js::Float64Array::GetOffsetOfLength();
- headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- // CMP index, headSegmentLength -- upper bound check
- if(!invertBoundCheckComparison)
- {
- InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
- }
- else
- {
- InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
- }
- }
- }
- else
- {
- *pIsTypedArrayElement = false;
- if (isStore &&
- baseValueType.IsLikelyNativeIntArray() &&
- (!element->IsIntConstOpnd() || Js::SparseArraySegment<int32>::GetMissingItem() == element->AsIntConstOpnd()->AsInt32()))
- {
- Assert(instr->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr);
- // Check for a write of the MissingItem value.
- InsertMissingItemCompareBranch(
- element,
- Js::OpCode::BrEq_A,
- instr->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray,
- instr);
- }
- if(!headSegmentOpnd)
- {
- EnsureObjectArrayLoaded();
- // MOV headSegment, [base + offset(head)]
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
- headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
- InsertMove(headSegmentOpnd, indirOpnd, instr);
- }
- if(doUpperBoundCheck)
- {
- if(!headSegmentLengthOpnd)
- {
- // (headSegmentLength = [headSegment + offset(length)])
- headSegmentLengthOpnd =
- IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- // CMP index, headSegmentLength -- upper bound check
- if(!invertBoundCheckComparison)
- {
- InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
- }
- else
- {
- InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
- }
- }
- }
- const IR::BailOutKind bailOutKind = instr->HasBailOutInfo() ? instr->GetBailOutKind() : IR::BailOutInvalid;
- const bool needBailOutOnInvalidLength = !!(bailOutKind & (IR::BailOutOnInvalidatedArrayHeadSegment));
- const bool needBailOutToHelper = !!(bailOutKind & (IR::BailOutOnArrayAccessHelperCall));
- const bool needBailOutOnSegmentLengthCompare = needBailOutToHelper || needBailOutOnInvalidLength;
- bool usingSegmentLengthIncreasedLabel = false;
- if(indexIsLessThanHeadSegmentLength || needBailOutOnSegmentLengthCompare)
- {
- if (needBailOutOnSegmentLengthCompare)
- {
- // The bailout must be pre-op because it will not have completed the operation
- Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
- // TODO: Check this with lazy bailout
- // Verify other bailouts these can be combined with
- Assert(
- !(
- bailOutKind &
- IR::BailOutKindBits &
- ~(
- IR::LazyBailOut |
- IR::BailOutOnArrayAccessHelperCall |
- IR::BailOutOnInvalidatedArrayHeadSegment |
- IR::BailOutOnInvalidatedArrayLength |
- IR::BailOutConventionalNativeArrayAccessOnly |
- IR::BailOutOnMissingValue |
- (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
- )
- )
- );
- if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
- {
- // Omit the helper call and generate a bailout instead
- Assert(emitBailoutRef);
- *emitBailoutRef = true;
- }
- }
- if (indexIsLessThanHeadSegmentLength)
- {
- Assert(!(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment));
- }
- else
- {
- IR::LabelInstr *bailOutLabel;
- if (needBailOutOnInvalidLength)
- {
- Assert(isStore);
- // Lower a separate (but shared) bailout for this case, and preserve the bailout kind in the instruction if the
- // helper call is going to be generated, because the bailout kind needs to be lowered again and differently in the
- // helper call path.
- //
- // Generate:
- // (instr)
- // jmp $continue
- // $bailOut:
- // Bail out with IR::BailOutOnInvalidatedArrayHeadSegment
- // $continue:
- LowerOneBailOutKind(
- instr,
- IR::BailOutOnInvalidatedArrayHeadSegment,
- false,
- !(bailOutKind & IR::BailOutOnArrayAccessHelperCall));
- bailOutLabel = instr->GetOrCreateContinueLabel(true);
- InsertBranch(Js::OpCode::Br, labelFallthrough, bailOutLabel);
- }
- else
- {
- Assert(needBailOutToHelper);
- bailOutLabel = labelHelper;
- }
- // Bail out if the index is outside the head segment bounds
- // jae $bailOut
- Assert(checkArrayLengthOverflow);
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
- true /* isUnsigned */,
- bailOutLabel,
- instr);
- }
- }
- else if (isStore && !baseValueType.IsLikelyTypedArray()) // #if (opcode == StElemI_A)
- {
- IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- LABELNAME(labelDone);
- IR::LabelInstr *labelSegmentLengthIncreased = nullptr;
- const bool isPush = instr->m_opcode != Js::OpCode::StElemI_A && instr->m_opcode != Js::OpCode::StElemI_A_Strict;
- // Put the head segment size check and length updates in a helper block since they're not the common path for StElem.
- // For push, that is the common path so keep it in a non-helper block.
- const bool isInHelperBlock = !isPush;
- if(checkArrayLengthOverflow)
- {
- if(pLabelSegmentLengthIncreased &&
- !(
- (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()) ||
- ((instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
- instr->IsProfiledInstr() && !instr->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
- ))
- {
- // For arrays that are not guaranteed to have no missing values, before storing to an element where
- // (index < length), the element value needs to be checked to see if it's a missing value, and if so, fall back
- // to the helper. This is done to keep the missing value tracking precise in arrays. So, create a separate label
- // for the case where the length was increased (index >= length), and pass it back to GenerateFastStElemI, which
- // will fill in the rest.
- labelSegmentLengthIncreased = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelperBlock);
- LABELNAME(labelSegmentLengthIncreased);
- *pLabelSegmentLengthIncreased = labelSegmentLengthIncreased;
- // Since this is effectively a separate exit point, we need to do the spectre mitigations in this place as well.
- usingSegmentLengthIncreasedLabel = true;
- }
- else
- {
- labelSegmentLengthIncreased = labelDone;
- }
- // JB $done
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
- true /* isUnsigned */,
- labelDone,
- instr);
- }
- if(isInHelperBlock)
- {
- InsertLabel(true /* isHelper */, instr);
- }
- EnsureObjectArrayLoaded();
- do // while(false);
- {
- if(checkArrayLengthOverflow)
- {
- if(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnMissingValue)
- {
- // Need to bail out if this store would create a missing value. The store would cause a missing value to be
- // created if (index > length && index < size). If (index >= size) we would go to helper anyway, and the bailout
- // handling for this is done after the helper call, so just go to helper if (index > length).
- //
- // jne $helper // branch for (cmp index, headSegmentLength)
- InsertBranch(Js::OpCode::BrNeq_A, labelHelper, instr);
- }
- else
- {
- // If (index < size) we will not call the helper, so the array flags must be updated to reflect that it no
- // longer has no missing values.
- //
- // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // jmp indexLessThanSize
- // indexGreaterThanLength:
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
- // indexLessThanSize:
- // if(!index->IsConstOpnd()) {
- // sub temp, index, [headSegment + offset(size)]
- // sar temp, 31
- // and index, temp
- // }
- IR::LabelInstr *const indexGreaterThanLengthLabel = InsertLabel(true /* isHelper */, instr);
- LABELNAME(indexGreaterThanLengthLabel);
- IR::LabelInstr *const indexLessThanSizeLabel = InsertLabel(isInHelperBlock, instr);
- LABELNAME(indexLessThanSizeLabel);
- // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
- InsertBranch(Js::OpCode::BrNeq_A, indexGreaterThanLengthLabel, indexGreaterThanLengthLabel);
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // jmp indexLessThanSize
- // indexGreaterThanLength:
- InsertCompareBranch(
- indexValueOpnd,
- IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
- Js::OpCode::BrGe_A,
- true /* isUnsigned */,
- labelHelper,
- indexGreaterThanLengthLabel);
- InsertBranch(Js::OpCode::Br, indexLessThanSizeLabel, indexGreaterThanLengthLabel);
- // indexGreaterThanLength:
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
- // indexLessThanSize:
- InsertCompareBranch(
- indexValueOpnd,
- IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
- Js::OpCode::BrGe_A,
- true /* isUnsigned */,
- labelHelper,
- indexLessThanSizeLabel);
- CompileAssert(
- static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
- Js::DynamicObjectFlags::HasNoMissingValues);
- InsertAnd(
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IntConstOpnd::New(
- static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
- TyUint8,
- m_func,
- true),
- indexLessThanSizeLabel);
- // In speculative cases, we want to avoid a write to an array setting the length to something huge, which
- // would then allow subsequent reads to hit arbitrary memory (in the speculative path). This is done with
- // a mask generated from the difference between the index and the size. Since we should have already gone
- // to the helper in any case where this would execute, it's a functional no-op.
- // indexLessThanSize:
- // In speculative cases, we want to avoid a write to an array setting the length to something huge, which
- // would then allow subsequent reads to hit arbitrary memory (in the speculative path). This is done with
- // a mask generated from the difference between the index and the size. Since we should have already gone
- // to the helper in any case where this would execute, it's a functional no-op.
- // if(!index->IsConstOpnd()) {
- // sub temp, index, [headSegment + offset(size)]
- // sar temp, 31
- // and index, temp
- // }
- if (!indexValueOpnd->IsConstOpnd()
- && (baseValueType.IsLikelyTypedArray()
- ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore)
- : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayStore))
- || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayStore))
- || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayStore)))
- )
- )
- {
- IR::RegOpnd* temp = IR::RegOpnd::New(TyUint32, m_func);
- InsertSub(
- false,
- temp,
- indexValueOpnd,
- IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
- instr);
- InsertShift(Js::OpCode::Shr_A, false, temp, temp, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
- InsertAnd(indexValueOpnd, indexValueOpnd, temp, instr);
- }
- break;
- }
- }
- // CMP index, [headSegment + offset(size)]
- // JAE $helper
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, this->m_func);
- InsertCompareBranch(indexValueOpnd, indirOpnd, Js::OpCode::BrGe_A, true /* isUnsigned */, labelHelper, instr);
- } while(false);
- if(isPush)
- {
- IR::LabelInstr *const updateLengthLabel = InsertLabel(isInHelperBlock, instr);
- LABELNAME(updateLengthLabel);
- if(!doUpperBoundCheck && !headSegmentLengthOpnd)
- {
- // (headSegmentLength = [headSegment + offset(length)])
- headSegmentLengthOpnd =
- IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- // For push, it is guaranteed that (index >= length). We already know that (index < size), but we need to check if
- // (index > length) because in that case a missing value will be created and the missing value tracking in the array
- // needs to be updated.
- //
- // cmp index, headSegmentLength
- // je $updateLength
- // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
- // updateLength:
- InsertCompareBranch(
- indexValueOpnd,
- headSegmentLengthOpnd,
- Js::OpCode::BrEq_A,
- updateLengthLabel,
- updateLengthLabel);
- CompileAssert(
- static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
- Js::DynamicObjectFlags::HasNoMissingValues);
- InsertAnd(
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IntConstOpnd::New(
- static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
- TyUint8,
- m_func,
- true),
- updateLengthLabel);
- }
- if (baseValueType.IsArrayOrObjectWithArray())
- {
- // We didn't emit an array check, but if we are going to grow the array
- // We need to go to helper if there is an ES5 array/objectarray used as prototype
- GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, instr);
- }
- IR::Opnd *newLengthOpnd;
- IR::AutoReuseOpnd autoReuseNewLengthOpnd;
- if (indexValueOpnd->IsRegOpnd())
- {
- // LEA newLength, [index + 1]
- newLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- InsertAdd(false /* needFlags */, newLengthOpnd, indexValueOpnd, IR::IntConstOpnd::New(1, TyUint32, m_func), instr);
- }
- else
- {
- newLengthOpnd = IR::IntConstOpnd::New(value + 1, TyUint32, this->m_func);
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- }
- // This is a common enough case that we want to go through this path instead of the simpler one, since doing it this way is faster for preallocated but un-filled arrays.
- if (!!(bailOutKind & IR::BailOutOnInvalidatedArrayLength))
- {
- // If we'd increase the array length, go to the helper
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
- InsertCompareBranch(
- newLengthOpnd,
- indirOpnd,
- Js::OpCode::BrGt_A,
- true,
- labelHelper,
- instr);
- }
- // MOV [headSegment + offset(length)], newLength
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
- InsertMove(indirOpnd, newLengthOpnd, instr);
- // We've changed the head segment length, so we may need to change the head segment length opnd
- if (headSegmentLengthOpnd != nullptr && !headSegmentLengthOpnd->IsIndirOpnd())
- {
- InsertMove(headSegmentLengthOpnd, newLengthOpnd, instr);
- }
- if (checkArrayLengthOverflow)
- {
- // CMP newLength, [base + offset(length)]
- // JBE $segmentLengthIncreased
- Assert(labelSegmentLengthIncreased);
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
- InsertCompareBranch(
- newLengthOpnd,
- indirOpnd,
- Js::OpCode::BrLe_A,
- true /* isUnsigned */,
- labelSegmentLengthIncreased,
- instr);
- if(!isInHelperBlock)
- {
- InsertLabel(true /* isHelper */, instr);
- }
- }
- // MOV [base + offset(length)], newLength
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
- InsertMove(indirOpnd, newLengthOpnd, instr);
- if(returnLength)
- {
- if(newLengthOpnd->GetSize() != MachPtr)
- {
- newLengthOpnd = newLengthOpnd->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
- }
- // SHL newLength, AtomTag
- // INC newLength
- this->m_lowererMD.GenerateInt32ToVarConversion(newLengthOpnd, instr);
- // MOV dst, newLength
- InsertMove(instr->GetDst(), newLengthOpnd, instr);
- }
- // Calling code assumes that indirOpnd is initialized before labelSegmentLengthIncreased is reached
- if(labelSegmentLengthIncreased && labelSegmentLengthIncreased != labelDone)
- {
- // labelSegmentLengthIncreased:
- instr->InsertBefore(labelSegmentLengthIncreased);
- }
- // $done
- instr->InsertBefore(labelDone);
- }
- else // #else
- {
- if (checkArrayLengthOverflow)
- {
- if (*pIsTypedArrayElement && isStore)
- {
- IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- LABELNAME(labelInlineSet);
- //For positive index beyond length or negative index its essentially nop for typed array store
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
- true /* isUnsigned */,
- labelInlineSet,
- instr);
- // For typed array, call ToNumber before we fallThrough.
- if (instr->GetSrc1()->GetType() == TyVar && !instr->GetSrc1()->GetValueType().IsPrimitive())
- {
- // Enter an ophelper block
- IR::LabelInstr * opHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- LABELNAME(opHelper);
- instr->InsertBefore(opHelper);
- IR::Instr *toNumberInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
- toNumberInstr->SetSrc1(instr->GetSrc1());
- instr->InsertBefore(toNumberInstr);
- if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
- {
- // Bail out if this conversion triggers implicit calls.
- toNumberInstr = this->AddBailoutToHelperCallInstr(toNumberInstr, instr->GetBailOutInfo(), bailOutKind, instr);
- }
- LowerUnaryHelperMem(toNumberInstr, IR::HelperOp_ConvNumber_Full);
- }
- InsertBranch(Js::OpCode::Br, labelFallthrough, instr); //Jump to fallThrough
- instr->InsertBefore(labelInlineSet);
- }
- else
- {
- // JAE $helper
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
- true /* isUnsigned */,
- labelHelper,
- instr);
- }
- }
- EnsureObjectArrayLoaded();
- if (instr->m_opcode == Js::OpCode::InlineArrayPop)
- {
- Assert(!baseValueType.IsLikelyTypedArray());
- Assert(bailOutLabelInstr);
- if (indexValueOpnd->IsIntConstOpnd())
- {
- // indirOpnd = [headSegment + index + offset(elements)]
- IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
- // TODO: Assert(Math::FitsInDWord(offset));
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
- }
- else
- {
- // indirOpnd = [headSegment + offset(elements) + (index << scale)]
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
- indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
- }
- IR::Opnd * tmpDst = nullptr;
- IR::Opnd * dst = instr->GetDst();
- // Pop might not have a dst, if not don't worry about returning the last element. But we still have to
- // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
- if (dst || !baseValueType.HasNoMissingValues())
- {
- if (!dst)
- {
- dst = IR::RegOpnd::New(indirType, this->m_func);
- }
- else if (dst->AsRegOpnd()->m_sym == arrayOpnd->m_sym)
- {
- tmpDst = IR::RegOpnd::New(TyVar, this->m_func);
- dst = tmpDst;
- }
- // Use a mask to prevent arbitrary speculative reads
- // If you think this code looks highly similar to the code later in this function,
- // you'd be right. Unfortunately, I wasn't able to find a way to reduce duplication
- // here without significantly complicating the code structure.
- if (!headSegmentLengthOpnd)
- {
- headSegmentLengthOpnd =
- IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- IR::RegOpnd* localMaskOpnd = nullptr;
- #if TARGET_64
- IR::Opnd* lengthOpnd = nullptr;
- AnalysisAssert(headSegmentLengthOpnd != nullptr);
- lengthOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
- {
- IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
- instr->InsertBefore(instrMov);
- LowererMD::Legalize(instrMov);
- }
- if (lengthOpnd->GetSize() != MachPtr)
- {
- lengthOpnd = lengthOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
- }
- // MOV r1, [opnd + offset(type)]
- IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
- {
- IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
- instr->InsertBefore(instrMov);
- LowererMD::Legalize(instrMov);
- }
- if (indexValueRegOpnd->GetSize() != MachPtr)
- {
- indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
- }
- localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
- InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, instr);
- InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
- #else
- localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
- InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
- InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
- #endif
- // for pop we always do the masking before the load in cases where we load a value
- IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
- #if _M_ARM32_OR_ARM64
- if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
- {
- // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
- IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
- InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
- IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
- if (indirOpnd->GetOffset() != 0)
- {
- newIndir->SetOffset(indirOpnd->GetOffset());
- }
- indirOpnd = newIndir;
- }
- #endif
- IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
- InsertLea(loadAddr, indirOpnd, instr);
- InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
- indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
- // MOV dst, [head + offset]
- InsertMove(dst, indirOpnd, instr);
- //If the array has missing values, check for one
- if (!baseValueType.HasNoMissingValues())
- {
- InsertMissingItemCompareBranch(
- dst,
- Js::OpCode::BrEq_A,
- bailOutLabelInstr,
- instr);
- }
- }
- // MOV [head + offset], missing
- InsertMove(indirOpnd, GetMissingItemOpndForAssignment(indirType, m_func), instr);
- IR::Opnd *newLengthOpnd;
- IR::AutoReuseOpnd autoReuseNewLengthOpnd;
- if (indexValueOpnd->IsRegOpnd())
- {
- // LEA newLength, [index]
- newLengthOpnd = indexValueOpnd;
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- }
- else
- {
- newLengthOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- }
- //update segment length and array length
- // MOV [headSegment + offset(length)], newLength
- IR::IndirOpnd *lengthIndirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
- InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
- // We've changed the head segment length, so we may need to change the head segment length opnd
- if (headSegmentLengthOpnd != nullptr && !headSegmentLengthOpnd->IsIndirOpnd())
- {
- InsertMove(headSegmentLengthOpnd, newLengthOpnd, instr);
- }
- // MOV [base + offset(length)], newLength
- lengthIndirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
- InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
- if (tmpDst)
- {
- // The array opnd and the destination is the same, need to move the value in the tmp dst
- // to the actual dst
- InsertMove(instr->GetDst(), tmpDst, instr);
- }
- return indirOpnd;
- }
- } // #endif
- // Should we poison the load of the address to/from which the store/load happens?
- bool shouldPoisonLoad = maskOpnd != nullptr
- && (
- (!isStore && (!instr->IsSafeToSpeculate()) &&
- (baseValueType.IsLikelyTypedArray()
- ? CONFIG_FLAG_RELEASE(PoisonTypedArrayLoad)
- : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayLoad))
- || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayLoad))
- || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayLoad)))
- )
- )
- ||
- (isStore &&
- (baseValueType.IsLikelyTypedArray()
- ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore)
- : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayStore))
- || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayStore))
- || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayStore)))
- )
- )
- )
- ;
- // We have two exit paths for this function in the store case when we might grow the head
- // segment, due to tracking for missing elements. This unfortunately means that we need a
- // copy of the poisoning code on the other exit path, since the determination of the path
- // and the use of the path determination to decide whether we found the missing value are
- // things that have to happen on opposite sides of the poisoning.
- IR::Instr* insertForSegmentLengthIncreased = nullptr;
- if (shouldPoisonLoad && usingSegmentLengthIncreasedLabel)
- {
- insertForSegmentLengthIncreased = (*pLabelSegmentLengthIncreased)->m_next;
- }
- #if TARGET_32
- if (shouldPoisonLoad)
- {
- // Prevent index from being negative, which would break the poisoning
- if (indexValueOpnd->IsIntConstOpnd())
- {
- indexValueOpnd = IR::IntConstOpnd::New(value & INT32_MAX, TyUint32, m_func);
- }
- else
- {
- IR::RegOpnd* newIndexValueOpnd = IR::RegOpnd::New(TyUint32, m_func);
- InsertAnd(newIndexValueOpnd, indexValueOpnd, IR::IntConstOpnd::New(INT32_MAX, TyUint32, m_func), instr);
- if(insertForSegmentLengthIncreased != nullptr)
- {
- InsertAnd(newIndexValueOpnd, indexValueOpnd, IR::IntConstOpnd::New(INT32_MAX, TyUint32, m_func), insertForSegmentLengthIncreased);
- }
- indexValueOpnd = newIndexValueOpnd;
- }
- }
- #endif
- if (baseValueType.IsLikelyTypedArray())
- {
- if(!headSegmentOpnd)
- {
- // MOV headSegment, [base + offset(arrayBuffer)]
- int bufferOffset;
- bufferOffset = Js::Float64Array::GetOffsetOfBuffer();
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, bufferOffset, TyMachPtr, this->m_func);
- headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
- IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
- InsertMove(headSegmentOpnd, indirOpnd, instr);
- if(insertForSegmentLengthIncreased != nullptr)
- {
- InsertMove(headSegmentOpnd, indirOpnd, insertForSegmentLengthIncreased);
- }
- }
- // indirOpnd = [headSegment + index]
- if (indexValueOpnd->IsIntConstOpnd())
- {
- IntConstType offset = (value << indirScale);
- // TODO: Assert(Math::FitsInDWord(offset));
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
- }
- else
- {
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
- }
- }
- else if (indexValueOpnd->IsIntConstOpnd())
- {
- // indirOpnd = [headSegment + index + offset(elements)]
- IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
- // TODO: Assert(Math::FitsInDWord(offset));
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
- }
- else
- {
- // indirOpnd = [headSegment + offset(elements) + (index << scale)]
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
- indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
- }
- if (shouldPoisonLoad)
- {
- // Use a mask to prevent arbitrary speculative reads
- if (!headSegmentLengthOpnd
- #if ENABLE_FAST_ARRAYBUFFER
- && !baseValueType.IsLikelyOptimizedVirtualTypedArray()
- #endif
- )
- {
- if (baseValueType.IsLikelyTypedArray())
- {
- int lengthOffset;
- lengthOffset = GetArrayOffsetOfLength(baseValueType);
- headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- else
- {
- headSegmentLengthOpnd =
- IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- }
- IR::RegOpnd* localMaskOpnd = nullptr;
- #if TARGET_64
- IR::Opnd* lengthOpnd = nullptr;
- #if ENABLE_FAST_ARRAYBUFFER
- if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
- {
- lengthOpnd = IR::IntConstOpnd::New(MAX_ASMJS_ARRAYBUFFER_LENGTH >> indirScale, TyMachReg, m_func);
- }
- else
- #endif
- {
- AnalysisAssert(headSegmentLengthOpnd != nullptr);
- lengthOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
- IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
- instr->InsertBefore(instrMov);
- LowererMD::Legalize(instrMov);
- if (insertForSegmentLengthIncreased != nullptr)
- {
- IR::Instr * instrMov2 = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
- insertForSegmentLengthIncreased->InsertBefore(instrMov2);
- LowererMD::Legalize(instrMov2);
- }
- if (lengthOpnd->GetSize() != MachPtr)
- {
- lengthOpnd = lengthOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
- }
- }
- // MOV r1, [opnd + offset(type)]
- IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
- IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
- instr->InsertBefore(instrMov);
- LowererMD::Legalize(instrMov);
- if (insertForSegmentLengthIncreased != nullptr)
- {
- IR::Instr * instrMov2 = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
- insertForSegmentLengthIncreased->InsertBefore(instrMov2);
- LowererMD::Legalize(instrMov2);
- }
- if (indexValueRegOpnd->GetSize() != MachPtr)
- {
- indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
- }
- localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
- InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, instr);
- InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
- if (insertForSegmentLengthIncreased != nullptr)
- {
- InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, insertForSegmentLengthIncreased);
- InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), insertForSegmentLengthIncreased);
- }
- #else
- localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
- InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
- InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
- if (insertForSegmentLengthIncreased != nullptr)
- {
- InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, insertForSegmentLengthIncreased);
- InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), insertForSegmentLengthIncreased);
- }
- #endif
- if ((IRType_IsNativeInt(indirType) || indirType == TyVar) && !isStore)
- {
- *maskOpnd = localMaskOpnd;
- }
- else
- {
- // for float values, do the poisoning before the load to avoid needing slow floating point conversions
- IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
- #if _M_ARM32_OR_ARM64
- if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
- {
- // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
- IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
- InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
- IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
- if (insertForSegmentLengthIncreased != nullptr)
- {
- InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), insertForSegmentLengthIncreased);
- }
- if (indirOpnd->GetOffset() != 0)
- {
- newIndir->SetOffset(indirOpnd->GetOffset());
- }
- indirOpnd = newIndir;
- }
- #endif
- IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
- InsertLea(loadAddr, indirOpnd, instr);
- InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
- if (insertForSegmentLengthIncreased != nullptr)
- {
- InsertLea(loadAddr, indirOpnd, insertForSegmentLengthIncreased);
- InsertAnd(loadAddr, loadAddr, localMaskOpnd, insertForSegmentLengthIncreased);
- // We want to export a segmentLengthIncreasedLabel to the caller that is after the poisoning
- // code, since that's also the code that generates indirOpnd in this case.
- IR::LabelInstr* exportedSegmentLengthIncreasedLabel = IR::LabelInstr::New(Js::OpCode::Label, insertForSegmentLengthIncreased->m_func, (*pLabelSegmentLengthIncreased)->isOpHelper);
- LABELNAME(exportedSegmentLengthIncreasedLabel);
- insertForSegmentLengthIncreased->InsertBefore(exportedSegmentLengthIncreasedLabel);
- *pLabelSegmentLengthIncreased = exportedSegmentLengthIncreasedLabel;
- }
- indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
- }
- }
- return indirOpnd;
- }
- IR::BranchInstr*
- Lowerer::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
- {
- IR::Opnd* missingItemOpnd = GetMissingItemOpndForCompare(compareSrc->GetType(), m_func);
- if (compareSrc->IsFloat64())
- {
- Assert(compareSrc->IsRegOpnd() || compareSrc->IsIndirOpnd());
- return m_lowererMD.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr);
- }
- else
- {
- Assert(compareSrc->IsInt32() || compareSrc->IsVar());
- return InsertCompareBranch(missingItemOpnd, compareSrc, opcode, target, insertBeforeInstr, true);
- }
- }
- IR::RegOpnd *
- Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
- {
- if (!opnd->IsVar())
- {
- AssertMsg(opnd->GetSize() == 4, "This should be 32-bit wide");
- return opnd;
- }
- AssertMsg(!opnd->IsNotInt(), "An opnd we know is not an int should not try to untag it as it will always fail");
- if (opnd->m_sym->IsIntConst())
- {
- int32 constValue = opnd->m_sym->GetIntConstValue();
- IR::IntConstOpnd* constOpnd = IR::IntConstOpnd::New(constValue, TyInt32, this->m_func);
- IR::RegOpnd* regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- InsertMove(regOpnd, constOpnd, insertBeforeInstr);
- return regOpnd;
- }
- return m_lowererMD.GenerateUntagVar(opnd, labelFail, insertBeforeInstr, generateTagCheck && !opnd->IsTaggedInt());
- }
// Emit "TEST src, src / JEQ isZeroLabel": the branch is taken when opndSrc is zero,
// fall-through means non-zero. (Despite the name, callers use this as a null/zero
// check that jumps away on zero.)
void
Lowerer::GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * isZeroLabel, IR::Instr * insertBeforeInstr)
{
    InsertTestBranch(opndSrc, opndSrc, Js::OpCode::BrEq_A, isZeroLabel, insertBeforeInstr);
}
// Fast path for LdElem where the base is (likely) a JavascriptString: load the
// 16-bit character at the index and fetch the corresponding one-character string
// from the char-string cache. Returns false without emitting anything when profile
// data says the base/index are unsuitable; otherwise emits the fast path with slow
// cases branching to labelHelper and success jumping to labelFallThru, and returns true.
bool
Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr * labelFallThru)
{
    IR::IndirOpnd * indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

    // don't generate the fast path if the instance is not likely string
    if (!baseOpnd->GetValueType().IsLikelyString())
    {
        return false;
    }
    Assert(!baseOpnd->IsTaggedInt());

    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // Don't generate the fast path if the index operand is not likely int
    if (indexOpnd && !indexOpnd->GetValueType().IsLikelyInt())
    {
        return false;
    }

    // Make sure the instance is a string
    Assert(!indexOpnd || !indexOpnd->IsNotInt());
    GenerateStringTest(baseOpnd, ldElem, labelHelper);

    IR::Opnd * index32CmpOpnd;
    IR::RegOpnd * bufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseBufferOpnd(bufferOpnd, m_func);
    IR::IndirOpnd * charIndirOpnd;
    if (indexOpnd)
    {
        // Untag the var and generate the indir into the string buffer.
        // Scale of 1 (i.e. index * 2) because the buffer holds 16-bit char16 units.
        IR::RegOpnd * index32Opnd = GenerateUntagVar(indexOpnd, labelHelper, ldElem);
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, index32Opnd, 1, TyUint16, this->m_func);
        index32CmpOpnd = index32Opnd;
    }
    else
    {
        // Just use the offset to indirect into the string buffer
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, indirOpnd->GetOffset() * sizeof(char16), TyUint16, this->m_func);
        index32CmpOpnd = IR::IntConstOpnd::New((uint32)indirOpnd->GetOffset(), TyUint32, this->m_func);
    }

    // Check if the index is in range of the string length
    //  CMP [baseOpnd + offset(length)], indexOpnd     --  string length
    //  JBE $helper                                    -- unsigned compare, and string length are at most INT_MAX - 1
    //                                                 -- so that even if we have a negative index, this will fail
    IR::RegOpnd* lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(lengthOpnd, IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), ldElem);
    InsertCompareBranch(lengthOpnd, index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);

    // Load the string buffer and make sure it is not null
    //  MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
    //  TEST bufferOpnd, bufferOpnd
    //  JEQ $labelHelper
    indirOpnd = IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(bufferOpnd, indirOpnd, ldElem);
    GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);

    // NOTE(review): PoisonStringLoad appears to be a speculative-execution (Spectre-style)
    // load-poisoning mitigation — confirm against the flag's definition.
    IR::RegOpnd* maskOpnd = nullptr;
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        // Mask off the sign before loading so that poisoning will work for negative indices
        if (index32CmpOpnd->IsIntConstOpnd())
        {
            charIndirOpnd->SetOffset((index32CmpOpnd->AsIntConstOpnd()->AsUint32() & INT32_MAX) * sizeof(char16));
        }
        else
        {
            InsertAnd(index32CmpOpnd, index32CmpOpnd, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), ldElem);
        }

        // All bits in mask will be 1 for a valid index or 0 for an OOB index:
        // (index - length) is negative exactly when index < length, and the arithmetic
        // shift by 31 replicates the sign bit across the register.
        maskOpnd = IR::RegOpnd::New(TyInt32, m_func);
        InsertSub(false, maskOpnd, index32CmpOpnd, lengthOpnd, ldElem);
        InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), ldElem);
    }

    // Load the character and check if it is 7bit ASCI (which we have the cache for)
    //  MOV charOpnd, [bufferOpnd + index32Opnd]
    //  CMP charOpnd, 0x80
    //  JAE $helper
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
    InsertMove(charOpnd, charIndirOpnd, ldElem);
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        // Zero the loaded char when the index was out of bounds.
        InsertAnd(charOpnd, charOpnd, maskOpnd, ldElem);
    }
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
        Js::OpCode::BrGe_A, true, labelHelper, ldElem);

    // Load the string from the cache
    //  MOV charStringCache, <charStringCache, address>
    //  MOV stringOpnd, [charStringCache + charOpnd * 4]
    IR::RegOpnd * cacheOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseCacheOpnd(cacheOpnd, m_func);
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheOpnd, this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueCharStringCache), ldElem);

    // Check if we have created the string or not
    //  TEST stringOpnd, stringOpnd
    //  JE $helper
    IR::RegOpnd * stringOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseStringOpnd(stringOpnd, m_func);
    InsertMove(stringOpnd, IR::IndirOpnd::New(cacheOpnd, charOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), ldElem);
    GenerateNotZeroTest(stringOpnd, labelHelper, ldElem);

    InsertMove(ldElem->GetDst(), stringOpnd, ldElem);
    InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
    return true;
}
- bool
- Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
- {
- Assert(instrIsInHelperBlockRef);
- bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
- instrIsInHelperBlock = false;
- IR::LabelInstr * labelHelper;
- IR::LabelInstr * labelFallThru;
- IR::LabelInstr * labelBailOut = nullptr;
- IR::LabelInstr * labelMissingNative = nullptr;
- IR::Opnd *src1 = ldElem->GetSrc1();
- AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on LdElementI");
- IR::IndirOpnd * indirOpnd = src1->AsIndirOpnd();
- // From FastElemICommon:
- // TEST base, AtomTag -- check base not tagged int
- // JNE $helper
- // MOV r1, [base + offset(type)] -- check base isArray
- // CMP [r1 + offset(typeId)], TypeIds_Array
- // JNE $helper
- // TEST index, 1 -- index tagged int
- // JEQ $helper
- // MOV r2, index
- // SAR r2, Js::VarTag_Shift -- remoe atom tag
- // JS $helper -- exclude negative index
- // MOV r4, [base + offset(head)]
- // CMP r2, [r4 + offset(length)] -- bounds check
- // JAE $helper
- // MOV r3, [r4 + offset(elements)]
- // Generated here:
- // MOV dst, [r3 + r2]
- // TEST dst, dst
- // JNE $fallthrough
- if(ldElem->m_opcode == Js::OpCode::LdMethodElem && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray())
- {
- // Typed arrays don't return objects, so it's not worth generating a fast path for LdMethodElem. Calling the helper also
- // generates a better error message. Skip the fast path and just generate a helper call.
- return true;
- }
- labelFallThru = ldElem->GetOrCreateContinueLabel();
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- // If we know for sure (based on flow graph) we're loading from the arguments object, then ignore the (path-based) profile info.
- bool isNativeArrayLoad = !ldElem->DoStackArgsOpt() && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
- bool needMissingValueCheck = true;
- bool emittedFastPath = false;
- bool emitBailout = false;
- if (ldElem->DoStackArgsOpt())
- {
- emittedFastPath = GenerateFastArgumentsLdElemI(ldElem, labelFallThru);
- emitBailout = true;
- }
- else if (GenerateFastStringLdElem(ldElem, labelHelper, labelFallThru))
- {
- emittedFastPath = true;
- }
- else
- {
- IR::LabelInstr * labelCantUseArray = labelHelper;
- if (isNativeArrayLoad)
- {
- if (ldElem->GetDst()->GetType() == TyVar)
- {
- // Skip the fast path and just generate a helper call
- return true;
- }
- // Specialized native array lowering for LdElem requires that it is profiled. When not profiled, GlobOpt should not
- // have specialized it.
- Assert(ldElem->IsProfiledInstr());
- labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- labelCantUseArray = labelBailOut;
- }
- Js::FldInfoFlags flags = Js::FldInfo_NoInfo;
- if (ldElem->IsProfiledInstr())
- {
- flags = ldElem->AsProfiledInstr()->u.ldElemInfo->flags;
- }
- bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
- IR::Opnd* maskOpnd = nullptr;
- indirOpnd =
- GenerateFastElemICommon(
- ldElem,
- false,
- src1->AsIndirOpnd(),
- labelHelper,
- labelCantUseArray,
- labelFallThru,
- &isTypedArrayElement,
- &isStringIndex,
- &emitBailout,
- &maskOpnd,
- nullptr, /* pLabelSegmentLengthIncreased */
- true, /* checkArrayLengthOverflow */
- false, /* forceGenerateFastPath */
- false, /* returnLength */
- nullptr, /* bailOutLabelInstr */
- &indirOpndOverflowed,
- flags);
- IR::Opnd *dst = ldElem->GetDst();
- IRType dstType = dst->AsRegOpnd()->GetType();
- // The index is negative or not int.
- if (indirOpnd == nullptr)
- {
- // could have bailout kind BailOutOnArrayAccessHelperCall if indirOpnd overflows
- Assert(!(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);
- // don't check fast path without bailout because it might not be TypedArray
- if (indirOpndOverflowed && ldElem->HasBailOutInfo())
- {
- bool bailoutForOpndOverflow = false;
- const IR::BailOutKind bailOutKind = ldElem->GetBailOutKind();
- // return undefined for typed array if load dest is var, bailout otherwise
- if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
- {
- if (dst->IsVar())
- {
- // returns undefined in case of indirOpnd overflow which is consistent with behavior of interpreter
- IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueUndefined);
- InsertMove(dst, undefinedOpnd, ldElem);
- ldElem->FreeSrc1();
- ldElem->FreeDst();
- ldElem->Remove();
- emittedFastPath = true;
- }
- else
- {
- bailoutForOpndOverflow = true;
- }
- }
- if (bailoutForOpndOverflow || (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
- {
- IR::Opnd * constOpnd = nullptr;
- if (dst->IsFloat())
- {
- constOpnd = IR::FloatConstOpnd::New(Js::JavascriptNumber::NaN, TyFloat64, m_func);
- }
- else
- {
- constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
- }
- InsertMove(dst, constOpnd, ldElem);
- ldElem->FreeSrc1();
- ldElem->FreeDst();
- GenerateBailOut(ldElem, nullptr, nullptr);
- emittedFastPath = true;
- }
- return !emittedFastPath;
- }
- // The global optimizer should never type specialize a LdElem for which the index is not int or an integer constant
- // with a negative value. This would force an unconditional bail out on the main code path.
- else if (dst->IsVar())
- {
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
- this->m_func->GetJITFunctionBody()->GetDisplayName(),
- this->m_func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode));
- Output::Flush();
- }
- // We must be dealing with some unconventional index value. Don't emit fast path, but go directly to helper.
- emittedFastPath = false;
- return true;
- }
- else
- {
- AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
- Assert(dst->IsRegOpnd());
- // If global optimizer failed to notice the unconventional index and type specialized the dst,
- // there is nothing to do but bail out. This could happen if global optimizer's information based
- // on value tracking fails to recognize a non-integer index or a constant int index that is negative.
- // The bailout below ensures that we behave correctly in retail builds even under
- // these (unlikely) conditions. To satisfy the downstream code we must populate the type specialized operand
- // with some made up values, even though we will unconditionally bail out here and the values will never be
- // used.
- IR::IntConstOpnd *constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
- InsertMove(dst, constOpnd, ldElem);
- ldElem->FreeSrc1();
- ldElem->FreeDst();
- GenerateBailOut(ldElem, nullptr, nullptr);
- return false;
- }
- }
- const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
- const ValueType baseValueType(src1->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
- if ((ldElem->HasBailOutInfo() &&
- ldElem->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
- ldElem->GetBailOutInfo()->bailOutOffset <= ldElem->GetByteCodeOffset() &&
- dst->IsEqual(src1->AsIndirOpnd()->GetBaseOpnd())) ||
- (src1->AsIndirOpnd()->GetIndexOpnd() && dst->IsEqual(src1->AsIndirOpnd()->GetIndexOpnd())))
- {
- // This is a pre-op bailout where the dst is the same as one of the srcs. The dst may be trashed before bailing out,
- // but since the operation will be processed again in the interpreter, src values need to be kept intact. Use a
- // temporary dst until after the operation is complete.
- IR::Instr *instrSink = ldElem->SinkDst(Js::OpCode::Ld_A);
- // The sink instruction needs to be on the fall-through path
- instrSink->Unlink();
- labelFallThru->InsertAfter(instrSink);
- LowererMD::ChangeToAssign(instrSink);
- dst = ldElem->GetDst();
- }
- if (isTypedArrayElement)
- {
- // For typedArrays, convert the loaded element to the appropriate type
- IR::RegOpnd *reg;
- IR::AutoReuseOpnd autoReuseReg;
- Assert(dst->IsRegOpnd());
- if(indirOpnd->IsFloat())
- {
- AssertMsg((dstType == TyFloat64) || (dstType == TyVar), "For Float32Array LdElemI's dst should be specialized to TyFloat64 or not at all.");
- if(indirOpnd->IsFloat32())
- {
- // MOVSS reg32.f32, indirOpnd.f32
- IR::RegOpnd *reg32 = IR::RegOpnd::New(TyFloat32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg32(reg32, m_func);
- InsertMove(reg32, indirOpnd, ldElem);
- // CVTPS2PD dst/reg.f64, reg32.f64
- reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
- autoReuseReg.Initialize(reg, m_func);
- InsertConvertFloat32ToFloat64(reg, reg32, ldElem);
- }
- else
- {
- Assert(indirOpnd->IsFloat64());
- // MOVSD dst/reg.f64, indirOpnd.f64
- reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
- autoReuseReg.Initialize(reg, m_func);
- InsertMove(reg, indirOpnd, ldElem);
- }
- if (dstType != TyFloat64)
- {
- // Convert reg.f64 to var
- m_lowererMD.SaveDoubleToVar(dst->AsRegOpnd(), reg, ldElem, ldElem);
- }
- #if FLOATVAR
- // For NaNs, go to the helper to guarantee we don't have an illegal NaN
- // TODO(magardn): move this to MD code.
- #if _M_X64
- // UCOMISD reg, reg
- {
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::UCOMISD, this->m_func);
- instr->SetSrc1(reg);
- instr->SetSrc2(reg);
- ldElem->InsertBefore(instr);
- }
- // JP $helper
- {
- IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::JP, labelHelper, this->m_func);
- ldElem->InsertBefore(instr);
- }
- #elif _M_ARM64
- // FCMP reg, reg
- {
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::FCMP, this->m_func);
- instr->SetSrc1(reg);
- instr->SetSrc2(reg);
- ldElem->InsertBefore(instr);
- }
- // BVS $helper
- {
- IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
- ldElem->InsertBefore(instr);
- }
- #endif
- #endif
- if(dstType == TyFloat64)
- {
- emitBailout = true;
- }
- }
- else
- {
- AssertMsg((dstType == TyInt32) || (dstType == TyVar), "For Int/UintArray LdElemI's dst should be specialized to TyInt32 or not at all.");
- reg = dstType == TyInt32 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyInt32, this->m_func);
- autoReuseReg.Initialize(reg, m_func);
- // Int32 and Uint32 arrays could overflow an int31, but the others can't
- if (indirOpnd->GetType() != TyUint32
- #if !INT32VAR
- && indirOpnd->GetType() != TyInt32
- #endif
- )
- {
- reg->SetValueType(ValueType::GetTaggedInt()); // Fits as a tagged-int
- }
- // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
- IR::Instr* instrMov = InsertMove(reg, indirOpnd, ldElem);
- if (maskOpnd)
- {
- #if TARGET_64
- if (maskOpnd->GetSize() != reg->GetType())
- {
- maskOpnd = maskOpnd->UseWithNewType(reg->GetType(), m_func)->AsRegOpnd();
- }
- #endif
- instrMov = InsertAnd(reg, reg, maskOpnd, ldElem);
- }
- if (dstType == TyInt32)
- {
- instrMov->dstIsTempNumber = ldElem->dstIsTempNumber;
- instrMov->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
- if (indirOpnd->GetType() == TyUint32)
- {
- // TEST dst, dst
- // JSB $helper (bailout)
- InsertCompareBranch(
- reg,
- IR::IntConstOpnd::New(0, TyUint32, this->m_func, /* dontEncode = */ true),
- Js::OpCode::BrLt_A,
- labelHelper,
- ldElem);
- }
- emitBailout = true;
- }
- else
- {
- // MOV dst, reg
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::ToVar, dst, reg, this->m_func);
- instr->dstIsTempNumber = ldElem->dstIsTempNumber;
- instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
- ldElem->InsertBefore(instr);
- // Convert dst to var
- m_lowererMD.EmitLoadVar(instr, /* isFromUint32 = */ (indirOpnd->GetType() == TyUint32));
- }
- }
- // JMP $fallthrough
- InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
- emittedFastPath = true;
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
- this->m_func->GetJITFunctionBody()->GetDisplayName(),
- this->m_func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode),
- baseValueTypeStr,
- (!dst->IsVar() ? _u("specialized") : _u("not specialized")));
- Output::Print(_u("\n"));
- Output::Flush();
- }
- }
- else
- {
- // MOV dst, indirOpnd
- InsertMove(dst, indirOpnd, ldElem);
- if (maskOpnd)
- {
- #if TARGET_64
- if (maskOpnd->GetSize() != dst->GetType())
- {
- maskOpnd = maskOpnd->UseWithNewType(dst->GetType(), m_func)->AsRegOpnd();
- }
- #endif
- InsertAnd(dst, dst, maskOpnd, ldElem);
- }
- // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
- // we don't need to do any further checks in that case
- // For LdMethodElem, if the loaded value is a tagged number, the error message generated by the helper call is
- // better than if we were to just try to call the number. Also, the call arguments need to be evaluated before
- // throwing the error, so just test whether it's an object and jump to helper if it's not.
- const bool needObjectTest = !isStringIndex && !isNativeArrayLoad && ldElem->m_opcode == Js::OpCode::LdMethodElem;
- needMissingValueCheck =
- !isStringIndex && !(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues());
- if(needMissingValueCheck)
- {
- // TEST dst, dst
- // JEQ $helper | JNE $fallthrough
- InsertMissingItemCompareBranch(
- dst,
- needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
- needObjectTest ? labelHelper : labelFallThru,
- ldElem);
- if (isNativeArrayLoad)
- {
- Assert(!needObjectTest);
- Assert(labelHelper != labelBailOut);
- if(ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined())
- {
- // We're going to bail out trying to load "missing value" into a type-spec'd opnd.
- // Branch to a point where we'll convert the array so that we don't keep bailing here.
- // (Gappy arrays are not well-suited to nativeness.)
- labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- InsertBranch(Js::OpCode::Br, labelMissingNative, ldElem);
- }
- else
- {
- // If the value has not been profiled to be undefined at some point, jump directly to bail out
- InsertBranch(Js::OpCode::Br, labelBailOut, ldElem);
- }
- }
- }
- if(needObjectTest)
- {
- // GenerateObjectTest(dst)
- // JIsObject $fallthrough
- m_lowererMD.GenerateObjectTest(dst, ldElem, labelFallThru, true);
- }
- else if(!needMissingValueCheck)
- {
- // JMP $fallthrough
- InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
- }
- emittedFastPath = true;
- }
- }
- // $helper:
- // bailout or caller generated helper call
- // $fallthru:
- if (!emittedFastPath)
- {
- labelHelper->isOpHelper = false;
- }
- ldElem->InsertBefore(labelHelper);
- instrIsInHelperBlock = true;
- if (isNativeArrayLoad)
- {
- Assert(ldElem->HasBailOutInfo());
- Assert(labelHelper != labelBailOut);
- // Transform the original instr:
- //
- // $helper:
- // dst = LdElemI_A src (BailOut)
- // $fallthrough:
- //
- // to:
- //
- // b $fallthru <--- we get here if we loaded a valid element directly
- // $helper:
- // dst = LdElemI_A src
- // cmp dst, MissingItem
- // bne $fallthrough
- // $bailout:
- // BailOut
- // $fallthrough:
- LowerOneBailOutKind(ldElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
- IR::Instr *const insertBeforeInstr = ldElem->m_next;
- // Do missing value check on value returned from helper so that we don't have to check the index against
- // array length. (We already checked it above against the segment length.)
- bool hasBeenUndefined = ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined();
- if (hasBeenUndefined)
- {
- if(!emitBailout)
- {
- if (labelMissingNative == nullptr)
- {
- labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- #if DBG
- labelMissingNative->m_noLazyHelperAssert = true;
- #endif
- }
- InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr);
- }
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- if(labelMissingNative)
- {
- // We're going to bail out on a load from a gap, but convert the array to Var first, so we don't just
- // bail here over and over. Gappy arrays are not well suited to nativeness.
- // NOTE: only emit this call if the profile tells us that this has happened before ("hasBeenUndefined").
- // Emitting this in Navier-Stokes brutalizes the score.
- insertBeforeInstr->InsertBefore(labelMissingNative);
- IR::JnHelperMethod helperMethod;
- indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
- if (indirOpnd->GetBaseOpnd()->GetValueType().HasIntElements())
- {
- helperMethod = IR::HelperIntArr_ToVarArray;
- }
- else
- {
- Assert(indirOpnd->GetBaseOpnd()->GetValueType().HasFloatElements());
- helperMethod = IR::HelperFloatArr_ToVarArray;
- }
- m_lowererMD.LoadHelperArgument(insertBeforeInstr, indirOpnd->GetBaseOpnd());
- IR::Instr *instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
- instrHelper->SetSrc1(IR::HelperCallOpnd::New(helperMethod, m_func));
- insertBeforeInstr->InsertBefore(instrHelper);
- m_lowererMD.LowerCall(instrHelper, 0);
- }
- }
- else
- {
- if(!emitBailout)
- {
- InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr);
- }
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- }
- insertBeforeInstr->InsertBefore(labelBailOut);
- }
- if (emitBailout)
- {
- ldElem->UnlinkSrc1();
- ldElem->UnlinkDst();
- GenerateBailOut(ldElem, nullptr, nullptr);
- }
- return !emitBailout;
- }
- IR::Opnd *
- Lowerer::GetMissingItemOpnd(IRType type, Func *func)
- {
- if (type == TyVar)
- {
- return IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, func, true);
- }
- if (type == TyInt32)
- {
- return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true);
- }
- AssertMsg(false, "Only expecting TyVar and TyInt32 in Lowerer::GetMissingItemOpnd");
- __assume(false);
- }
- IR::Opnd*
- Lowerer::GetMissingItemOpndForAssignment(IRType type, Func *func)
- {
- switch (type)
- {
- case TyVar:
- case TyInt32:
- return GetMissingItemOpnd(type, func);
- case TyFloat64:
- return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyFloat64, func);
- default:
- AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForAssignment");
- __assume(false);
- }
- }
- IR::Opnd *
- Lowerer::GetMissingItemOpndForCompare(IRType type, Func *func)
- {
- switch (type)
- {
- case TyVar:
- case TyInt32:
- return GetMissingItemOpnd(type, func);
- case TyFloat64:
- #if TARGET_64
- return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint64, func);
- #else
- return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint32, func);
- #endif
- default:
- AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForCompare");
- __assume(false);
- }
- }
- bool
- Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
- {
- Assert(instrIsInHelperBlockRef);
- bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
- instrIsInHelperBlock = false;
- IR::LabelInstr * labelHelper;
- IR::LabelInstr * labelSegmentLengthIncreased;
- IR::LabelInstr * labelFallThru;
- IR::LabelInstr * labelBailOut = nullptr;
- IR::Opnd *dst = stElem->GetDst();
- IR::IndirOpnd * indirOpnd = dst->AsIndirOpnd();
- AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
- // From FastElemICommon:
- // TEST base, AtomTag -- check base not tagged int
- // JNE $helper
- // MOV r1, [base + offset(type)] -- check base isArray
- // CMP [r1 + offset(typeId)], TypeIds_Array
- // JNE $helper
- // TEST index, 1 -- index tagged int
- // JEQ $helper
- // MOV r2, index
- // SAR r2, Js::VarTag_Shift -- remove atom tag
- // JS $helper -- exclude negative index
- // MOV r4, [base + offset(head)]
- // CMP r2, [r4 + offset(length)] -- bounds check
- // JB $done
- // CMP r2, [r4 + offset(size)] -- chunk has room?
- // JAE $helper
- // LEA r5, [r2 + 1]
- // MOV [r4 + offset(length)], r5 -- update length on chunk
- // CMP r5, [base + offset(length)]
- // JBE $done
- // MOV [base + offset(length)], r5 -- update length on array
- // $done
- // LEA r3, [r4 + offset(elements)]
- // Generated here.
- // MOV [r3 + r2], src
- labelFallThru = stElem->GetOrCreateContinueLabel();
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- bool emitBailout = false;
- bool isNativeArrayStore = indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
- IR::LabelInstr * labelCantUseArray = labelHelper;
- if (isNativeArrayStore)
- {
- if (stElem->GetSrc1()->GetType() != GetArrayIndirType(indirOpnd->GetBaseOpnd()->GetValueType()))
- {
- // Skip the fast path and just generate a helper call
- return true;
- }
- if(stElem->HasBailOutInfo())
- {
- const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
- if (bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
- {
- labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- labelCantUseArray = labelBailOut;
- }
- }
- }
- Js::FldInfoFlags flags = Js::FldInfo_NoInfo;
- if (stElem->IsProfiledInstr())
- {
- flags = stElem->AsProfiledInstr()->u.stElemInfo->flags;
- }
- bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
- IR::Opnd* maskOpnd = nullptr;
- indirOpnd =
- GenerateFastElemICommon(
- stElem,
- true,
- indirOpnd,
- labelHelper,
- labelCantUseArray,
- labelFallThru,
- &isTypedArrayElement,
- &isStringIndex,
- &emitBailout,
- &maskOpnd,
- &labelSegmentLengthIncreased,
- true, /* checkArrayLengthOverflow */
- false, /* forceGenerateFastPath */
- false, /* returnLength */
- nullptr, /* bailOutLabelInstr */
- &indirOpndOverflowed,
- flags);
- IR::Opnd *src = stElem->GetSrc1();
- const IR::AutoReuseOpnd autoReuseSrc(src, m_func);
- // The index is negative or not int.
- if (indirOpnd == nullptr)
- {
- Assert(!(stElem->HasBailOutInfo() && stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);
- if (indirOpndOverflowed && stElem->HasBailOutInfo())
- {
- bool emittedFastPath = false;
- const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
- // ignore StElemI in case of indirOpnd overflow only for typed array which is consistent with behavior of interpreter
- if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
- {
- stElem->FreeSrc1();
- stElem->FreeDst();
- stElem->Remove();
- emittedFastPath = true;
- }
- if (!emittedFastPath && (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
- {
- stElem->FreeSrc1();
- stElem->FreeDst();
- GenerateBailOut(stElem, nullptr, nullptr);
- emittedFastPath = true;
- }
- return !emittedFastPath;
- }
- // The global optimizer should never type specialize a StElem for which we know the index is not int or is a negative
- // int constant. This would result in an unconditional bailout on the main code path.
- else if (src->IsVar())
- {
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
- this->m_func->GetJITFunctionBody()->GetDisplayName(),
- this->m_func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode));
- Output::Flush();
- }
- // We must be dealing with some atypical index value. Don't emit fast path, but go directly to helper.
- return true;
- }
- else
- {
- // If global optimizer failed to notice the unconventional index and type specialized the src,
- // there is nothing to do but bail out. We should never hit this code path, unless the global optimizer's conditions
- // for not specializing the instruction don't match the lowerer's conditions for not emitting the array checks (see above).
- // This could happen if global optimizer's information based on value tracking fails to recognize a non-integer index or
- // a constant int index that is negative. The bailout below ensures that we behave correctly in retail builds even under
- // these (unlikely) conditions.
- AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
- stElem->FreeSrc1();
- stElem->FreeDst();
- GenerateBailOut(stElem, nullptr, nullptr);
- return false;
- }
- }
- const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
- const ValueType baseValueType(dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
- if (isTypedArrayElement)
- {
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
- this->m_func->GetJITFunctionBody()->GetDisplayName(),
- this->m_func->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode),
- baseValueTypeStr,
- (!src->IsVar() ? _u("specialized") : _u("not specialized")));
- Output::Print(_u("\n"));
- Output::Flush();
- }
- ObjectType objectType = baseValueType.GetObjectType();
- if(indirOpnd->IsFloat())
- {
- if (src->GetType() == TyFloat64)
- {
- IR::RegOpnd *const regSrc = src->AsRegOpnd();
- if (indirOpnd->IsFloat32())
- {
- // CVTSD2SS reg.f32, regSrc.f64 -- Convert regSrc from f64 to f32
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyFloat32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- InsertConvertFloat64ToFloat32(reg, regSrc, stElem);
- // MOVSS indirOpnd, reg
- InsertMove(indirOpnd, reg, stElem, false);
- }
- else
- {
- // MOVSD indirOpnd, regSrc
- InsertMove(indirOpnd, regSrc, stElem, false);
- }
- emitBailout = true;
- }
- else
- {
- Assert(src->GetType() == TyVar);
- // MOV reg, src
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyVar, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- InsertMove(reg, src, stElem);
- // Convert to float, and assign to indirOpnd
- if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
- {
- IR::RegOpnd* dstReg = IR::RegOpnd::New(indirOpnd->GetType(), this->m_func);
- m_lowererMD.EmitLoadFloat(dstReg, reg, stElem, stElem, labelHelper);
- InsertMove(indirOpnd, dstReg, stElem);
- }
- else
- {
- m_lowererMD.EmitLoadFloat(indirOpnd, reg, stElem, stElem, labelHelper);
- }
- }
- }
- else if (objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray)
- {
- Assert(indirOpnd->GetType() == TyUint8);
- IR::RegOpnd *regSrc;
- IR::AutoReuseOpnd autoReuseRegSrc;
- if(src->IsRegOpnd())
- {
- regSrc = src->AsRegOpnd();
- }
- else
- {
- regSrc = IR::RegOpnd::New(StackSym::New(src->GetType(), m_func), src->GetType(), m_func);
- autoReuseRegSrc.Initialize(regSrc, m_func);
- InsertMove(regSrc, src, stElem);
- }
- IR::Opnd *bitMaskOpnd;
- IRType srcType = regSrc->GetType();
- if ((srcType == TyFloat64) || (srcType == TyInt32))
- {
- // if (srcType == TyInt32) {
- // TEST regSrc, ~255
- // JE $storeValue
- // JSB $handleNegative
- // MOV indirOpnd, 255
- // JMP $fallThru
- // $handleNegative [isHelper = false]
- // MOV indirOpnd, 0
- // JMP $fallThru
- // $storeValue
- // MOV indirOpnd, regSrc
- // }
- // else {
- // MOVSD regTmp, regSrc
- // ADDSD regTmp, 0.5
- // CVTTSD2SI regOpnd, regTmp
- // TEST regOpnd, ~255
- // JE $storeValue
- // $handleOutOfBounds [isHelper = true]
- // COMISD regSrc, [&FloatZero]
- // JB $handleNegative
- // MOV regOpnd, 255
- // JMP $storeValue
- // $handleNegative [isHelper = true]
- // MOV regOpnd, 0
- // $storeValue
- // MOV indirOpnd, regOpnd
- // }
- // $fallThru
- IR::RegOpnd *regOpnd;
- IR::AutoReuseOpnd autoReuseRegOpnd;
- if (srcType == TyInt32)
- {
- // When srcType == TyInt32 we will never call the helper and we will never
- // modify the regOpnd. Therefore, it's okay to use regSrc directly, and it
- // reduces register pressure.
- regOpnd = regSrc;
- }
- else
- {
- #ifdef _M_IX86
- AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GlobOpt shouldn't have specialized Uint8ClampedArray StElem to float64 if SSE2 is unavailable.");
- #endif
- regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- autoReuseRegOpnd.Initialize(regOpnd, m_func);
- Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray);
- // Uint8ClampedArray follows IEEE 754 rounding rules for ties which round up
- // odd integers and round down even integers. Both ties result in the nearest
- // even integer value.
- //
- // CVTSD2SI regOpnd, regSrc
- LowererMD::InsertConvertFloat64ToInt32(RoundModeHalfToEven, regOpnd, regSrc, stElem);
- }
- IR::LabelInstr *labelStoreValue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- #ifndef _M_ARM
- // TEST regOpnd, ~255
- // JE $storeValue
- bitMaskOpnd = IR::IntConstOpnd::New(~255, TyInt32, this->m_func, true);
- InsertTestBranch(regOpnd, bitMaskOpnd, Js::OpCode::BrEq_A, labelStoreValue, stElem);
- #else // ARM
- // Special case for ARM, a shift may be better
- //
- // ASRS tempReg, src, 8
- // BEQ $inlineSet
- InsertShiftBranch(
- Js::OpCode::Shr_A,
- IR::RegOpnd::New(TyInt32, this->m_func),
- regOpnd,
- IR::IntConstOpnd::New(8, TyInt8, this->m_func),
- Js::OpCode::BrEq_A,
- labelStoreValue,
- stElem);
- #endif
- IR::LabelInstr *labelHandleNegative = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, srcType == TyFloat64);
- if (srcType == TyInt32)
- {
- // JSB $handleNegativeOrOverflow
- InsertBranch(
- LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A),
- labelHandleNegative,
- stElem);
- // MOV IndirOpnd.u8, 255
- InsertMove(indirOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
- // JMP $fallThru
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- // $handleNegative [isHelper = false]
- stElem->InsertBefore(labelHandleNegative);
- // MOV IndirOpnd.u8, 0
- InsertMove(indirOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
- // JMP $fallThru
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- }
- else
- {
- Assert(regOpnd != regSrc);
- // This label is just to ensure the following code is moved to the helper block.
- // $handleOutOfBounds [isHelper = true]
- IR::LabelInstr *labelHandleOutOfBounds = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- stElem->InsertBefore(labelHandleOutOfBounds);
- // COMISD regSrc, FloatZero
- // JB labelHandleNegative
- IR::MemRefOpnd * zeroOpnd = IR::MemRefOpnd::New(this->m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyMachDouble, this->m_func);
- InsertCompareBranch(regSrc, zeroOpnd, Js::OpCode::BrNotGe_A, labelHandleNegative, stElem);
- // MOV regOpnd, 255
- InsertMove(regOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
- // JMP $storeValue
- InsertBranch(Js::OpCode::Br, labelStoreValue, stElem);
- // $handleNegative [isHelper = true]
- stElem->InsertBefore(labelHandleNegative);
- // MOV regOpnd, 0
- InsertMove(regOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
- }
- // $storeValue
- stElem->InsertBefore(labelStoreValue);
- // MOV IndirOpnd.u8, regOpnd.u8
- InsertMove(indirOpnd, regOpnd, stElem);
- emitBailout = true;
- }
- else
- {
- Assert(srcType == TyVar);
- #if INT32VAR
- bitMaskOpnd = IR::AddrOpnd::New((Js::Var)~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), IR::AddrOpndKindConstantVar, this->m_func, true);
- #else
- bitMaskOpnd = IR::IntConstOpnd::New(~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), TyMachReg, this->m_func, true);
- #endif
- // Note: We are assuming that if no bits other than ~(TaggedInt(255)) are 1, that we have a tagged
- // int value between 0 - 255.
- // #if INT32VAR
- // This works for pointers because tagged int bit can't be on, and first 64k are not valid addresses
- // This works for floats because a valid float would have one of the upper 13 bits on.
- // #else
- // Any pointer is larger than 512 because first 64k memory is reserved by the OS
- // #endif
- IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- #ifndef _M_ARM
- // TEST src, ~(TaggedInt(255)) -- Check for tagged int >= 255 and <= 0
- // JEQ $inlineSet
- InsertTestBranch(regSrc, bitMaskOpnd, Js::OpCode::BrEq_A, labelInlineSet, stElem);
- #else // ARM
- // Special case for ARM, a shift may be better
- //
- // ASRS tempReg, src, 8
- // BEQ $inlineSet
- InsertShiftBranch(
- Js::OpCode::Shr_A,
- IR::RegOpnd::New(TyInt32, this->m_func),
- regSrc,
- IR::IntConstOpnd::New(8, TyInt8, this->m_func),
- Js::OpCode::BrEq_A,
- labelInlineSet,
- stElem);
- #endif
- // Uint8ClampedArray::DirectSetItem(array, index, value);
- // Inserting a helper call. Make sure it observes the main instructions's requirements regarding implicit calls.
- if (!instrIsInHelperBlock)
- {
- stElem->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, m_func, true));
- }
- if (stElem->HasBailOutInfo() && (stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
- {
- // Bail out instead of doing the helper call.
- Assert(labelHelper);
- this->InsertBranch(Js::OpCode::Br, labelHelper, stElem);
- }
- else
- {
- IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
- stElem->InsertBefore(instr);
- if (stElem->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(stElem->GetBailOutKind()))
- {
- // Bail out if this helper triggers implicit calls.
- instr = this->AddBailoutToHelperCallInstr(instr, stElem->GetBailOutInfo(), stElem->GetBailOutKind(), stElem);
- }
- m_lowererMD.LoadHelperArgument(instr, regSrc);
- IR::Opnd *indexOpnd = indirOpnd->GetIndexOpnd();
- if (indexOpnd == nullptr)
- {
- if (indirOpnd->GetOffset() == 0)
- {
- // There are two ways that we can get an indirOpnd with no index and 0 offset.
- // The first is that we're storing to element 0 in the array by constant offset.
- // The second is that we got a pointer back that has spectre masking, so it's going
- // to not have the appropriate index into the array. In that case, we need to regen
- // the index.
- // The plan is
- // 1. get the backing buffer pointer
- // 2. subtract that from the indexOpnd to get the numeric index
- // This is unfortunately slightly worse perf for constant writes of vars to index 0
- // of Uint8ClampedArrays, but that's hopefully uncommon enough that the impact will
- // be minimal
- // MOV backingBufferOpnd, [base + offset(arrayBuffer)]
- // SUB indexOpnd, backingBufferOpnd
- int bufferOffset = GetArrayOffsetOfHeadSegment(baseValueType);
- IR::IndirOpnd* arrayBufferOpnd = IR::IndirOpnd::New(stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd(), bufferOffset, TyMachPtr, this->m_func);
- IR::RegOpnd* backingBufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- InsertMove(backingBufferOpnd, arrayBufferOpnd, instr);
- IR::RegOpnd* tempIndexOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- InsertSub(false, tempIndexOpnd, indirOpnd->GetBaseOpnd(), backingBufferOpnd, instr);
- indexOpnd = tempIndexOpnd->UseWithNewType(TyInt32, this->m_func);
- }
- else
- {
- indexOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyInt32, this->m_func);
- }
- }
- else
- {
- Assert(indirOpnd->GetOffset() == 0);
- }
- m_lowererMD.LoadHelperArgument(instr, indexOpnd);
- m_lowererMD.LoadHelperArgument(instr, stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd());
- Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedMixedArray || objectType == ObjectType::Uint8ClampedVirtualArray);
- m_lowererMD.ChangeToHelperCall(instr, IR::JnHelperMethod::HelperUint8ClampedArraySetItem);
- // JMP $fallThrough
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- }
- //$inlineSet
- stElem->InsertBefore(labelInlineSet);
- IR::RegOpnd *regOpnd;
- IR::AutoReuseOpnd autoReuseRegOpnd;
- #if INT32VAR
- regOpnd = regSrc;
- #else
- // MOV r1, src
- // SAR r1, 1
- regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- autoReuseRegOpnd.Initialize(regOpnd, m_func);
- InsertShift(
- Js::OpCode::Shr_A,
- false /* needFlags */,
- regOpnd,
- regSrc,
- IR::IntConstOpnd::New(1, TyInt8, this->m_func),
- stElem);
- #endif
- // MOV IndirOpnd.u8, reg.u8
- InsertMove(indirOpnd, regOpnd, stElem);
- }
- }
- else
- {
- if (src->IsInt32())
- {
- // MOV indirOpnd, src
- InsertMove(indirOpnd, src, stElem);
- emitBailout = true;
- }
- else if (src->IsFloat64())
- {
- AssertMsg(indirOpnd->GetType() == TyUint32, "Only StElemI to Uint32Array could be specialized to float64.");
- #ifdef _M_IX86
- AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GloOpt shouldn't have specialized Uint32Array StElemI to float64 if SSE2 is unavailable.");
- #endif
- bool bailOutOnHelperCall = stElem->HasBailOutInfo() ? !!(stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) : false;
- if (bailOutOnHelperCall)
- {
- if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
- {
- // Array access helper call removal is already off for some reason. Prevent trying to rejit again
- // because it won't help and the same thing will happen again. Just abort jitting this function.
- if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
- {
- Output::Print(_u(" Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
- Output::Flush();
- }
- throw Js::OperationAbortedException();
- }
- throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
- }
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- m_lowererMD.EmitFloatToInt(reg, src, stElem, stElem, labelHelper);
- // MOV indirOpnd, reg
- InsertMove(indirOpnd, reg, stElem);
- emitBailout = true;
- }
- else
- {
- Assert(src->IsVar());
- if(src->IsAddrOpnd())
- {
- IR::AddrOpnd *const addrSrc = src->AsAddrOpnd();
- Assert(addrSrc->IsVar());
- Assert(Js::TaggedInt::Is(addrSrc->m_address));
- // MOV indirOpnd, intValue
- InsertMove(
- indirOpnd,
- IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(addrSrc->m_address), TyInt32, m_func),
- stElem);
- }
- else
- {
- IR::RegOpnd *const regSrc = src->AsRegOpnd();
- // FromVar reg, Src
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- IR::Instr * instr = IR::Instr::New(Js::OpCode::FromVar, reg, regSrc, stElem->m_func);
- stElem->InsertBefore(instr);
- // Convert reg to int32
- // Note: ToUint32 is implemented as (uint32)ToInt32()
- IR::BailOutKind bailOutKind = stElem->HasBailOutInfo() ? stElem->GetBailOutKind() : IR::BailOutInvalid;
- if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
- {
- instr = this->AddBailoutToHelperCallInstr(instr, stElem->GetBailOutInfo(), bailOutKind, stElem);
- }
- bool bailOutOnHelperCall = !!(bailOutKind & IR::BailOutOnArrayAccessHelperCall);
- m_lowererMD.EmitLoadInt32(instr, true /*conversionFromObjectAllowed*/, bailOutOnHelperCall, labelHelper);
- // MOV indirOpnd, reg
- InsertMove(indirOpnd, reg, stElem);
- }
- }
- }
- }
- else
- {
- if(labelSegmentLengthIncreased)
- {
- IR::Instr *const insertBeforeInstr = labelSegmentLengthIncreased->m_next;
- // We might be changing the array to have missing values here, or we might be
- // changing it to extend it; in either case, we're not going to make it _not_
- // have missing values after this operation, so just write and fallthrough.
- // labelSegmentLengthIncreased:
- // mov [segment + index], src
- // jmp $fallThru
- InsertMove(indirOpnd, src, insertBeforeInstr);
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- }
- if (!(isStringIndex || (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())))
- {
- if(!stElem->IsProfiledInstr() || stElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
- {
- // Check whether the store is filling a missing value. If so, fall back to the helper so that it can check whether
- // this store is filling the last missing value in the array. This is necessary to keep the missing value tracking
- // in arrays precise. The check is omitted when profile data says that the store is likely to create missing values.
- //
- // cmp [segment + index], Js::SparseArraySegment::MissingValue
- // je $helper
- InsertMissingItemCompareBranch(
- indirOpnd,
- Js::OpCode::BrEq_A,
- labelHelper,
- stElem);
- }
- else
- {
- GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, stElem);
- }
- }
- // MOV [r3 + r2], src
- InsertMoveWithBarrier(indirOpnd, src, stElem);
- }
- // JMP $fallThru
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- // $helper:
- // bailout or caller generated helper call
- // $fallThru:
- stElem->InsertBefore(labelHelper);
- instrIsInHelperBlock = true;
- if (isNativeArrayStore && !isStringIndex)
- {
- Assert(stElem->HasBailOutInfo());
- Assert(labelHelper != labelBailOut);
- // Transform the original instr:
- //
- // $helper:
- // dst = LdElemI_A src (BailOut)
- // $fallthrough:
- //
- // to:
- //
- // $helper:
- // dst = LdElemI_A src
- // b $fallthrough
- // $bailout:
- // BailOut
- // $fallthrough:
- LowerOneBailOutKind(stElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
- IR::Instr *const insertBeforeInstr = stElem->m_next;
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(labelBailOut);
- }
- if (emitBailout)
- {
- stElem->FreeSrc1();
- stElem->FreeDst();
- GenerateBailOut(stElem, nullptr, nullptr);
- }
- return !emitBailout;
- }
// Generates the inlined fast path for LdLen (load of the "length" property) on
// likely-array and/or likely-string sources, falling back to a helper label for
// anything else.
//
// Params:
//   ldLen                  - the LdLen instruction being lowered; fast-path code is
//                            inserted before it.
//   instrIsInHelperBlockRef - out-param; set to true once instructions are being
//                            emitted after the $helper label (i.e. in a helper block).
// Returns:
//   true  - a helper call is still required after the generated fast path
//           (or no fast path was generated at all).
//   false - the instruction was fully lowered here (stack-args case, or the
//           irregular-length bailout case) and the caller must not emit a helper.
bool
Lowerer::GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    // Overall shape of the emitted fast path:
    //     TEST src, AtomTag                  -- check src not tagged int
    //     JNE $helper
    //     CMP [src], JavascriptArray::`vtable' -- check base isArray
    //     JNE $string
    //     MOV length, [src + offset(length)] -- Load array length
    //     JMP $tovar
    // $string:
    //     CMP [src + offset(type)], static_string_type -- check src isString
    //     JNE $helper
    //     MOV length, [src + offset(length)] -- Load string length
    // $toVar:
    //     TEST length, 0xC0000000            -- test for overflow of SHL, or negative
    //     JNE $helper
    //     SHL length, Js::VarTag_Shift       -- restore the var tag on the result
    //     INC length
    //     MOV dst, length
    //     JMP $fallthru
    // $helper:
    //     CALL GetProperty(src, length_property_id, scriptContext)
    // $fallthru:

    IR::Opnd * opnd = ldLen->GetSrc1();
    IR::RegOpnd * dst = ldLen->GetDst()->AsRegOpnd();
    const ValueType srcValueType(opnd->GetValueType());

    IR::LabelInstr *const labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (ldLen->DoStackArgsOpt())
    {
        // "arguments.length" with the stack-args optimization: emit the dedicated
        // fast path and remove the original instruction entirely — no helper needed.
        GenerateFastArgumentsLdLen(ldLen, ldLen->GetOrCreateContinueLabel());
        ldLen->Remove();
        return false;
    }
    else
    {
        const bool arrayFastPath = ShouldGenerateArrayFastPath(opnd, false, true, false);

        // HasBeenString instead of IsLikelyString because it could be a merge between StringObject and String, and this
        // information about whether it's a StringObject or some other object is not available in the profile data
        const bool stringFastPath = srcValueType.IsUninitialized() || srcValueType.HasBeenString();

        if(!(arrayFastPath || stringFastPath))
        {
            // Neither fast path applies; caller emits the helper call on the main path.
            return true;
        }

        IR::RegOpnd * src;
        if (opnd->IsRegOpnd())
        {
            src = opnd->AsRegOpnd();
        }
        else
        {
            // LdLen has a PropertySymOpnd until globopt where the decision whether to convert it to LdFld is made. If globopt is skipped, the opnd will
            // still be a PropertySymOpnd here. In that case, do the conversion here.
            IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
            PropertySym * propertySym = symOpnd->m_sym->AsPropertySym();
            src = IR::RegOpnd::New(propertySym->m_stackSym, IRType::TyVar, this->m_func);
            ldLen->ReplaceSrc1(src);
            opnd = src;
        }

        // For optimized array types the length field offset differs per type;
        // otherwise use the plain JavascriptArray length offset.
        const int32 arrayOffsetOfLength =
            srcValueType.IsLikelyAnyOptimizedArray()
                ? GetArrayOffsetOfLength(srcValueType)
                : Js::JavascriptArray::GetOffsetOfLength();
        IR::LabelInstr *labelString = nullptr;
        IR::RegOpnd *arrayOpnd = src;
        IR::RegOpnd *arrayLengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
        if(arrayFastPath)
        {
            if(!srcValueType.IsAnyOptimizedArray())
            {
                if(stringFastPath)
                {
                    // If we don't have info about the src value type or its object type, the array and string fast paths are
                    // generated
                    labelString = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                }
                // On array-test failure, fall to the string fast path if one is
                // being generated, otherwise straight to the helper.
                arrayOpnd = GenerateArrayTest(src, labelHelper, stringFastPath ? labelString : labelHelper, ldLen, false);
            }
            else if(src->IsArrayRegOpnd())
            {
                IR::ArrayRegOpnd *const arrayRegOpnd = src->AsArrayRegOpnd();
                if(arrayRegOpnd->LengthSym())
                {
                    // The length is already available in a sym (hoisted by globopt);
                    // reuse it instead of reloading from the array object.
                    arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), TyUint32, m_func);
                    DebugOnly(arrayLengthOpnd->FreezeSymValue());
                    autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
                }
            }
        }
        const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

        IR::RegOpnd *lengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseLengthOpnd;
        // Lazily allocate the register that will hold the loaded length.
        const auto EnsureLengthOpnd = [&]()
        {
            if(lengthOpnd)
            {
                return;
            }
            lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
            autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
        };
        if(arrayFastPath)
        {
            if(arrayLengthOpnd)
            {
                lengthOpnd = arrayLengthOpnd;
                autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
                Assert(!stringFastPath);
            }
            else
            {
                // MOV length, [array + offset(length)] -- Load array length
                EnsureLengthOpnd();
                IR::IndirOpnd *const indirOpnd = IR::IndirOpnd::New(arrayOpnd, arrayOffsetOfLength, TyUint32, this->m_func);
                InsertMove(lengthOpnd, indirOpnd, ldLen);
            }
        }
        if(stringFastPath)
        {
            IR::LabelInstr *labelToVar = nullptr;
            if(arrayFastPath)
            {
                // Both fast paths: the array path jumps over the string path to $toVar.
                // JMP $tovar
                labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                InsertBranch(Js::OpCode::Br, labelToVar, ldLen);

                // $string:
                ldLen->InsertBefore(labelString);
            }

            // CMP [src + offset(type)], static_stringtype -- check src isString
            // JNE $helper
            GenerateStringTest(src, ldLen, labelHelper, nullptr, !arrayFastPath);

            // MOV length, [src + offset(length)] -- Load string length
            EnsureLengthOpnd();
            IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
            InsertMove(lengthOpnd, indirOpnd, ldLen);

            if(arrayFastPath)
            {
                // $toVar:
                ldLen->InsertBefore(labelToVar);
            }
        }
        Assert(lengthOpnd);

        if(ldLen->HasBailOutInfo() && (ldLen->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnIrregularLength)
        {
            // Int-specialized dst: a length that doesn't fit a non-negative int32
            // bails out instead of calling the helper. The instruction is fully
            // consumed here, so return false.
            Assert(ldLen->GetBailOutKind() == IR::BailOutOnIrregularLength || ldLen->HasLazyBailOut());
            Assert(dst->IsInt32());

            // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
            //     test length, length
            //     js $helper
            //     mov dst, length
            //     jmp $fallthrough
            InsertCompareBranch(
                lengthOpnd,
                IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
                Js::OpCode::BrLt_A,
                labelHelper,
                ldLen);
            InsertMove(dst, lengthOpnd, ldLen);
            InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);

            // $helper:
            // (Bail out with IR::BailOutOnIrregularLength)
            ldLen->InsertBefore(labelHelper);
            instrIsInHelperBlock = true;
            ldLen->FreeDst();
            ldLen->FreeSrc1();
            GenerateBailOut(ldLen);
            return false;
        }

#if INT32VAR
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
        //     test length, length
        //     js $helper
        InsertCompareBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
            Js::OpCode::BrLt_A,
            labelHelper,
            ldLen);
#else
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative.
        // Additionally, verify that the signed value's width is not greater than 31 bits, since it needs to be tagged.
        //     test length, 0xC0000000
        //     jne $helper
        InsertTestBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0xC0000000, TyUint32, this->m_func, true),
            Js::OpCode::BrNeq_A,
            labelHelper,
            ldLen);
#endif

#if INT32VAR
        // Tag the int32 length as a var:
        //
        // dst_32 = MOV length
        // dst_64 = OR dst_64, Js::AtomTag_IntPtr
        //
        Assert(dst->GetType() == TyVar);

        IR::Opnd *dst32 = dst->Copy(this->m_func);
        dst32->SetType(TyInt32);

        // This will clear the top bits.
        InsertMove(dst32, lengthOpnd, ldLen);

        m_lowererMD.GenerateInt32ToVarConversion(dst, ldLen);
#else
        // dst = SHL length, Js::VarTag_Shift -- restore the var tag on the result
        InsertShift(
            Js::OpCode::Shl_A,
            false /* needFlags */,
            dst,
            lengthOpnd,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            ldLen);

        // dst = ADD dst, AtomTag
        InsertAdd(
            false /* needFlags */,
            dst,
            dst,
            IR::IntConstOpnd::New(Js::AtomTag_Int32, TyUint32, m_func, true),
            ldLen);
#endif

        // JMP $fallthrough
        InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
    }

    // $helper:
    // (caller generates helper call)
    ldLen->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;
    return true; // fast path was generated, helper call will be in a helper block
}
// Emits the inline surrogate-pair combining sequence for String.prototype.codePointAt.
// The caller has already loaded the first UTF-16 code unit into lowerChar (the
// charCodeAt fast path); this code checks whether that unit is a high surrogate
// followed by a valid low surrogate within bounds, and if so replaces lowerChar
// with the combined code point. Any failed check branches to $charCodeAt, leaving
// lowerChar holding the plain char code.
//
// Params:
//   lastInstr - insertion point; all generated instructions go before it.
//   func      - function being compiled.
//   strLength - string length in code units (uint32).
//   srcIndex  - index operand: either a tagged-int AddrOpnd constant or a TyVar reg.
//   lowerChar - in/out: first code unit on entry, final code point on exit.
//   strPtr    - pointer to the string's character buffer.
void
Lowerer::GenerateFastInlineStringCodePointAt(IR::Instr* lastInstr, Func* func, IR::Opnd *strLength, IR::Opnd *srcIndex, IR::RegOpnd *lowerChar, IR::RegOpnd *strPtr)
{
    //// Required State:
    // strLength - UInt32
    // srcIndex - TyVar if not Address
    // lowerChar - TyMachReg
    // strPtr - Addr

    //// Instructions
    // CMP [strLength], srcIndex + 1
    // JBE charCodeAt
    // CMP lowerChar 0xDC00
    // JGE charCodeAt
    // CMP lowerChar 0xD7FF
    // JLE charCodeAt
    // upperChar = MOVZX [strPtr + srcIndex + 1]
    // CMP upperChar 0xE000
    // JGE charCodeAt
    // CMP lowerChar 0xDBFF
    // JLE charCodeAt
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    // :charCodeAt
    // :done

    // Asserts
    // Arm should change to Uint32 for the strLength
    Assert(strLength->GetType() == TyUint32 || strLength->GetType() == TyMachReg);
    Assert(srcIndex->GetType() == TyVar || srcIndex->IsAddrOpnd());
    Assert(lowerChar->GetType() == TyMachReg || lowerChar->GetType() == TyUint32);
    Assert(strPtr->IsRegOpnd());

    IR::RegOpnd *tempReg = IR::RegOpnd::New(TyMachReg, func);
    IR::LabelInstr *labelCharCodeAt = IR::LabelInstr::New(Js::OpCode::Label, func);

    // Build the operand addressing the code unit at srcIndex + 1 (the candidate
    // low surrogate), and emit the bounds check "strLength <= srcIndex + 1 -> $charCodeAt".
    IR::IndirOpnd *tempIndirOpnd;
    if (srcIndex->IsAddrOpnd())
    {
        // Constant index: fold srcIndex + 1 at compile time.
        uint32 length = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) + 1U;
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(length, TyUint32, func), Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);

        tempIndirOpnd = IR::IndirOpnd::New(strPtr, (length) * sizeof(char16), TyUint16, func);
    }
    else
    {
        // Dynamic index: untag the var int, then compute srcIndex + 1 in tempReg.
        InsertMove(tempReg, srcIndex, lastInstr);
#if INT32VAR
        // On 64-bit, the low 32 bits of the tagged int are the int value.
        IR::Opnd * reg32Bit = tempReg->UseWithNewType(TyInt32, func);
        InsertMove(tempReg, reg32Bit, lastInstr);
        tempReg = reg32Bit->AsRegOpnd();
#else
        // On 32-bit, shift out the var tag bit.
        InsertShift(Js::OpCode::Shr_A, false, tempReg, tempReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func), lastInstr);
#endif
        InsertAdd(false, tempReg, tempReg, IR::IntConstOpnd::New(1, TyInt32, func), lastInstr);
        InsertCompareBranch(strLength, tempReg, Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);

        // Widen the index back to pointer size for the indexed load below.
        if(tempReg->GetSize() != MachPtr)
        {
            tempReg = tempReg->UseWithNewType(TyMachPtr, func)->AsRegOpnd();
        }
        // Scale factor 1 here selects the char16-indexed addressing mode.
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, tempReg, 1, TyUint16, func);
    }

    // By this point, we have added instructions before labelCharCodeAt to check for extra length required for the surrogate pair
    // The branching for that is already handled, all we have to do now is to check for correct values.

    // Validate char is in range [D800, DBFF]; otherwise just get a charCodeAt
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xDC00, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xD7FF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // upperChar = MOVZX r3, [r1 + r3 * 2] -- this is the value of the upper surrogate pair char
    IR::RegOpnd *upperChar = IR::RegOpnd::New(TyInt32, func);
    InsertMove(upperChar, tempIndirOpnd, lastInstr);

    // Validate upper is in range [DC00, DFFF]; otherwise just get a charCodeAt
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xE000, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xDBFF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // (lower - 0xD800) << 10 + second - 0xDC00 + 0x10000 -- 0x10000 - 0xDC00 = 0x2400
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    InsertSub(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0xD800, TyUint32, func), lastInstr);
    InsertShift(Js::OpCode::Shl_A, false, lowerChar, lowerChar, IR::IntConstOpnd::New(10, TyUint32, func), lastInstr);
    InsertAdd(false, lowerChar, lowerChar, upperChar, lastInstr);
    InsertAdd(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0x2400, TyUint32, func), lastInstr);

    lastInstr->InsertBefore(labelCharCodeAt);
}
// Generates an inline fast path for String.fromCodePoint(x) when the argument is
// likely an int: untag the var, then fetch the single-character string from the
// char-string cache, falling back to the CallDirect helper path otherwise.
//
// Params:
//   instr - the CallDirect instruction for String_FromCodePoint.
// Returns:
//   true always; the helper call remains as the fallback path
//   (relocated after labelHelper when the fast path was generated).
bool
Lowerer::GenerateFastInlineStringFromCodePoint(IR::Instr* instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence
    //      s8.var = StartCall 2 (0x2).i32 #000c
    //      arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
    //      arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCodePoint
    //      arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    //      s0[LikelyTaggedInt].var = CallDirect String_FromCodePoint.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    // Walk the ArgOut chain back to the user-supplied argument.
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyInt())
    {
        //Trying to generate this code
        //      MOV resultOpnd, dst
        //      MOV fromCodePointIntArgOpnd, src1
        //      SAR fromCodePointIntArgOpnd, Js::VarTag_Shift
        //      JAE $Helper
        //      CMP fromCodePointIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        //      JAE $labelWCharStringCheck <
        //      MOV resultOpnd, GetCharStringCache[fromCodePointIntArgOpnd]
        //      TST resultOpnd, resultOpnd //Check for null
        //      JEQ $helper
        //      JMP $Done
        //
        //$labelWCharStringCheck:
        //      resultOpnd = Call HelperGetStringForCharW
        //      JMP $Done
        //$helper:
        IR::RegOpnd * resultOpnd = nullptr;
        // Use a temp result register when dst is unusable (not a reg, or aliases src1).
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }

        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::RegOpnd * fromCodePointIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCodePointIntArgOpnd(fromCodePointIntArgOpnd, instr->m_func);
        InsertMove(fromCodePointIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version.
        fromCodePointIntArgOpnd = GenerateUntagVar(fromCodePointIntArgOpnd, labelHelper, instr);
        GenerateGetSingleCharString(fromCodePointIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, true);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the original CallDirect onto the helper path so the fast path skips it.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
// String.fromCharCode fast path: when the (single) argument is likely a tagged int,
// untag it and fetch the single-character string from the char string cache, only
// falling back to the CallDirect helper path when the tag test or cache probe fails.
// Always returns true; when no fast path applies the CallDirect is left untouched.
bool
Lowerer::GenerateFastInlineStringFromCharCode(IR::Instr* instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence feeding the CallDirect (implicit this + one explicit argument):
    // s8.var = StartCall 2 (0x2).i32 #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect String_FromCharCode.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    // Walk the ArgOut chain: src2 links to the ArgOut_A_InlineSpecialized instr.
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    // Implicit this + one explicit argument.
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyInt())
    {
        //Trying to generate this code
        // MOV resultOpnd, dst
        // MOV fromCharCodeIntArgOpnd, src1
        // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
        // JAE $Helper
        // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        // JAE $labelWCharStringCheck <
        // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
        // TST resultOpnd, resultOpnd //Check for null
        // JEQ $helper
        // JMP $Done
        //
        //$labelWCharStringCheck:
        // resultOpnd = Call HelperGetStringForCharW
        // JMP $Done
        //$helper:

        // Use a temp for the result when dst is not a reg or aliases src1, so the
        // fast path cannot clobber the argument before the helper path reads it.
        IR::RegOpnd * resultOpnd = nullptr;
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        IR::RegOpnd * fromCharCodeIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCharCodeIntArgOpnd(fromCharCodeIntArgOpnd, instr->m_func);
        InsertMove(fromCharCodeIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version (jumps to labelHelper otherwise).
        fromCharCodeIntArgOpnd = GenerateUntagVar(fromCharCodeIntArgOpnd, labelHelper, instr);
        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        // isCodePoint = false: use the 16-bit char cache/helper variant.
        GenerateGetSingleCharString(fromCharCodeIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, false);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
// Emits code (before 'instr') that produces the single-character string for
// charCodeOpnd into resultOpnd and instr's dst:
//   1. Probe the library's char string cache by index; a null entry jumps to labelHelper.
//   2. Indices >= CharStringCacheSize fall back to a runtime helper call
//      (HelperGetStringForCharCodePoint when isCodePoint, else HelperGetStringForChar
//      with the char code truncated to 16 bits).
// Both successful paths branch to doneLabel; labelHelper/doneLabel are inserted by the caller.
void
Lowerer::GenerateGetSingleCharString(IR::RegOpnd * charCodeOpnd, IR::Opnd * resultOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * doneLabel, IR::Instr * instr, bool isCodePoint)
{
    // MOV cacheReg, CharStringCache
    // CMP charCodeOpnd, Js::ScriptContext::CharStringCacheSize
    // JAE $labelWCharStringCheck <
    // MOV resultOpnd, cacheReg[charCodeOpnd]
    // TST resultOpnd, resultOpnd //Check for null
    // JEQ $helper
    // JMP $Done
    //
    //$labelWCharStringCheck:
    // Arg1 = charCodeOpnd
    // Arg0 = cacheReg
    // resultOpnd = Call HelperGetStringForCharW/CodePoint
    // JMP $Done
    //$helper:

    IR::LabelInstr *labelWCharStringCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    //Try to load from in CharStringCacheA
    IR::RegOpnd *cacheRegOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
    IR::AutoReuseOpnd autoReuseCacheRegOpnd(cacheRegOpnd, instr->m_func);

    // The A-cache must be the first member of the cache so one base pointer serves both
    // the direct indexing below and the helper call.
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheRegOpnd, this->LoadLibraryValueOpnd(instr, LibraryValue::ValueCharStringCache), instr);
    // Unsigned compare: out-of-range (or "negative") indices go to the wide-char/helper check.
    InsertCompareBranch(charCodeOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint32, this->m_func), Js::OpCode::BrGe_A, true, labelWCharStringCheck, instr);
    InsertMove(resultOpnd, IR::IndirOpnd::New(cacheRegOpnd, charCodeOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, instr->m_func), instr);
    // A null cache slot means the string hasn't been created yet: take the full helper path.
    InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrEq_A, labelHelper, instr);
    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);

    instr->InsertBefore(labelWCharStringCheck);
    IR::JnHelperMethod helperMethod;
    if (isCodePoint)
    {
        helperMethod = IR::HelperGetStringForCharCodePoint;
    }
    else
    {
        // fromCharCode semantics: the code unit is taken modulo 2^16.
        InsertMove(charCodeOpnd, charCodeOpnd->UseWithNewType(TyUint16, instr->m_func), instr);
        helperMethod = IR::HelperGetStringForChar;
    }

    //Try to load from in CharStringCacheW or CharStringCacheCodePoint, this is a helper call.
    // Arguments are pushed in reverse order: charCode then cache.
    this->m_lowererMD.LoadHelperArgument(instr, charCodeOpnd);
    this->m_lowererMD.LoadHelperArgument(instr, cacheRegOpnd);

    IR::Instr* helperCallInstr = IR::Instr::New(Js::OpCode::Call, resultOpnd, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    instr->InsertBefore(helperCallInstr);
    this->m_lowererMD.LowerCall(helperCallInstr, 0);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
}
- bool
- Lowerer::GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr)
- {
- Assert(instr->m_opcode == Js::OpCode::CallDirect);
- // ArgOut sequence
- // s8.var = StartCall 2 (0x2).i32 #000c
- // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, global object
- // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to parseInt
- // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
- // s0[LikelyTaggedInt].var = CallDirect GlobalObject_ParseInt.u32, arg1(s11)<0>.u32 #001c
- IR::Opnd * linkOpnd = instr->GetSrc2();
- IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
- linkOpnd = tmpInstr->GetSrc2();
- #if DBG
- IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
- Assert(argCount == 2);
- #endif
- IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
- Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
- IR::Opnd *parseIntArgOpnd = argInstr->GetSrc1();
- if (parseIntArgOpnd->GetValueType().IsLikelyNumber())
- {
- //If likely int check for tagged int and set the dst
- IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- if (!parseIntArgOpnd->IsTaggedInt())
- {
- this->m_lowererMD.GenerateSmIntTest(parseIntArgOpnd, instr, labelHelper);
- }
- if (instr->GetDst())
- {
- this->InsertMove(instr->GetDst(), parseIntArgOpnd, instr);
- }
- InsertBranch(Js::OpCode::Br, doneLabel, instr);
- instr->InsertBefore(labelHelper);
- instr->InsertAfter(doneLabel);
- RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
- }
- return true;
- }
// Lowers InlineArrayPop: emits an inline fast path for Array.prototype.pop when
// profitable, then wires the fallback to the runtime helper. Likely-native arrays
// bail out (rather than call the helper) on cases the helper cannot handle
// (length == 0, failed array type test); Var arrays jump to the helper instead.
void
Lowerer::GenerateFastInlineArrayPop(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPop);

    IR::Opnd *arrayOpnd = instr->GetSrc1();

    IR::LabelInstr *bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    bool isLikelyNativeArray = arrayOpnd->GetValueType().IsLikelyNativeArray();

    if (ShouldGenerateArrayFastPath(arrayOpnd, false, false, false))
    {
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        if(isLikelyNativeArray)
        {
            //We bailOut on cases like length == 0, Array Test failing cases (Runtime helper cannot handle these cases)
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We jump to helper on cases like length == 0, Array Test failing cases
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, labelHelper);
        }
        instr->InsertBefore(labelHelper);

        ///JMP to $doneLabel (skip the helper path when the fast path succeeded)
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    else
    {
        //We assume here that the array will be a Var array. - Runtime Helper calls assume this.
        Assert(!isLikelyNativeArray);
    }

    instr->InsertAfter(doneLabel);
    if(isLikelyNativeArray)
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
    }

    GenerateHelperToArrayPopFastPath(instr, doneLabel, bailOutLabelHelper);
}
// Fast path for Array.isArray(x): load the object's type id and set dst to True when
// it falls in [TypeIds_ArrayFirst, TypeIds_ArrayLastWithES5], False for other objects
// and tagged values. Proxy and HostDispatch type ids cannot be decided inline and are
// routed to the CallDirect helper path.
void
Lowerer::GenerateFastInlineIsArray(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    IR::Opnd * dst = instr->GetDst();
    Assert(dst);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] = implicit this, argsOpnd[1] = the value being tested.
    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[1]);

    IR::LabelInstr *helperLabel = InsertLabel(true, instr);
    IR::Instr * insertInstr = helperLabel;
    IR::LabelInstr *doneLabel = InsertLabel(false, instr->m_next);

    ValueType valueType = argsOpnd[1]->GetValueType();
    IR::RegOpnd * src = GetRegOpnd(argsOpnd[1], insertInstr, m_func, argsOpnd[1]->GetType());

    // Labels are marked as helper blocks when the profile says the value is likely an
    // array, i.e. the not-array paths are then the cold ones.
    IR::LabelInstr *checkNotArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());
    IR::LabelInstr *notArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());

    // Tagged values (ints/floats) are never arrays.
    if (!src->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(src, insertInstr, notArrayLabel);
    }

    // MOV typeOpnd, [opnd + offset(type)]
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeOpnd(typeOpnd, m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func);
    InsertMove(typeOpnd, indirOpnd, insertInstr);

    // MOV typeIdOpnd, [typeOpnd + offset(typeId)]
    IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeIdOpnd(typeIdOpnd, m_func);
    indirOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func);
    InsertMove(typeIdOpnd, indirOpnd, insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayFirst
    // JLT $checkNotArray (could still be Proxy/HostDispatch, which need the helper)
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayFirst, TyInt32, m_func),
        Js::OpCode::BrLt_A,
        checkNotArrayLabel,
        insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayLastWithES5
    // JGT $notArray
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayLastWithES5, TyInt32, m_func),
        Js::OpCode::BrGt_A,
        notArrayLabel,
        insertInstr);

    // MOV dst, True
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);

    // JMP $done
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // $checkNotArray:
    insertInstr->InsertBefore(checkNotArrayLabel);

    // A proxy may wrap an array, so only the helper can answer; same for host dispatch.
    // CMP typeIdOpnd, TypeIds_Proxy
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    // Both special ids must sort below ArrayFirst or the JLT above would skip them.
    CompileAssert(Js::TypeIds_Proxy < Js::TypeIds_ArrayFirst);

    // CMP typeIdOpnd, TypeIds_HostDispatch
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    CompileAssert(Js::TypeIds_HostDispatch < Js::TypeIds_ArrayFirst);

    // $notArrayLabel:
    insertInstr->InsertBefore(notArrayLabel);

    // MOV dst, False
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    RelocateCallDirectToHelperPath(tmpInstr, helperLabel);
}
// Fast path for obj.hasOwnProperty(prop) when prop is a PropertyString: probe the
// property string's LdElem inline cache against the object's type (inline and aux
// slots) and set dst to True on a hit. When prop is the current key of a for-in loop
// (single-def by BrOnEmpty/BrOnNotEmpty), additionally answer from the for-in
// enumerator's cached state on a cache miss. Everything else goes to the helper.
void
Lowerer::GenerateFastInlineHasOwnProperty(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] = this object, argsOpnd[1] = property name.
    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[0] && argsOpnd[1]);

    // Fast path requires a possibly-string property name on a possibly-object receiver,
    // both in registers; otherwise leave the CallDirect as-is.
    if (argsOpnd[1]->GetValueType().IsNotString()
        || argsOpnd[0]->GetValueType().IsNotObject()
        || !argsOpnd[0]->IsRegOpnd()
        || !argsOpnd[1]->IsRegOpnd())
    {
        return;
    }

    IR::RegOpnd * thisObj = argsOpnd[0]->AsRegOpnd();
    IR::RegOpnd * propOpnd = argsOpnd[1]->AsRegOpnd();

    // fast path case where hasOwnProperty is being called using a property name loaded via a for-in loop
    bool generateForInFastpath = propOpnd->GetValueType().IsString()
        && propOpnd->m_sym->m_isSingleDef
        && (propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnEmpty
            || propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnNotEmpty);

    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::LabelInstr * labelHelper = InsertLabel(true, instr);
    // With the for-in fast path, an inline-cache miss gets its own label so the
    // enumerator state can still be consulted before giving up to the helper.
    IR::LabelInstr * cacheMissLabel = generateForInFastpath ? IR::LabelInstr::New(Js::OpCode::Label, m_func, true) : labelHelper;
    IR::Instr * insertInstr = labelHelper;

    // GenerateObjectTest(propOpnd, $labelHelper)
    // CMP indexOpnd, PropertyString::`vtable'
    // JNE $helper
    // GenerateObjectTest(thisObj, $labelHelper)
    // MOV inlineCacheOpnd, propOpnd->lsElemInlineCache
    // MOV objectTypeOpnd, thisObj->type
    // GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd) ; loads inline cache for given type
    // GenerateLocalInlineCacheCheck(objectTypeOpnd, inlineCacheOpnd, $notInlineSlotsLabel) ; check for type in inline slots, jump to $notInlineSlotsLabel on failure
    // MOV dst, ValueTrue
    // JMP $done
    // $notInlineSlotsLabel:
    // GenerateLoadTaggedType(objectTypeOpnd, opndTaggedType)
    // GenerateLocalInlineCacheCheck(opndTaggedType, inlineCacheOpnd, $cacheMissLabel) ; check for type in aux slot, jump to $cacheMissLabel on failure
    // MOV dst, ValueTrue
    // JMP $done

    m_lowererMD.GenerateObjectTest(propOpnd, insertInstr, labelHelper);

    // Only PropertyString instances carry the LdElem inline cache used below.
    InsertCompareBranch(IR::IndirOpnd::New(propOpnd, 0, TyMachPtr, m_func), LoadVTableValueOpnd(insertInstr, VTableValue::VtablePropertyString), Js::OpCode::BrNeq_A, labelHelper, insertInstr);

    m_lowererMD.GenerateObjectTest(thisObj, insertInstr, labelHelper);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(propOpnd, Js::PropertyString::GetOffsetOfLdElemInlineCache(), TyMachPtr, m_func), insertInstr);

    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(thisObj, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), insertInstr);

    GenerateDynamicLoadPolymorphicInlineCacheSlot(insertInstr, inlineCacheOpnd, objectTypeOpnd);

    IR::LabelInstr * notInlineSlotsLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // Cache hit on the inline-slots type => property exists on the object itself.
    GenerateLocalInlineCacheCheck(insertInstr, objectTypeOpnd, inlineCacheOpnd, notInlineSlotsLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    insertInstr->InsertBefore(notInlineSlotsLabel);
    // Retry the cache with the tagged type, which is how aux-slot hits are recorded.
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, m_func);
    m_lowererMD.GenerateLoadTaggedType(insertInstr, objectTypeOpnd, opndTaggedType);

    GenerateLocalInlineCacheCheck(insertInstr, opndTaggedType, inlineCacheOpnd, cacheMissLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    if (!generateForInFastpath)
    {
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
        return;
    }

    insertInstr->InsertBefore(cacheMissLabel);

    // CMP forInEnumeratorOpnd->canUseJitFastPath, 0
    // JEQ $labelHelper
    // MOV cachedDataTypeOpnd, forInEnumeratorOpnd->enumeratorInitialType
    // CMP thisObj->type, cachedDataTypeOpnd
    // JNE $labelHelper
    // CMP forInEnumeratorOpnd->enumeratingPrototype, 0
    // JNE $falseLabel
    // MOV dst, True
    // JMP $doneLabel
    // $falseLabel: [helper]
    // MOV dst, False
    // JMP $doneLabel
    // $labelHelper: [helper]
    // CallDirect code
    // ...
    // $doneLabel:

    // propOpnd is single-def by BrOnEmpty/BrOnNotEmpty whose src1 is the for-in enumerator.
    IR::Opnd * forInEnumeratorOpnd = argsOpnd[1]->AsRegOpnd()->m_sym->m_instrDef->GetSrc1();

    // go to helper if we can't use JIT fastpath
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrEq_A, labelHelper, insertInstr);

    // go to helper if initial type is not same as the object we are querying
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataTypeOpnd, GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), insertInstr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(thisObj, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, insertInstr);

    // if we haven't yet gone to helper, then we can check if we are enumerating the prototype to know if property is an own property
    IR::LabelInstr *falseLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd * enumeratingPrototype = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratingPrototype(), TyInt8);
    InsertCompareBranch(enumeratingPrototype, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrNeq_A, falseLabel, insertInstr);

    // assume true is the main path
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // load false on helper path
    insertInstr->InsertBefore(falseLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
- bool
- Lowerer::ShouldGenerateStringReplaceFastPath(IR::Instr * callInstr, IntConstType argCount)
- {
- // a.replace(b,c)
- // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
- //
- // argout sequence:
- // arg1(s12)<0>.var = ArgOut_A s2.var, s11.var #0014 <---- a
- // arg2(s13)<4>.var = ArgOut_A s3.var, arg1(s12)<0>.var #0018 <---- b
- // arg3(s14)<8>.var = ArgOut_A s4.var, arg2(s13)<4>.var #001c <---- c
- // s0[LikelyString].var = CallI s5[ffunc].var, arg3(s14)<8>.var #0020
- IR::Opnd *linkOpnd = callInstr->GetSrc2();
- Assert(argCount == 2);
- while(linkOpnd->IsSymOpnd())
- {
- IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
- StackSym *sym = src2->m_sym->AsStackSym();
- Assert(sym->m_isSingleDef);
- IR::Instr *argInstr = sym->m_instrDef;
- Assert(argCount >= 0);
- // check to see if 'a' and 'c' are likely strings
- if((argCount == 2 || argCount == 0) && (!argInstr->GetSrc1()->GetValueType().IsLikelyString()))
- {
- return false;
- }
- // we want 'b' to be regex. Don't generate fastpath if it is a tagged int
- if((argCount == 1) && (argInstr->GetSrc1()->IsTaggedInt()))
- {
- return false;
- }
- argCount--;
- linkOpnd = argInstr->GetSrc2();
- }
- return true;
- }
// Emits the String.prototype.replace fast path for a.replace(b, c):
//   strOpnd -> a (this string), src1 -> b (expected regex), src2 -> c (replacement string).
// Inserts runtime string/regex type tests that branch to labelHelper on mismatch, then
// a direct call to the RegExp replace helper (result-used or result-not-used variant,
// depending on whether the CallDirect's dst is consumed). Returns true unconditionally.
bool
Lowerer::GenerateFastReplace(IR::Opnd* strOpnd, IR::Opnd* src1, IR::Opnd* src2, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // a.replace(b,c)
    // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex

    IR::Opnd * callDst = callInstr->GetDst();
    Assert(strOpnd->GetValueType().IsLikelyString() && src2->GetValueType().IsLikelyString());

    // Runtime string test only when the value type isn't definitely String already.
    if(!strOpnd->GetValueType().IsString())
    {
        strOpnd = GetRegOpnd(strOpnd, insertInstr, m_func, TyVar);
        this->GenerateStringTest(strOpnd->AsRegOpnd(), insertInstr, labelHelper);
    }

    if(!src1->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(src1, insertInstr, labelHelper);
    }

    // Exact-vtable check: only a genuine JavascriptRegExp takes the fast path.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptRegExp);

    // cmp  [regex], vtableAddress
    // jne  $labelHelper
    src1 = GetRegOpnd(src1, insertInstr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(src1->AsRegOpnd(), 0, TyMachPtr, insertInstr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertInstr);

    if(!src2->GetValueType().IsString())
    {
        src2 = GetRegOpnd(src2, insertInstr, m_func, TyVar);
        this->GenerateStringTest(src2->AsRegOpnd(), insertInstr, labelHelper);
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, insertInstr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    insertInstr->InsertBefore(helperCallInstr);
    // Preserve any implicit-call bailout from the original call on the helper call.
    if (insertInstr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(insertInstr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, insertInstr->GetBailOutInfo(), insertInstr->GetBailOutKind(), insertInstr);
    }

    //scriptContext, pRegEx, pThis, pReplace (to be pushed in reverse order)

    // pReplace, pThis, pRegEx
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, src2);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, strOpnd);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, src1);

    // script context
    LoadScriptContext(helperCallInstr);

    if(callDst)
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultUsed);
    }
    else
    {
        // Result unused: the helper can skip materializing the result string.
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultNotUsed);
    }

    return true;
}
- ///----
// Fast path for a.split(b[, c]) and a.match(b):
//   split: only when 'c' is absent (lowered with limit UINT_MAX), 'a' is a string,
//          and 'b' is a regex.
//   match: when 'a' is a string and 'b' is a regex.
// Emits runtime string/regex tests branching to the helper label, then calls the
// matching RegExp helper; when the dst may be a temp object, stack space for the
// result array is allocated and passed as the first argument.
void
Lowerer::GenerateFastInlineStringSplitMatch(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //helperCallOpnd - distinguishes String_Split from String_Match below.
    IR::Opnd * src1 = instr->GetSrc1();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] = this string ('a'), argsOpnd[1] = pattern ('b').
    IR::Opnd * argsOpnd[2];
    if(!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }

    // Bail on the fast path when 'a' can't be a string or 'b' is certainly not a regex.
    if(!argsOpnd[0]->GetValueType().IsLikelyString() || argsOpnd[1]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!argsOpnd[0]->GetValueType().IsString())
    {
        argsOpnd[0] = GetRegOpnd(argsOpnd[0], instr, m_func, TyVar);
        this->GenerateStringTest(argsOpnd[0]->AsRegOpnd(), instr, labelHelper);
    }

    if(!argsOpnd[1]->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(argsOpnd[1], instr, labelHelper);
    }

    // Exact-vtable check: only a genuine JavascriptRegExp takes the fast path.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp  [regex], vtableAddress
    // jne  $labelHelper
    argsOpnd[1] = GetRegOpnd(argsOpnd[1], instr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(argsOpnd[1]->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    // Preserve any implicit-call bailout from the original call on the helper call.
    if (instr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, instr->GetBailOutInfo(), instr->GetBailOutKind(), instr);
    }

    // [stackAllocationPointer, ]scriptcontext, regexp, input[, limit] (to be pushed in reverse order)

    if(src1->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperString_Split)
    {
        //limit
        //As we are optimizing only for two operands, make limit UINT_MAX
        IR::Opnd* limit = IR::IntConstOpnd::New(UINT_MAX, TyUint32, instr->m_func);
        this->m_lowererMD.LoadHelperArgument(helperCallInstr, limit);
    }

    //input, regexp
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, argsOpnd[0]);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, argsOpnd[1]);

    // script context
    LoadScriptContext(helperCallInstr);

    IR::JnHelperMethod helperMethod = IR::JnHelperMethod::HelperInvalid;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst && instr->dstIsTempObject)
    {
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod = IR::JnHelperMethod::HelperRegExp_SplitResultUsedAndMayBeTemp;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod = IR::JnHelperMethod::HelperRegExp_MatchResultUsedAndMayBeTemp;
                break;

            default:
                Assert(false);
                __assume(false);
        }

        // Allocate some space on the stack for the result array
        IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
        autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
        stackAllocationOpnd->SetValueType(callDst->GetValueType());
        GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, helperCallInstr);
        m_lowererMD.LoadHelperArgument(helperCallInstr, stackAllocationOpnd);
    }
    else
    {
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_SplitResultUsed
                        : IR::JnHelperMethod::HelperRegExp_SplitResultNotUsed;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_MatchResultUsed
                        : IR::JnHelperMethod::HelperRegExp_MatchResultNotUsed;
                break;

            default:
                Assert(false);
                __assume(false);
        }
    }
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    // Fast path succeeded: skip over the relocated CallDirect helper block.
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// Fast path for a.exec(b) where 'a' is a regex and 'b' is a string. After the usual
// string/regex type tests, an additional "BOI literal2" fast path (unless
// ExecBOIFastPathPhase is off) handles a global regex whose program is a
// begin-of-input 2-char literal: when the input can't match (length < 2 or first
// DWORD differs), lastIndex is reset to 0 and dst is set to null without calling the
// helper. All remaining cases call the RegExp exec helper.
void
Lowerer::GenerateFastInlineRegExpExec(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] = the regex ('a'/this), argsOpnd[1] = the input string ('b').
    IR::Opnd * argsOpnd[2];
    if (!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }

    IR::Opnd *opndString = argsOpnd[1];
    // Bail when the input can't be a string or the receiver is certainly not a regex.
    if(!opndString->GetValueType().IsLikelyString() || argsOpnd[0]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!opndString->GetValueType().IsString())
    {
        opndString = GetRegOpnd(opndString, instr, m_func, TyVar);
        this->GenerateStringTest(opndString->AsRegOpnd(), instr, labelHelper);
    }

    IR::Opnd *opndRegex = argsOpnd[0];
    if(!opndRegex->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(opndRegex, instr, labelHelper);
    }

    // Exact-vtable check: only a genuine JavascriptRegExp takes the fast path.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp  [regex], vtableAddress
    // jne  $labelHelper
    opndRegex = GetRegOpnd(opndRegex, instr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRegex->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    if (!PHASE_OFF(Js::ExecBOIFastPathPhase, m_func))
    {
        // Load pattern from regex operand
        IR::RegOpnd *opndPattern = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertMove(
            opndPattern,
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, m_func),
            instr);

        // Load program from pattern
        IR::RegOpnd *opndProgram = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertMove(
            opndProgram,
            IR::IndirOpnd::New(opndPattern, offsetof(UnifiedRegex::RegexPattern, rep) + offsetof(UnifiedRegex::RegexPattern::UnifiedRep, program), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelFastHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // We want the program's tag to be BOILiteral2Tag
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)UnifiedRegex::Program::GetOffsetOfTag(), TyUint8, m_func),
            IR::IntConstOpnd::New((IntConstType)UnifiedRegex::Program::GetBOILiteral2Tag(), TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        // Test the program's flags for "global" (the lastIndex reset below is only
        // meaningful for global regexes).
        InsertTestBranch(
            IR::IndirOpnd::New(opndProgram, offsetof(UnifiedRegex::Program, flags), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::GlobalRegexFlag, TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        IR::LabelInstr *labelNoMatch = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // If string length < 2...
        InsertCompareBranch(
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
            IR::IntConstOpnd::New(2, TyUint32, m_func),
            Js::OpCode::BrLt_A,
            labelNoMatch,
            instr);

        // ...or the DWORD doesn't match the pattern...
        IR::RegOpnd *opndBuffer = IR::RegOpnd::New(TyMachReg, m_func);
        Lowerer::InsertMove(
            opndBuffer,
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelGotString = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // A null buffer means the string isn't flattened yet: call GetSz to materialize it.
        InsertTestBranch(opndBuffer, opndBuffer, Js::OpCode::BrNeq_A, labelGotString, instr);

        m_lowererMD.LoadHelperArgument(instr, opndString);
        IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, opndBuffer, IR::HelperCallOpnd::New(IR::HelperString_GetSz, m_func), m_func);
        instr->InsertBefore(instrCall);
        m_lowererMD.LowerCall(instrCall, 0);

        instr->InsertBefore(labelGotString);

        // Compare the first two UTF-16 code units (one DWORD) against the literal.
        IR::RegOpnd *opndBufferDWORD = IR::RegOpnd::New(TyUint32, m_func);
        Lowerer::InsertMove(
            opndBufferDWORD,
            IR::IndirOpnd::New(opndBuffer, 0, TyUint32, m_func),
            instr);

        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)(UnifiedRegex::Program::GetOffsetOfRep() + UnifiedRegex::Program::GetOffsetOfBOILiteral2Literal()), TyUint32, m_func),
            opndBufferDWORD,
            Js::OpCode::BrEq_A,
            labelFastHelper,
            instr);

        // ...then set the last index to 0...
        instr->InsertBefore(labelNoMatch);

        Lowerer::InsertMove(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, m_func),
            IR::AddrOpnd::NewNull(m_func),
            instr);

        Lowerer::InsertMove(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, m_func),
            IR::IntConstOpnd::New(0, TyUint32, m_func),
            instr);

        // ...and set the dst to null...
        if (callDst)
        {
            Lowerer::InsertMove(
                callDst,
                LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
                instr);
        }

        // ...and we're done.
        this->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelFastHelper);
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    // Preserve any implicit-call bailout from the original call on the helper call.
    if (instr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, instr->GetBailOutInfo(), instr->GetBailOutKind(), instr);
    }

    // [stackAllocationPointer, ]scriptcontext, regexp, string (to be pushed in reverse order)

    //string, regexp
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, opndString);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, opndRegex);

    // script context
    LoadScriptContext(helperCallInstr);

    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if (callDst)
    {
        if (instr->dstIsTempObject)
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsedAndMayBeTemp;

            // Allocate some space on the stack for the result array
            IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
            autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
            stackAllocationOpnd->SetValueType(callDst->GetValueType());
            GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, helperCallInstr);
            m_lowererMD.LoadHelperArgument(helperCallInstr, stackAllocationOpnd);
        }
        else
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsed;
        }
    }
    else
    {
        helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultNotUsed;
    }
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    // Fast path succeeded: skip over the relocated CallDirect helper block.
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// Generate a fast path for the "in" operator that checks quickly if we have an array or not and if the index of the data is contained in the array's length.
void Lowerer::GenerateFastArrayIsIn(IR::Instr * instr)
{
    // operator "foo in bar"
    IR::Opnd* src1 = instr->GetSrc1(); // foo
    IR::Opnd* src2 = instr->GetSrc2(); // bar

    // Only emit the fast path when the index is likely an int and the base is
    // likely an array with no missing values; otherwise bail to the generic path.
    if (
        !src1->GetValueType().IsLikelyInt() ||
        // Do not do a fast path if we know for sure we don't have an int
        src1->IsNotInt() ||
        !src2->GetValueType().IsLikelyArray() ||
        !src2->GetValueType().HasNoMissingValues())
    {
        return;
    }
    IR::LabelInstr* helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr* isArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // Untag the index; jumps to helper if src1 is not a tagged int at runtime.
    IR::RegOpnd* src1Untagged = GenerateUntagVar(src1->AsRegOpnd(), helperLabel, instr);
    IR::RegOpnd* src2RegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src2RegOpnd, src2, instr);
    IR::AutoReuseOpnd autoReuseArrayOpnd;
    // Jump to helper if src2 is not an object pointer (e.g. a tagged value).
    m_lowererMD.GenerateObjectTest(src2RegOpnd, instr, helperLabel);
    IR::RegOpnd* arrayOpnd = src2RegOpnd->Copy(instr->m_func)->AsRegOpnd();
    autoReuseArrayOpnd.Initialize(arrayOpnd, instr->m_func, false /* autoDelete */);

    // Accept exactly the three array vtables (var array, native int array,
    // native float array); anything else goes to the helper.
    IR::Opnd* vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrEq_A,
        isArrayLabel,
        instr);
    vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableNativeIntArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrEq_A,
        isArrayLabel,
        instr);
    vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableNativeFloatArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        helperLabel,
        instr);
    instr->InsertBefore(isArrayLabel);

    // The fast path is only valid when the array has no missing values (a hole
    // would require a prototype-chain lookup); test the flag and bail if clear.
    InsertTestBranch(
        IR::IndirOpnd::New(src2RegOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
        IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, m_func, true),
        Js::OpCode::BrEq_A,
        helperLabel,
        instr);

    // Load the head segment and its length for the bounds check.
    IR::AutoReuseOpnd autoReuseHeadSegmentOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthOpnd;
    IR::IndirOpnd* indirOpnd = IR::IndirOpnd::New(src2RegOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
    IR::RegOpnd* headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
    InsertMove(headSegmentOpnd, indirOpnd, instr);
    IR::Opnd* headSegmentLengthOpnd = IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
    autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);

    // Bail to helper unless 0 <= index < headSegmentLength.
    InsertCompareBranch(
        src1Untagged,
        headSegmentLengthOpnd,
        Js::OpCode::BrGe_A,
        helperLabel,
        instr);
    InsertCompareBranch(
        src1Untagged,
        IR::IntConstOpnd::New(0, src1Untagged->GetType(), this->m_func),
        Js::OpCode::BrLt_A,
        helperLabel,
        instr);

    // In-bounds index in a no-missing-values array => "in" is true.
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
// Generate a fast path for the "in" operator to use the cache where the key may be a PropertyString or Symbol.
void Lowerer::GenerateFastObjectIsIn(IR::Instr * instr)
{
    // operator "key in base": src1 is the key, src2 is the base object.
    IR::RegOpnd* baseOpnd = GetRegOpnd(instr->GetSrc2(), instr, m_func, TyVar);
    IR::RegOpnd* indexOpnd = GetRegOpnd(instr->GetSrc1(), instr, m_func, TyVar);
    bool likelyStringIndex = indexOpnd->GetValueType().IsLikelyString();
    bool likelySymbolIndex = indexOpnd->GetValueType().IsLikelySymbol();
    // Only worthwhile when the base is likely an object and the key is likely
    // a string or a symbol (the two key kinds with an LdElem inline cache).
    if (!baseOpnd->GetValueType().IsLikelyObject() || !(likelyStringIndex || likelySymbolIndex))
    {
        return;
    }
    IR::LabelInstr* helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    if (likelyStringIndex)
    {
        // Verify at runtime that the key really is a PropertyString, then
        // probe the PropertyString's LdElem inline cache.
        GeneratePropertyStringTest(indexOpnd, instr, helperLabel, false /*isStore*/);
        const uint32 inlineCacheOffset = Js::PropertyString::GetOffsetOfLdElemInlineCache();
        const uint32 hitRateOffset = Js::PropertyString::GetOffsetOfHitRate();
        GenerateFastIsInSymbolOrStringIndex(instr, indexOpnd, baseOpnd, instr->GetDst(), inlineCacheOffset, hitRateOffset, helperLabel, doneLabel);
    }
    else
    {
        Assert(likelySymbolIndex);
        // Verify at runtime that the key is a JavascriptSymbol, then probe
        // the symbol's LdElem inline cache.
        GenerateSymbolTest(indexOpnd, instr, helperLabel);
        const uint32 inlineCacheOffset = Js::JavascriptSymbol::GetOffsetOfLdElemInlineCache();
        const uint32 hitRateOffset = Js::JavascriptSymbol::GetOffsetOfHitRate();
        GenerateFastIsInSymbolOrStringIndex(instr, indexOpnd, baseOpnd, instr->GetDst(), inlineCacheOffset, hitRateOffset, helperLabel, doneLabel);
    }
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
- // Given an operand, either cast it or move it to a register
- IR::RegOpnd * Lowerer::GetRegOpnd(IR::Opnd* opnd, IR::Instr* insertInstr, Func* func, IRType type)
- {
- if (opnd->IsRegOpnd())
- {
- return opnd->AsRegOpnd();
- }
- IR::RegOpnd *regOpnd = IR::RegOpnd::New(type, func);
- InsertMove(regOpnd, opnd, insertInstr);
- return regOpnd;
- }
- template <bool Saturate>
- void Lowerer::GenerateTruncWithCheck(_In_ IR::Instr* instr)
- {
- Assert(instr->GetSrc1()->IsFloat());
- if (instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32())
- {
- m_lowererMD.GenerateTruncWithCheck<Saturate>(instr);
- }
- else
- {
- Assert(instr->GetDst()->IsInt64());
- LoadScriptContext(instr);
- if (instr->GetSrc1()->IsFloat32())
- {
- m_lowererMD.LoadFloatHelperArgument(instr, instr->GetSrc1());
- }
- else
- {
- m_lowererMD.LoadDoubleHelperArgument(instr, instr->GetSrc1());
- }
- IR::JnHelperMethod helper;
- if (Saturate)
- {
- IR::JnHelperMethod helperList[2][2] = { IR::HelperF32ToI64Sat, IR::HelperF32ToU64Sat, IR::HelperF64ToI64Sat ,IR::HelperF64ToU64Sat };
- helper = helperList[instr->GetSrc1()->GetType() != TyFloat32][instr->GetDst()->GetType() == TyUint64];
- }
- else
- {
- IR::JnHelperMethod helperList[2][2] = { IR::HelperF32ToI64, IR::HelperF32ToU64, IR::HelperF64ToI64 ,IR::HelperF64ToU64 };
- helper = helperList[instr->GetSrc1()->GetType() != TyFloat32][instr->GetDst()->GetType() == TyUint64];
- }
- instr->UnlinkSrc1();
- this->m_lowererMD.ChangeToHelperCall(instr, helper);
- }
- }
// Moves the call-setup chain of a CallDirect (the ArgOut_A_InlineSpecialized,
// its linked ArgOut_A instructions, and the StartCall) from the main path to
// just after labelHelper, so the call setup only executes on the helper path.
void
Lowerer::RelocateCallDirectToHelperPath(IR::Instr* argoutInlineSpecialized, IR::LabelInstr* labelHelper)
{
    IR::Opnd *linkOpnd = argoutInlineSpecialized->GetSrc2(); //ArgOut_A_InlineSpecialized src2; link to actual argouts.
    argoutInlineSpecialized->Unlink();
    labelHelper->InsertAfter(argoutInlineSpecialized);
    // Walk the argout chain via the sym link operands. Each instruction is
    // re-inserted immediately after the label, so the instruction moved last
    // (the StartCall below) ends up first in the relocated sequence.
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
        StackSym *sym = src2->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);
        linkOpnd = argInstr->GetSrc2();
    }
    // Move startcall
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);
}
// Attempts to emit an inline fast path for String.prototype charAt /
// charCodeAt / codePointAt (selected by 'index') on a CallDirect instruction.
// Returns false when no fast path could be generated; the CallDirect then
// remains the only path.
bool
Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFunction index)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    // Fetch the two actuals: the string ("this") and the index.
    IR::Opnd * argsOpnd[2] = {0};
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    bool success = GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
        instr, instr, labelHelper, doneLabel);
    // The helper label is inserted even on failure, since fast-path code
    // emitted so far may already branch to it.
    instr->InsertBefore(labelHelper);
    if (!success)
    {
        return false;
    }
    // Fast path succeeded: jump around the helper call and move the call
    // setup (argouts/StartCall) onto the helper path.
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    return true;
}
- void
- Lowerer::GenerateCtz(IR::Instr* instr)
- {
- Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
- Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
- m_lowererMD.GenerateCtz(instr);
- }
- void
- Lowerer::GeneratePopCnt(IR::Instr* instr)
- {
- Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
- Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
- m_lowererMD.GeneratePopCnt(instr);
- }
- void
- Lowerer::GenerateFastInlineMathClz(IR::Instr* instr)
- {
- Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
- Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
- m_lowererMD.GenerateClz(instr);
- }
- void
- Lowerer::GenerateFastInlineMathImul(IR::Instr* instr)
- {
- IR::Opnd* src1 = instr->GetSrc1();
- IR::Opnd* src2 = instr->GetSrc2();
- IR::Opnd* dst = instr->GetDst();
- Assert(dst->IsInt32());
- Assert(src1->IsInt32());
- Assert(src2->IsInt32());
- IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, dst, src1, src2, instr->m_func);
- instr->InsertBefore(imul);
- LowererMD::Legalize(imul);
- instr->Remove();
- }
- void
- Lowerer::LowerReinterpretPrimitive(IR::Instr* instr)
- {
- Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
- IR::Opnd* src1 = instr->GetSrc1();
- IR::Opnd* dst = instr->GetDst();
- Assert(dst->GetSize() == src1->GetSize());
- Assert((dst->IsFloat32() && src1->IsInt32()) ||
- (dst->IsInt32() && src1->IsFloat32()) ||
- (dst->IsInt64() && src1->IsFloat64()) ||
- (dst->IsFloat64() && src1->IsInt64()) );
- m_lowererMD.EmitReinterpretPrimitive(dst, src1, instr);
- instr->Remove();
- }
- void
- Lowerer::GenerateFastInlineMathFround(IR::Instr* instr)
- {
- IR::Opnd* src1 = instr->GetSrc1();
- IR::Opnd* dst = instr->GetDst();
- Assert(dst->IsFloat());
- Assert(src1->IsFloat());
- // This function is supposed to convert a float to the closest float32 representation.
- // However, it is a bit loose about types, which the ARM64 encoder takes issue with.
- #ifdef _M_ARM64
- LowererMD::GenerateFastInlineMathFround(instr);
- #else
- IR::Instr* fcvt64to32 = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src1, instr->m_func);
- instr->InsertBefore(fcvt64to32);
- LowererMD::Legalize(fcvt64to32);
- if (dst->IsFloat64())
- {
- IR::Instr* fcvt32to64 = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, dst, instr->m_func);
- instr->InsertBefore(fcvt32to64);
- LowererMD::Legalize(fcvt32to64);
- }
- instr->Remove();
- #endif
- return;
- }
// Attempts to emit an inline fast path for String.prototype.replace on a
// CallDirect whose actuals are (subject string, pattern, replacement).
// Returns false when the argument value types rule out the fast path or
// GenerateFastReplace fails; the CallDirect then remains the only path.
bool
Lowerer::GenerateFastInlineStringReplace(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    IR::Opnd * argsOpnd[3] = {0};
    bool result = instr->FetchOperands(argsOpnd, 3);
    Assert(result);
    AnalysisAssert(argsOpnd[0] && argsOpnd[1] && argsOpnd[2]);
    // Require: likely-string subject, possibly-object pattern (e.g. a regex),
    // and likely-string replacement.
    if (!argsOpnd[0]->GetValueType().IsLikelyString()
        || argsOpnd[1]->GetValueType().IsNotObject()
        || !argsOpnd[2]->GetValueType().IsLikelyString())
    {
        return false;
    }
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    bool success = this->GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2],
        instr, instr, labelHelper, doneLabel);
    // The helper label is inserted even on failure, since fast-path code
    // emitted so far may already branch to it.
    instr->InsertBefore(labelHelper);
    if (!success)
    {
        return false;
    }
    // Fast path succeeded: jump around the helper call and move the call
    // setup (argouts/StartCall) onto the helper path.
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    return true;
}
#ifdef ENABLE_DOM_FAST_PATH
/*
    Lower the DOMFastPathGetter opcode
    We have inliner generated bytecode:
    (dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
    (dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
    method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
    We'll convert it to a JavascriptFunction entry method call:
    CALL Helper funcObject CallInfo(CallFlags_Value, 3) thisObj
*/
void
Lowerer::LowerFastInlineDOMFastPathGetter(IR::Instr* instr)
{
    IR::Opnd* helperOpnd = instr->UnlinkSrc1();
    Assert(helperOpnd->IsHelperCallOpnd());
    IR::Opnd *linkOpnd = instr->UnlinkSrc2();
    Assert(linkOpnd->IsRegOpnd());
    // Walk back through the ExtendArg_A chain: the nearest one carries the
    // function object, and the one before it carries the this object.
    IR::Instr* prevInstr = linkOpnd->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* funcObj = prevInstr->GetSrc1();
    Assert(funcObj->IsRegOpnd());
    // If the Extended_arg was CSE's across a loop or hoisted out of a loop,
    // adding a new reference down here might cause funcObj to now be liveOnBackEdge.
    // Use the addToLiveOnBackEdgeSyms bit vector to add it to a loop if we encounter one.
    // We'll clear it once we reach the Extended arg.
    this->addToLiveOnBackEdgeSyms->Set(funcObj->AsRegOpnd()->m_sym->m_id);
    Assert(prevInstr->GetSrc2() != nullptr);
    prevInstr = prevInstr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* thisObj = prevInstr->GetSrc1();
    Assert(prevInstr->GetSrc2() == nullptr);
    Assert(thisObj->IsRegOpnd());
    this->addToLiveOnBackEdgeSyms->Set(thisObj->AsRegOpnd()->m_sym->m_id);
    // CallInfo constant: CallFlags_Value with an argument count of 1.
    // NOTE(review): the header comment above says CallInfo(CallFlags_Value, 3)
    // — confirm which count is intended.
    const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_Value, 1, m_func);
    // Push the three helper arguments (loaded in reverse of call order).
    m_lowererMD.LoadHelperArgument(instr, thisObj);
    m_lowererMD.LoadHelperArgument(instr, info);
    m_lowererMD.LoadHelperArgument(instr, funcObj);
    instr->m_opcode = Js::OpCode::Call;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperOpnd->AsHelperCallOpnd()->m_fnHelper, 3, m_func);
    instr->SetSrc1(helperCallOpnd);
    m_lowererMD.LowerCall(instr, 3); // we have funcobj, callInfo, and this.
}
#endif
// Lowers InlineArrayPush: emits the inline push fast path when the base value
// type allows it, then the helper fallback, plus native-array bailout plumbing
// when the base is likely a native array.
void
Lowerer::GenerateFastInlineArrayPush(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush);
    IR::Opnd * baseOpnd = instr->GetSrc1();
    IR::Opnd * srcOpnd = instr->GetSrc2();
    // When the push's result (the new length) is used, the fast path must
    // also produce it.
    bool returnLength = false;
    if(instr->GetDst())
    {
        returnLength = true;
    }
    IR::LabelInstr * bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    //Don't Generate fast path according to ShouldGenerateArrayFastPath()
    //AND, Don't Generate fast path if the array is LikelyNative and the element is not specialized
    if(ShouldGenerateArrayFastPath(baseOpnd, false, false, false) &&
        !(baseOpnd->GetValueType().IsLikelyNativeArray() && srcOpnd->IsVar()))
    {
        GenerateFastPush(baseOpnd, srcOpnd, instr, instr, labelHelper, doneLabel, bailOutLabelHelper, returnLength);
        instr->InsertBefore(labelHelper);
        // Fast path done: skip over the helper call.
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    if(baseOpnd->GetValueType().IsLikelyNativeArray())
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
        InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabelHelper);
    }
    // Emit the helper call that backs the fast path.
    GenerateHelperToArrayPushFastPath(instr, bailOutLabelHelper);
}
// Emits the inline fast path for Array.prototype.pop: verifies the base is a
// supported array, decrements the length, and accesses the last element via
// the common fast element-access codegen. Jumps to bailOutLabelHelper for
// empty arrays and failed array tests, and to labelHelper for cases the
// element-access fast path cannot handle.
bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //     TEST baseOpnd, AtomTag                  -- check baseOpnd not tagged int
    //     JNE $helper
    //     CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    //     JNE $helper
    //     MOV r2, [baseOpnd + offset(length)]     -- Load array length
    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not proven to be an array: emit the runtime array test first.
        arrayOpnd = GenerateArrayTest(baseOpnd, bailOutLabelHelper, bailOutLabelHelper, callInstr, false, true);
        // After popping, the tail slot may become a missing value.
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            // Reuse the length sym that's already available instead of reloading.
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    // Copy the length into a mutable register so it can be decremented below
    // without clobbering the (possibly frozen) length sym.
    IR::AutoReuseOpnd autoReuseMutableArrayLengthOpnd;
    {
        IR::RegOpnd *const mutableArrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseMutableArrayLengthOpnd.Initialize(mutableArrayLengthOpnd, m_func);
        if(arrayLengthOpnd)
        {
            // mov mutableArrayLength, arrayLength
            InsertMove(mutableArrayLengthOpnd, arrayLengthOpnd, callInstr);
        }
        else
        {
            //     MOV mutableArrayLength, [array + offset(length)]     -- Load array length
            // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
            InsertMove(
                mutableArrayLengthOpnd,
                IR::IndirOpnd::New(
                    arrayOpnd,
                    Js::JavascriptArray::GetOffsetOfLength(),
                    mutableArrayLengthOpnd->GetType(),
                    this->m_func),
                callInstr);
        }
        arrayLengthOpnd = mutableArrayLengthOpnd;
    }

    // Empty array: nothing to pop; bail out.
    InsertCompareBranch(arrayLengthOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), Js::OpCode::BrEq_A, true, bailOutLabelHelper, callInstr);
    // length - 1 is the index of the element being popped.
    InsertSub(false, arrayLengthOpnd, arrayLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func),callInstr);
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    // NOTE(review): the comment above appears copied from the push path;
    // checkArrayLengthOverflow is passed as true here — confirm intent.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            callInstr,
            false,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            true /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            false/* = returnLength */,
            bailOutLabelHelper /* = bailOutLabelInstr*/);
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);
    return true;
}
// Emits the inline fast path for Array.prototype.push of a single element:
// verifies the base is a supported array, computes the store location at
// index == length via the common fast element-access codegen (which also
// updates the length, and produces the new length when returnLength is true),
// then stores the element with a write barrier.
bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper, bool returnLength)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //     TEST baseOpnd, AtomTag                  -- check baseOpnd not tagged int
    //     JNE $helper
    //     CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    //     JNE $helper
    //     MOV r2, [baseOpnd + offset(length)]     -- Load array length
    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not proven to be an array: emit the runtime array test first.
        arrayOpnd = GenerateArrayTest(baseOpnd, labelHelper, labelHelper, insertInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            // Reuse the length sym that's already available instead of reloading.
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
    if(!arrayLengthOpnd)
    {
        //     MOV arrayLength, [array + offset(length)]     -- Load array length
        // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
        arrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        InsertMove(
            arrayLengthOpnd,
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), arrayLengthOpnd->GetType(), this->m_func),
            insertInstr);
    }
    // The push target is [array + length], i.e. one past the current last element.
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);
    if (returnLength && src->IsEqual(insertInstr->GetDst()))
    {
        //If the dst is same as the src, then dst is going to be overridden by GenerateFastElemICommon in process of updating the length.
        //Save it in a temp register.
        IR::RegOpnd *opnd = IR::RegOpnd::New(src->GetType(), this->m_func);
        InsertMove(opnd, src, insertInstr);
        src = opnd;
    }
    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            insertInstr,
            true,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            false /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            returnLength,
            bailOutLabelHelper);
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);
    //  MOV [r3 + r2], src
    InsertMoveWithBarrier(indirOpnd, src, insertInstr);
    return true;
}
// Emits the inline fast path for charAt / charCodeAt / codePointAt (selected
// by 'index'): validates that srcStr is a flat JavascriptString and srcIndex
// an in-range tagged int, loads the char16 at that index (masked when
// PoisonStringLoad is enabled), and produces the result in dst. Returns false
// when a fast path is pointless (known-negative constant index), in which
// case only the helper path should be kept.
bool
Lowerer::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    //  if regSrcStr is not object, JMP $helper
    //  CMP [regSrcStr + offset(type)] , static string type   -- check base string type
    //  JNE $helper
    //  MOV r1, [regSrcStr + offset(m_pszValue)]
    //  TEST r1, r1
    //  JEQ $helper
    //  MOV r2, srcIndex
    //  If r2 is not int, JMP $helper
    //  Convert r2 to int
    //  CMP [regSrcStr + offsetof(length)], r2
    //  JBE $helper
    //  MOVZX r2, [r1 + r2 * 2]
    //  if (charAt)
    //      PUSH r1
    //      PUSH scriptContext
    //      CALL GetStringFromChar
    //      MOV dst, EAX
    //  else (charCodeAt)
    //      if (codePointAt)
    //          Lowerer.GenerateFastCodePointAt -- Common inline functions
    //      Convert r2 to Var
    //      MOV dst, r2
    bool isInt = false;
    bool isNotTaggedValue = false;
    // Use any compile-time tagging knowledge about srcStr to skip or
    // short-circuit the runtime object test below.
    if (srcStr->IsRegOpnd())
    {
        if (srcStr->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
        else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
        {
            isNotTaggedValue = true;
        }
    }
    IR::RegOpnd *regSrcStr = GetRegOpnd(srcStr, insertInstr, m_func, TyVar);
    if (!isNotTaggedValue)
    {
        if (!isInt)
        {
            m_lowererMD.GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
        }
        else
        {
            // Known tagged int: the fast path can never apply; jump straight to helper.
            // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
            IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
            insertInstr->InsertBefore(fakeBr);
            InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
        }
    }
    // Bail out if index a constant and is less than zero.
    if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
    {
        labelHelper->isOpHelper = false;
        InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
        return false;
    }
    GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);
    // r1 contains the value of the char16* pointer inside JavascriptString.
    // MOV r1, [regSrcStr + offset(m_pszValue)]
    IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
    InsertMove(r1, indirOpnd, insertInstr);
    // TEST r1, r1 -- Null pointer test (string not flattened yet)
    // JEQ $helper
    InsertTestBranch(r1, r1, Js::OpCode::BrEq_A, labelHelper, insertInstr);
    IR::RegOpnd *strLength = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(strLength, IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), insertInstr);
    IR::Opnd* indexOpnd = nullptr;
    if (srcIndex->IsAddrOpnd())
    {
        // Constant index: fold the bounds check and address computation.
        uint32 indexValue = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address);
        // CMP [regSrcStr + offsetof(length)], index
        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // JBE $helper
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(indexValue, TyUint32, m_func), Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
        // Mask off the sign so that poisoning will work for negative indices
#if TARGET_32
        uint32 maskedIndex = CONFIG_FLAG_RELEASE(PoisonStringLoad) ? (indexValue & INT32_MAX) : indexValue;
#else
        uint32 maskedIndex = indexValue;
#endif
        indirOpnd = IR::IndirOpnd::New(r1, maskedIndex * sizeof(char16), TyUint16, this->m_func);
        indexOpnd = IR::IntConstOpnd::New(maskedIndex, TyMachPtr, m_func);
    }
    else
    {
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
        // MOV r2, srcIndex
        InsertMove(r2, srcIndex, insertInstr);
        // Untag the index; jumps to helper if it isn't a tagged int.
        r2 = GenerateUntagVar(r2, labelHelper, insertInstr);
        // CMP [regSrcStr + offsetof(length)], r2
        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // JBE $helper
        InsertCompareBranch(strLength, r2, Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
#if TARGET_32
        if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
        {
            // Mask off the sign so that poisoning will work for negative indices
            InsertAnd(r2, r2, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), insertInstr);
        }
#endif
        if (r2->GetSize() != MachPtr)
        {
            r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
        }
        indexOpnd = r2;
        indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyUint16, this->m_func);
    }
    // When PoisonStringLoad is enabled, build mask = (index - length) shifted
    // right by (regBits - 1); the loaded character is ANDed with it below so a
    // speculative out-of-bounds load cannot leak data through the result.
    IR::RegOpnd* maskOpnd = nullptr;
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        maskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
        if (strLength->GetSize() != MachPtr)
        {
            strLength = strLength->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
        }
        InsertSub(false, maskOpnd, indexOpnd, strLength, insertInstr);
        InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(MachRegInt * 8 - 1, TyInt8, m_func), insertInstr);
        if (maskOpnd->GetSize() != TyUint32)
        {
            maskOpnd = maskOpnd->UseWithNewType(TyUint32, this->m_func)->AsRegOpnd();
        }
    }
    // MOVZX charReg, [r1 + r2 * 2]  -- this is the value of the char
    IR::RegOpnd *charReg = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charReg, indirOpnd, insertInstr);
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        InsertAnd(charReg, charReg, maskOpnd, insertInstr);
    }
    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
    {
        IR::Opnd *resultOpnd;
        if (dst->IsEqual(srcStr))
        {
            // dst aliases the source string operand; build the result in a
            // temp so the string isn't clobbered while still needed.
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = dst;
        }
        GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
    }
    else
    {
        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
        {
            // codePointAt may need to combine a surrogate pair into one code point.
            GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
        }
        if (charReg->GetSize() != MachPtr)
        {
            charReg = charReg->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
        }
        // Tag the char code as a Var int and store it in dst.
        m_lowererMD.GenerateInt32ToVarConversion(charReg, insertInstr);
        // MOV dst, charReg
        InsertMove(dst, charReg, insertInstr);
    }
    return true;
}
// Emits ArgOut_A_Dynamic instructions that forward the current inlinee's own
// stack arguments (excluding "this") to a call. Returns the inlinee's actual
// argument count as a constant operand.
IR::Opnd*
Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    Assert(callInstr->m_func->IsInlinee());
    Func *func = callInstr->m_func;
    uint32 actualCount = func->actualCount - 1; // don't count this pointer
    Assert(actualCount < Js::InlineeCallInfo::MaxInlineeArgoutCount);

    const auto firstRealArgStackSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, func);
    // argInOpnd = address of the first real (non-this) argument slot.
    IR::RegOpnd* argInOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseArgInOpnd(argInOpnd, func);
    InsertLea(argInOpnd, firstArg, callInstr);
    IR::IndirOpnd *argIndirOpnd = nullptr;
    IR::Instr* argout = nullptr;
#if defined(_M_IX86)
    // Maintain alignment: push a dummy value when the count is even so the
    // stack stays aligned on x86.
    if ((actualCount & 1) == 0)
    {
        IR::Instr *alignPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        alignPush->SetSrc1(IR::IntConstOpnd::New(1, TyInt32, this->m_func));
        callInstr->InsertBefore(alignPush);
    }
#endif
    // Emit argouts from the last argument down to the first.
    for(uint i = actualCount; i > 0; i--)
    {
        argIndirOpnd = IR::IndirOpnd::New(argInOpnd, (i - 1) * MachPtr, TyMachReg, func);
        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(argIndirOpnd);
        callInstr->InsertBefore(argout);
        // i represents ith arguments from actuals, with is i + 3 counting this, callInfo and function object
        this->m_lowererMD.LoadDynamicArgument(argout, i + 3);
    }
    return IR::IntConstOpnd::New(func->actualCount, TyMachReg, func);
}
// For AMD64 and ARM only.
// Emits a register-based loop that argouts all spread array elements except
// the first, counting indexOpnd down from the element count, then emits the
// first element separately into argument register 4.
void
Lowerer::LowerInlineSpreadArgOutLoopUsingRegisters(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;
    IR::LabelInstr *oneArgLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    // Skip the loop entirely when only one element remains.
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrEq_A, true, oneArgLabel, callInstr);
    IR::LabelInstr *startLoopLabel = InsertLoopTopLabel(callInstr);
    Loop * loop = startLoopLabel->GetLoop();
    // Both the loop counter and the elements base pointer live across the back edge.
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);
    InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);
    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    // X64 requires a reg opnd
    IR::RegOpnd *elemRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    Lowerer::InsertMove(elemRegOpnd, elemPtrOpnd, callInstr);
    argout->SetSrc1(elemRegOpnd);
    argout->SetSrc2(indexOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);
    // Loop until only the first element is left.
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrNeq_A, true, startLoopLabel, callInstr);
    // Emit final argument into register 4 on AMD64 and ARM
    callInstr->InsertBefore(oneArgLabel);
    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object
}
IR::Instr *
Lowerer::LowerCallIDynamicSpread(IR::Instr *callInstr, ushort callFlags)
{
    // Lowers CallIDynamicSpread(f, this, ...spreadArray): walks the arg chain to find
    // the spread array, 'this', and the StartCall; removes the now-redundant arg
    // instructions; expands the array's elements into dynamic argouts at runtime; and
    // delegates to the machine-dependent LowerCallIDynamic. Inlinees cannot take this
    // path and trigger a rejit with spread disabled.
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamicSpread);

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    Func *const func = callInstr->m_func;

    if (func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // callInstr.src2 links to the spread-array argout.
    IR::Instr *spreadArrayInstr = callInstr;
    IR::SymOpnd *argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);
    spreadArrayInstr = argLinkSym->m_instrDef;

    Assert(spreadArrayInstr->m_opcode == Js::OpCode::ArgOut_A_SpreadArg);
    IR::Opnd *arraySrcOpnd = spreadArrayInstr->UnlinkSrc1();
    IR::RegOpnd *arrayOpnd = GetRegOpnd(arraySrcOpnd, spreadArrayInstr, func, TyMachPtr);

    argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();

    // Walk the arg chain and find the start call
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Nothing to be done for the function object, emit as normal
    IR::Instr *thisInstr = argLinkSym->m_instrDef;
    IR::RegOpnd *thisOpnd = thisInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = thisOpnd->m_sym->AsStackSym();
    thisInstr->Unlink();
    thisInstr->FreeDst();

    // Remove the array ArgOut instr and StartCall, they are no longer needed
    spreadArrayInstr->Unlink();
    spreadArrayInstr->FreeDst();

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = startCallInstr->GetNextRealInstr();
    startCallInstr->Remove();

    // argsLength = array.length (read from the array object at runtime).
    IR::RegOpnd *argsLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::IndirOpnd *arrayLengthPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    Lowerer::InsertMove(argsLengthOpnd, arrayLengthPtrOpnd, callInstr);

    // Don't bother expanding args if there are zero
    IR::LabelInstr *zeroArgsLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(argsLengthOpnd, IR::IntConstOpnd::New(0, TyInt8, func), Js::OpCode::BrEq_A, true, zeroArgsLabel, callInstr);

    IR::RegOpnd *indexOpnd = IR::RegOpnd::New(TyUint32, func);
    Lowerer::InsertMove(indexOpnd, argsLengthOpnd, callInstr);

    // Get the array head offset and length
    IR::IndirOpnd *arrayHeadPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, func);
    IR::RegOpnd *arrayElementsStartOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertAdd(false, arrayElementsStartOpnd, arrayHeadPtrOpnd, IR::IntConstOpnd::New(offsetof(Js::SparseArraySegment<Js::Var>, elements), TyUint8, func), callInstr);

    // Emit the machine-dependent loop that turns elements into argouts.
    this->m_lowererMD.LowerInlineSpreadArgOutLoop(callInstr, indexOpnd, arrayElementsStartOpnd);

    // Resume if we have zero args
    callInstr->InsertBefore(zeroArgsLabel);

    // Lower call
    callInstr->m_opcode = Js::OpCode::CallIDynamic;
    callInstr = m_lowererMD.LowerCallIDynamic(callInstr, thisInstr, argsLengthOpnd, callFlags, insertBeforeInstrForCFG);

    return callInstr;
}
IR::Instr *
Lowerer::LowerCallIDynamic(IR::Instr * callInstr, ushort callFlags)
{
    // Lowers a dynamic call whose arguments come from the forwarded stack arguments
    // (ArgOut_A_FromStackArgs in the arg chain). Walks the chain: 'this' argout ->
    // ArgOut_A_FromStackArgs -> StartCall, expanding the stack args into argouts and
    // removing the chain instructions, then hands off to the MD LowerCallIDynamic.
    // Requires the stack-args optimization; otherwise rejits with inline apply disabled.
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::SymOpnd * argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Set aside the 'this' argout; the MD lowerer re-emits it.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::Instr* saveThisArgOutInstr = argInstr;
    saveThisArgOutInstr->Unlink();
    saveThisArgOutInstr->FreeDst();

    argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Expand the forwarded stack arguments into individual dynamic argouts.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    IR::Opnd* argsLength = m_lowererMD.GenerateArgOutForStackArgs(callInstr, argInstr);

    IR::RegOpnd* startCallDstOpnd = argInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = startCallDstOpnd->m_sym->AsStackSym();
    startCallDstOpnd->Free(this->m_func);
    argInstr->Remove();// Remove ArgOut_A_FromStackArgs

    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = argInstr->GetNextRealInstr();
    argInstr->Remove(); //Remove start call

    return m_lowererMD.LowerCallIDynamic(callInstr, saveThisArgOutInstr, argsLength, callFlags, insertBeforeInstrForCFG);
}
//This is only for x64 & ARM.
IR::Opnd*
Lowerer::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    // Expands the current function's (or inlinee's) stack arguments into dynamic
    // argouts for a dynamic call, returning an opnd holding the argument count for
    // callInfo generation and stack allocation.
    //
    // For architectures where we only pass 4 parameters in registers, the
    // generated code looks something like this:
    //     s25.var       =  LdLen_A         s4.var
    //     s26.var       =  Ld_A            s25.var
    //                      BrEq_I4         $L3, s25.var,0  // If we have no further arguments to pass, don't pass them
    //   $L2:
    //                      BrEq_I4         $L4, s25.var,1  // Loop through the rest of the arguments, putting them on the stack
    //     s25.var       =  SUB_I4          s25.var, 0x1
    //     s10.var       =  LdElemI_A       [s4.var+s25.var].var
    //                      ArgOut_A_Dynamic  s10.var, s25.var
    //                      Br $L2
    //   $L4:
    //     s25.var       =  LdImm 0 // set s25 to 0, since it'll be 1 on the way into this block
    //     s10.var       =  LdElemI_A       [s4.var + 0 * MachReg].var // The last one has to be put into argslot 4, since this is likely a register, not a stack location.
    //                      ArgOut_A_Dynamic  s10.var, 4
    //   $L3:
    //
    // Generalizing this for more register-passed parameters gives us code
    // something like this:
    //     s25.var       =  LdLen_A         s4.var
    //     s26.var       =  Ld_A            s25.var
    //                      BrLe_I4         $L3, s25.var,0  // If we have no further arguments to pass, don't pass them
    //   $L2:
    //                      BrLe_I4         $L4, s25.var,INT_REG_COUNT-3  // Loop through the rest of the arguments up to the number passed in registers, putting them on the stack
    //     s25.var       =  SUB_I4          s25.var, 0x1
    //     s10.var       =  LdElemI_A       [s4.var+s25.var].var
    //                      ArgOut_A_Dynamic  s10.var, s25.var
    //                      Br $L2
    //   $L4:
    //   foreach of the remaining ones, N going down from (the number we can pass in regs -1) to 1 (0 omitted as we know that it'll be at least one register argument):
    //                      BrEq_I4         $L__N, s25.var, N
    //   end foreach
    //   foreach of the remaining ones, N going down from (the number we can pass in regs -1) to 0:
    //   $L__N:
    //     s10.var       =  LdElemI_A       [s4.var + N * MachReg].var // The last one has to be put into argslot 4, since this is likely a register, not a stack location.
    //                      ArgOut_A_Dynamic  s10.var, N+3
    //   end foreach
    //   $L3:

#if defined(_M_IX86)
    // We get a compilation error on x86 due to assigning a negative to a uint
    // TODO: don't even define this function on x86 - we Assert(false) anyway there.
    // Alternatively, don't define when INT_ARG_REG_COUNT - 4 < 0
    AssertOrFailFast(false);
    return nullptr;
#else

    Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);

    this->m_lowererMD.GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);

    // Inlinees have a JIT-time-constant actual count; take the straight-line path.
    if (callInstr->m_func->IsInlinee())
    {
        return this->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
    }
    Func *func = callInstr->m_func;

    IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();

    IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseLdLenDstOpnd(ldLenDstOpnd, func);
    IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd ,stackArgs, func);
    ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); /*LdLen_A works only on stack arguments*/
    callInstr->InsertBefore(ldLen);
    GenerateFastRealStackArgumentsLdLen(ldLen);

    // Preserve the raw length; it is the return value of this function.
    IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyMachReg, func), ldLenDstOpnd, func);
    saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
    callInstr->InsertBefore(saveLenInstr);

    IR::LabelInstr* doneArgs = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::Instr* branchDoneArgs = IR::BranchInstr::New(Js::OpCode::BrEq_I4, doneArgs, ldLenDstOpnd, IR::IntConstOpnd::New(0, TyInt8, func),func);
    callInstr->InsertBefore(branchDoneArgs);
    this->m_lowererMD.EmitInt4Instr(branchDoneArgs);

    IR::LabelInstr* startLoop = InsertLoopTopLabel(callInstr);
    Loop * loop = startLoop->GetLoop();
    IR::LabelInstr* endLoop = IR::LabelInstr::New(Js::OpCode::Label, func);

    IR::Instr* branchOutOfLoop = IR::BranchInstr::New(Js::OpCode::BrLe_I4, endLoop, ldLenDstOpnd, IR::IntConstOpnd::New(INT_ARG_REG_COUNT - 3, TyInt8, func),func);
    callInstr->InsertBefore(branchOutOfLoop);
    this->m_lowererMD.EmitInt4Instr(branchOutOfLoop);

    IR::Instr* subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyMachReg, func),func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
    const IR::AutoReuseOpnd autoReuseldElemDstOpnd(ldElemDstOpnd, func);
    IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    argout->SetSrc2(ldLenDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    IR::BranchInstr *tailBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, startLoop, func);
    callInstr->InsertBefore(tailBranch);
    callInstr->InsertBefore(endLoop);
    loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);

    // Note: This loop iteratively adds instructions in two locations; in the block
    // of branches that jump to the "load elements to argOuts" instructions, and in
    // the block of load elements to argOuts instructions themselves.

    // 4 to denote this is 4th register after this, callinfo & function object
    // INT_ARG_REG_COUNT is the number of parameters passed in int regs
    uint current_reg_pass = INT_ARG_REG_COUNT - 4;

    do
    {
        // If we're on this pass we know we have to do at least one of these, so skip
        // the branch if we're on the last one.
        if (current_reg_pass != INT_ARG_REG_COUNT - 4)
        {
            IR::LabelInstr* loadBlockLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            IR::Instr* branchToBlock = IR::BranchInstr::New(Js::OpCode::BrEq_I4, loadBlockLabel, ldLenDstOpnd, IR::IntConstOpnd::New(current_reg_pass + 1, TyInt8, func), func);
            endLoop->InsertAfter(branchToBlock);
            callInstr->InsertBefore(loadBlockLabel);
        }
        // TODO: We can further optimize this with a GenerateFastStackArgumentsLdElemI that can
        // handle us passing along constant argument references and encode them into the offset
        // instead of having to use an IndirOpnd; this would allow us to save a few bytes here,
        // and reduce register pressure a hair

        // stemp.var = LdImm current_reg_pass
        IR::RegOpnd* localTemp = IR::RegOpnd::New(TyInt32, func);
        // We need to make it a tagged int because GenerateFastStackArgumentsLdElemI asserts if
        // it is not.
        localTemp->SetValueType(ValueType::GetTaggedInt());
        const IR::AutoReuseOpnd autoReuseldElemDstOpnd3(localTemp, func);
        this->InsertMove(localTemp, IR::IntConstOpnd::New(current_reg_pass, TyInt8, func, true), callInstr);

        // sTemp = LdElem_I [s4.var + current_reg_pass (aka stemp.var) ]
        nthArgument = IR::IndirOpnd::New(stackArgs, localTemp, TyMachReg, func);
        ldElemDstOpnd = IR::RegOpnd::New(TyMachReg, func);
        const IR::AutoReuseOpnd autoReuseldElemDstOpnd2(ldElemDstOpnd, func);
        ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
        callInstr->InsertBefore(ldElem);
        GenerateFastStackArgumentsLdElemI(ldElem);

        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(ldElemDstOpnd);
        callInstr->InsertBefore(argout);
        this->m_lowererMD.LoadDynamicArgument(argout, current_reg_pass + 4);
    }
    while (current_reg_pass-- != 0);

    callInstr->InsertBefore(doneArgs);

    /*return the length which will be used for callInfo generations & stack allocation*/
    return saveLenInstr->GetDst()->AsRegOpnd();
#endif
}
- void
- Lowerer::GenerateLoadStackArgumentByIndex(IR::Opnd *dst, IR::RegOpnd *indexOpnd, IR::Instr *instr, int32 offset, Func *func)
- {
- // Load argument set dst = [ebp + index].
- IR::RegOpnd *ebpOpnd = IR::Opnd::CreateFramePointerOpnd(func);
- IR::IndirOpnd *argIndirOpnd = nullptr;
- // The stack looks like this:
- // [new.target or FrameDisplay] <== EBP + formalParamOffset (4) + callInfo.Count
- // arguments[n] <== EBP + formalParamOffset (4) + n
- // ...
- // arguments[1] <== EBP + formalParamOffset (4) + 2
- // arguments[0] <== EBP + formalParamOffset (4) + 1
- // this or new.target <== EBP + formalParamOffset (4)
- // callinfo
- // function object
- // return addr
- // EBP-> EBP chain
- //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
- int32 actualOffset = GetFormalParamOffset() + offset;
- Assert(GetFormalParamOffset() == 4);
- const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
- argIndirOpnd = IR::IndirOpnd::New(ebpOpnd, indexOpnd, indirScale, TyMachReg, this->m_func);
- argIndirOpnd->SetOffset(actualOffset << indirScale);
- Lowerer::InsertMove(dst, argIndirOpnd, instr);
- }
- //This function assumes there is stackargs bailout and index is always on the range.
- bool
- Lowerer::GenerateFastStackArgumentsLdElemI(IR::Instr* ldElem)
- {
- // MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
- //
- IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
- // Now load the index and check if it is an integer.
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- Assert (indexOpnd && indexOpnd->IsTaggedInt());
- if(ldElem->m_func->IsInlinee())
- {
- IR::IndirOpnd *argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, indexOpnd);
- Lowerer::InsertMove(ldElem->GetDst(), argIndirOpnd, ldElem);
- }
- else
- {
- GenerateLoadStackArgumentByIndex(ldElem->GetDst(), indexOpnd, ldElem, indirOpnd->GetOffset() + 1, m_func); // +1 to offset 'this'
- }
- ldElem->Remove();
- return false;
- }
- IR::IndirOpnd*
- Lowerer::GetArgsIndirOpndForInlinee(IR::Instr* ldElem, IR::Opnd* valueOpnd)
- {
- Assert(ldElem->m_func->IsInlinee());
- IR::IndirOpnd* argIndirOpnd = nullptr;
- // Address of argument after 'this'
- const auto firstRealArgStackSym = ldElem->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
- this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
- IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, ldElem->m_func);
- const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, m_func);
- IR::RegOpnd *const baseOpnd = IR::RegOpnd::New(TyMachReg, ldElem->m_func);
- const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
- InsertLea(baseOpnd, firstArg, ldElem);
- if (valueOpnd->IsIntConstOpnd())
- {
- IntConstType offset = valueOpnd->AsIntConstOpnd()->GetValue() * MachPtr;
- // TODO: Assert(Math::FitsInDWord(offset));
- argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, ldElem->m_func);
- }
- else
- {
- Assert(valueOpnd->IsRegOpnd());
- const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
- argIndirOpnd = IR::IndirOpnd::New(baseOpnd, valueOpnd->AsRegOpnd(), indirScale, TyMachReg, ldElem->m_func);
- }
- return argIndirOpnd;
- }
- IR::IndirOpnd*
- Lowerer::GetArgsIndirOpndForTopFunction(IR::Instr* ldElem, IR::Opnd* valueOpnd)
- {
- // Load argument set dst = [ebp + index] (or grab from the generator object if m_func is a generator function).
- IR::RegOpnd *baseOpnd = m_func->GetJITFunctionBody()->IsCoroutine() ? LoadGeneratorArgsPtr(ldElem) : IR::Opnd::CreateFramePointerOpnd(m_func);
- IR::IndirOpnd* argIndirOpnd = nullptr;
- // The stack looks like this:
- // ...
- // arguments[1]
- // arguments[0]
- // this
- // callinfo
- // function object
- // return addr
- // EBP-> EBP chain
- //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
- uint16 actualOffset = m_func->GetJITFunctionBody()->IsCoroutine() ? 1 : GetFormalParamOffset() + 1; //5
- Assert(actualOffset == 5 || m_func->GetJITFunctionBody()->IsGenerator());
- if (valueOpnd->IsIntConstOpnd())
- {
- IntConstType offset = (valueOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr;
- // TODO: Assert(Math::FitsInDWord(offset));
- argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
- }
- else
- {
- const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
- argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), valueOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);
- // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
- // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
- // valueOpnd represents is not changed.
- argIndirOpnd->SetOffset(actualOffset << indirScale);
- }
- return argIndirOpnd;
- }
- void
- Lowerer::GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode opcode)
- {
- // Check if index < nr_actuals.
- InsertCompare(actualParamOpnd, valueOpnd, ldElem);
- // Jump to helper if index >= nr_actuals.
- // Do an unsigned check here so that a negative index will also fail.
- // (GenerateLdValueFromCheckedIndexOpnd does not guarantee positive index on x86.)
- InsertBranch(opcode, true, labelCreateHeapArgs, ldElem);
- }
bool
Lowerer::GenerateFastArgumentsLdElemI(IR::Instr* ldElem, IR::LabelInstr *labelFallThru)
{
    // Emits the fast path for arguments[i] when the stack-args optimization applies:
    //
    //  ---GenerateSmIntTest
    //  ---GenerateLdValueFromCheckedIndexOpnd
    //  ---LoadInputParamCount
    //  CMP actualParamOpnd, valueOpnd //Compare between the actual count & the index count (say i in arguments[i])
    //  JLE $labelCreateHeapArgs
    //  MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
    //  JMP $fallthrough
    //
    //labelCreateHeapArgs:
    //  ---Bail out to create Heap Arguments object
    //
    // Throws a rejit exception (disabling the stack-arg opt) when no fast path can be emitted.
    Assert(ldElem->DoStackArgsOpt());
    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    bool isInlinee = ldElem->m_func->IsInlinee();
    Func *func = ldElem->m_func;

    IR::LabelInstr *labelCreateHeapArgs = IR::LabelInstr::New(Js::OpCode::Label, func, true);

    // Now load the index and check if it is an integer.
    bool emittedFastPath = false;
    bool isNotInt = false;
    IntConstType value = 0;
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::Opnd *valueOpnd = nullptr;
    IR::Opnd *actualParamOpnd = nullptr;

    bool hasIntConstIndex = indirOpnd->TryGetIntConstIndexValue(true, &value, &isNotInt);

    if (isNotInt || (isInlinee && hasIntConstIndex && value >= (ldElem->m_func->actualCount - 1)))
    {
        //Outside the range of actuals, skip
    }
    else if (labelFallThru != nullptr && !(hasIntConstIndex && value < 0)) //if index is not a negative int constant
    {
        if (isInlinee)
        {
            // The inlinee's actual count is known at JIT time.
            actualParamOpnd = IR::IntConstOpnd::New(ldElem->m_func->actualCount - 1, TyInt32, func);
        }
        else
        {
            // Load actuals count, LoadHeapArguments will reuse the generated instructions here
            IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldElem, -1 /* don't include 'this' while counting actuals. */);
            actualParamOpnd = loadInputParamCountInstr->GetDst()->UseWithNewType(TyInt32,this->m_func);
        }

        if (hasIntConstIndex)
        {
            //Constant index
            valueOpnd = IR::IntConstOpnd::New(value, TyInt32, func);
        }
        else
        {
            //Load valueOpnd from the index
            valueOpnd =
                m_lowererMD.LoadNonnegativeIndex(
                    indexOpnd,
                    (
#if INT32VAR
                        indexOpnd->GetType() == TyUint32
#else
                        // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
                        true
#endif
                    ),
                    labelCreateHeapArgs,
                    labelCreateHeapArgs,
                    ldElem);
        }

        if (isInlinee)
        {
            if (!hasIntConstIndex)
            {
                // Runtime check to make sure the index is within the arguments.length range.
                // (Constant indices were already range-checked above.)
                GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, valueOpnd, actualParamOpnd, Js::OpCode::BrGe_A);
            }
        }
        else
        {
            GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, actualParamOpnd, valueOpnd, Js::OpCode::BrLe_A);
        }

        IR::Opnd *argIndirOpnd = nullptr;
        if (isInlinee)
        {
            argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, valueOpnd);
        }
        else
        {
            argIndirOpnd = GetArgsIndirOpndForTopFunction(ldElem, valueOpnd);
        }

        Lowerer::InsertMove(ldElem->GetDst(), argIndirOpnd, ldElem);

        // JMP $done
        InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

        // $labelCreateHeapArgs:
        ldElem->InsertBefore(labelCreateHeapArgs);
        emittedFastPath = true;
    }

    if (!emittedFastPath)
    {
        throw Js::RejitException(RejitReason::DisableStackArgOpt);
    }

    return emittedFastPath;
}
- bool
- Lowerer::GenerateFastRealStackArgumentsLdLen(IR::Instr *ldLen)
- {
- if(ldLen->m_func->IsInlinee())
- {
- //Get the length of the arguments
- Lowerer::InsertMove(ldLen->GetDst(),
- IR::IntConstOpnd::New(ldLen->m_func->actualCount - 1, TyUint32, ldLen->m_func),
- ldLen);
- }
- else
- {
- IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
- IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
- Lowerer::InsertMove(ldLen->GetDst(), actualCountOpnd, ldLen);
- }
- ldLen->Remove();
- return false;
- }
- bool
- Lowerer::GenerateFastArgumentsLdLen(IR::Instr *ldLen, IR::LabelInstr* labelFallThru)
- {
- // TEST argslot, argslot //Test if the arguments slot is zero
- // JNE $helper
- // actualCountOpnd <-LoadInputParamCount fastpath
- // SHL actualCountOpnd, actualCountOpnd, 1 // Left shift for tagging
- // INC actualCountOpnd // Tagging
- // MOV dst, actualCountOpnd
- // JMP $fallthrough
- //$helper:
- Assert(ldLen->DoStackArgsOpt());
- if(ldLen->m_func->IsInlinee())
- {
- //Get the length of the arguments
- Lowerer::InsertMove(ldLen->GetDst(),
- IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(ldLen->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, ldLen->m_func), // -1 to exclude this pointer
- ldLen);
- }
- else
- {
- IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
- IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
- this->m_lowererMD.GenerateInt32ToVarConversion(actualCountOpnd, ldLen);
- Lowerer::InsertMove(ldLen->GetDst(), actualCountOpnd, ldLen);
- }
- return true;
- }
- IR::RegOpnd*
- Lowerer::GenerateFunctionTypeFromFixedFunctionObject(IR::Instr *insertInstrPt, IR::Opnd* functionObjOpnd)
- {
- IR::RegOpnd * functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::Opnd *functionTypeOpnd = nullptr;
- if(functionObjOpnd->IsAddrOpnd())
- {
- IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
- // functionTypeRegOpnd = MOV [fixed function address + type offset]
- functionObjAddrOpnd->m_address;
- functionTypeOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::RecyclableObject::GetOffsetOfType()), TyMachPtr, this->m_func,
- IR::AddrOpndKindDynamicObjectTypeRef);
- }
- else
- {
- functionTypeOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, this->m_func);
- }
- Lowerer::InsertMove(functionTypeRegOpnd, functionTypeOpnd, insertInstrPt);
- return functionTypeRegOpnd;
- }
- void
- Lowerer::FinalLower()
- {
- this->m_lowererMD.FinalLower();
- // We check if there are any lazy bailouts in
- // LowererMD::FinalLower, so only insert the thunk
- // if needed
- if (this->m_func->HasLazyBailOut())
- {
- this->InsertLazyBailOutThunk();
- }
- // Ensure that the StartLabel and EndLabel are inserted
- // before the prolog and after the epilog respectively
- IR::LabelInstr * startLabel = m_func->GetFuncStartLabel();
- if (startLabel != nullptr)
- {
- m_func->m_headInstr->InsertAfter(startLabel);
- }
- IR::LabelInstr * endLabel = m_func->GetFuncEndLabel();
- if (endLabel != nullptr)
- {
- m_func->m_tailInstr->GetPrevRealInstr()->InsertBefore(endLabel);
- }
- }
void
Lowerer::InsertLazyBailOutThunk()
{
    // Emits, at the end of the top-level function, a shared thunk that every lazy
    // bailout jumps to. The thunk saves argument registers, re-enables implicit
    // calls, loads the bailout record, calls SaveAllRegistersAndBailOut, and then
    // jumps to the function epilog. x86/x64 only.
#if defined(_M_IX86) || defined(_M_X64)
    if (!this->m_func->IsTopFunc())
    {
        return;
    }

    Assert(this->m_func->GetLazyBailOutRecordSlot() != nullptr);

    IR::Instr *tailInstr = this->m_func->m_tailInstr;

    // Label (LazyBailOutThunk):
    IR::LabelInstr *lazyBailOutLabel = IR::LabelInstr::New(Js::OpCode::LazyBailOutThunkLabel, this->m_func, true /* isOpHelper */);
    lazyBailOutLabel->m_hasNonBranchRef = true; // Make sure that this label isn't removed
    LABELNAMESET(lazyBailOutLabel, "LazyBailOutThunk");
    tailInstr->InsertBefore(lazyBailOutLabel);

#ifdef _M_X64
    // 1. Save registers used for parameters, and rax, if necessary, into the shadow space allocated for register parameters:
    // mov [rsp + 16], RegArg1 (if branchConditionOpnd)
    // mov [rsp + 8], RegArg0
    // mov [rsp], rax
    extern const IRType RegTypes[RegNumCount];
    const RegNum regs[3] = { RegRAX, RegArg0, RegArg1 };
    for (int i = 2; i >= 0; i--)
    {
        RegNum reg = regs[i];
        const IRType regType = RegTypes[reg];
        Lowerer::InsertMove(
            IR::SymOpnd::New(this->m_func->m_symTable->GetArgSlotSym(static_cast<Js::ArgSlot>(i + 1)), regType, this->m_func),
            IR::RegOpnd::New(nullptr, reg, regType, this->m_func),
            tailInstr
        );
    }
#endif

    // 2. Always enable implicit call flag
    // If StFld/StElem instructions have both LazyBailOut and BailOnImplicitCallPreop and the operation turns out to not
    // be an implicit call, at that point, we have already disabled the implicit calls flag. We would then do lazy bailout
    // and not go back to the remaining code. Therefore, we need to re-enable implicit calls again in the thunk.
    IR::Opnd *disableImplicitCallFlagAddress = this->m_lowererMD.GenerateMemRef(
        this->m_func->GetThreadContextInfo()->GetDisableImplicitFlagsAddr(),
        TyInt8,
        tailInstr /* insertBeforeInstr */
    );

#ifdef _M_X64
    // On x64, we might decide to load the address of implicit flag to a register,
    // but since we are in Lowerer (past RegAlloc), all the operands won't have any
    // registers assigned to them. We force them to be rcx (because they are going
    // to be replaced anyway).
    // TODO: This hack doesn't work with ARM/ARM64
    // Will need to revisit this if we decide to do lazy bailout on those platforms
    IR::Instr *moveInstr = Lowerer::InsertMove(
        disableImplicitCallFlagAddress,
        IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, this->m_func, true),
        tailInstr /* insertBeforeInstr */
    );

    // Patch a register onto whichever operand ended up needing one.
    if (moveInstr->GetDst()->IsIndirOpnd())
    {
        moveInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->AsRegOpnd()->SetReg(RegArg0);
    }

    if (moveInstr->m_prev->GetDst()->IsRegOpnd())
    {
        moveInstr->m_prev->GetDst()->AsRegOpnd()->SetReg(RegArg0);
    }
#else
    Lowerer::InsertMove(
        disableImplicitCallFlagAddress,
        IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, this->m_func, true),
        tailInstr /* insertBeforeInstr */
    );
#endif

#ifdef _M_X64
    // 3. mov rcx, [rbp + offset] ; for bailout record
    IR::RegOpnd *arg0 = IR::RegOpnd::New(nullptr, RegArg0, TyMachPtr, this->m_func);
    IR::SymOpnd *bailOutRecordAddr = IR::SymOpnd::New(this->m_func->GetLazyBailOutRecordSlot(), TyMachPtr, this->m_func);
    Lowerer::InsertMove(arg0, bailOutRecordAddr, tailInstr, false /* generateWriteBarrier */);
#else
    // 3. Put the BailOutRecord on the stack for x86
    IR::Instr *const newInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    IR::SymOpnd *bailOutRecordAddr = IR::SymOpnd::New(this->m_func->GetLazyBailOutRecordSlot(), TyMachPtr, this->m_func);
    newInstr->SetSrc1(bailOutRecordAddr);
    tailInstr->InsertBefore(newInstr);
#endif

    // 4. call SaveAllRegistersAndBailOut
    IR::Instr *callInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSaveAllRegistersAndBailOut, this->m_func));
    tailInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);

    // 5. jmp to function's epilog
    IR::LabelInstr *exitLabel = this->m_func->m_exitInstr->GetPrevLabelInstr();
    IR::BranchInstr *branchInstr = IR::BranchInstr::New(Js::OpCode::JMP, exitLabel, this->m_func);
    tailInstr->InsertBefore(branchInstr);
#endif
}
void
Lowerer::EHBailoutPatchUp()
{
    Assert(this->m_func->isPostLayout);
    // 1. Insert return thunks for all the regions.
    // 2. Set the hasBailedOut bit to true on all bailout paths in EH regions.
    // 3. Insert code after every bailout in a try or catch region to save the return value on the stack, and jump to the return thunk (See Region.h) of that region.
    // 4. Insert code right before the epilog, to restore the return value (saved in 2.) from a bailout into eax.

    IR::LabelInstr * restoreReturnValueFromBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Reuse an existing label right before the exit as the epilog label, or create one.
    IR::LabelInstr * epilogLabel;
    IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (exitPrevInstr->IsLabelInstr())
    {
        epilogLabel = exitPrevInstr->AsLabelInstr();
    }
    else
    {
        epilogLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(epilogLabel);
    }

    IR::Instr * tmpInstr = nullptr;
    bool restoreReturnFromBailoutEmitted = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->m_func)
    {
        // Track the current EH region as we cross label boundaries.
        if (instr->IsLabelInstr())
        {
            this->currentRegion = instr->AsLabelInstr()->GetRegion();
        }

        // Consider (radua): Assert(this->currentRegion) here?
        if (this->currentRegion)
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch || currentRegionType == RegionTypeFinally)
            {
                // A non-excepting finally nested (transitively) directly under the root
                // needs no patch-up; skip it.
                if (this->currentRegion->IsNonExceptingFinally())
                {
                    Region * parent = this->currentRegion->GetParent();
                    while (parent->IsNonExceptingFinally())
                    {
                        parent = parent->GetParent();
                    }
                    if (parent->GetType() == RegionTypeRoot)
                    {
                        continue;
                    }
                }
                this->InsertReturnThunkForRegion(this->currentRegion, restoreReturnValueFromBailoutLabel);
                if (instr->HasBailOutInfo())
                {
                    if (instr->GetBailOutInfo()->bailOutFunc == this->m_func)
                    {
                        // We dont set this bit for inlined code, if there was a bailout in the inlined code,
                        // and an exception was thrown, we want the caller's handler to handle the exception accordingly.
                        // TODO : Revisit when we start inlining functions with try-catch/try-finally
                        this->SetHasBailedOut(instr);
                    }
                    tmpInstr = this->EmitEHBailoutStackRestore(instr);
                    this->EmitSaveEHBailoutReturnValueAndJumpToRetThunk(tmpInstr);
                    if (!restoreReturnFromBailoutEmitted)
                    {
                        this->EmitRestoreReturnValueFromEHBailout(restoreReturnValueFromBailoutLabel, epilogLabel);
                        restoreReturnFromBailoutEmitted = true;
                    }
                }
            }
        }
    }
    NEXT_INSTR_IN_FUNC_EDITING
}
bool
Lowerer::GenerateFastLdFld(IR::Instr * const instrLdFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod,
    IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd, bool* pIsHelper, IR::LabelInstr** pLabelHelper)
{
    // Generates:
    //
    // r1 = object->type
    // if (r1 is taggedInt) goto helper
    // Load inline cache
    // if monomorphic
    //     r2 = address of the monomorphic inline cache
    // if polymorphic
    //     r2 = address of the polymorphic inline cache array
    //     r3 = (type >> PIC shift amount) & (PIC size - 1)
    //     r2 = r2 + r3
    // Try load property using proto cache (if protoFirst)
    // Try load property using local cache
    // Try loading property using proto cache (if !protoFirst)
    // Try loading property using flags cache
    //
    // Loading property using local cache:
    //     if (r1 == r2->u.local.type)
    //         result = load inline slot r2->u.local.slotIndex from r1
    //         goto fallthru
    //     if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    //         result = load aux slot r2->u.local.slotIndex from r1
    //         goto fallthru
    //
    // Loading property using proto cache:
    //     if (r1 == r2->u.proto.type)
    //         r3 = r2->u.proto.prototypeObject
    //         result = load inline slot r2->u.proto.slotIndex from r3
    //         goto fallthru
    //     if (r1 | InlineCacheAuxSlotTypeTag) == r2.u.proto.type)
    //         r3 = r2->u.proto.prototypeObject
    //         result = load aux slot r2->u.proto.slotIndex from r3
    //         goto fallthru
    //
    // Loading property using flags cache:
    //     if (r2->u.accessor.flags & (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) == 0)
    //         if (r1 == r2->u.accessor.type)
    //             result = load inline slot r2->u.accessor.slotIndex from r1
    //             goto fallthru
    //         if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.accessor.type)
    //             result = load aux slot r2->u.accessor.slotIndex from r1
    //             goto fallthru
    //
    // Loading an inline slot:
    //     result = [r1 + slotIndex * sizeof(Var)]
    //
    // Loading an aux slot:
    //     slotArray = r1->auxSlots
    //     result = [slotArray + slotIndex * sizeof(Var)]
    //
    // We only emit the code block for a type of cache (local/proto/flags) if the profile data
    // indicates that type of cache was used to load the property in the past.
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // load the property from an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // load the property from an inline slot before.
    //
    // NOTE(review): helperMethod, polymorphicHelperMethod and labelBailOut are not referenced in
    // this function body; presumably the caller uses them when emitting the helper call after this
    // function returns false -- confirm against the call sites.

    IR::Opnd * opndSrc = instrLdFld->GetSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as src of LdFld");
    Assert(!instrLdFld->DoStackArgsOpt());

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: false\n"),
        Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);

    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;
    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;

    // Decide which cache fast paths to emit. Method loads try the proto cache first,
    // since methods commonly live on the prototype.
    bool doLocal = true;
    bool doProto = instrLdFld->m_opcode == Js::OpCode::LdMethodFld
        || instrLdFld->m_opcode == Js::OpCode::LdRootMethodFld
        || instrLdFld->m_opcode == Js::OpCode::ScopedLdMethodFld;
    bool doProtoFirst = doProto;
    bool doInlineSlots = true;
    bool doAuxSlots = true;

    // Prune the fast paths using profile data when available.
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrLdFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrLdFld = instrLdFld->AsProfiledInstr();
        if (profiledInstrLdFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            doProto = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromProto);
            doLocal = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromLocal);

            if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrLdFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field load.
            doLocal = false;
            doProto = false;
        }
    }

    if (!doLocal && !doProto)
    {
        return false;
    }

    IR::LabelInstr * labelFallThru = instrLdFld->GetOrCreateContinueLabel();
    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    }
    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;

    // Load the inline cache pointer: polymorphic cache array base, or the monomorphic cache.
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        Lowerer::InsertMove(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrLdFld);
    }
    else
    {
        Lowerer::InsertMove(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd, isHelper), instrLdFld);
    }

    // If the caller didn't already load the object's type, do the tagged-value test and type load now.
    if (typeOpnd == nullptr)
    {
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, typeOpnd, labelHelper);
    }

    if (usePolymorphicInlineCache)
    {
        // Index into the polymorphic cache array by hashing the type.
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrLdFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }

    IR::LabelInstr * labelNext = nullptr;
    IR::Opnd * opndDst = instrLdFld->GetDst();
    IR::RegOpnd * opndTaggedType = nullptr;
    IR::BranchInstr * labelNextBranchToPatch = nullptr;

    // Each emitted check falls through to the next via labelNext; the miss branch of the
    // final check is retargeted to the helper label at the end of this function.
    if (doProto && doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            // The aux-slot checks compare against the type with the aux slot tag OR'd in;
            // compute that tagged type once and reuse it.
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    if (doLocal)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateLocalInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateLocalInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    if (doProto && !doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    Assert(labelNextBranchToPatch);
    // The last check's miss branch goes straight to the helper; its labelNext is then unreferenced.
    labelNextBranchToPatch->SetTarget(labelHelper);
    labelNext->Remove();

    // $helper:
    //     dst = CALL Helper(inlineCache, base, field, scriptContext)
    // $fallthru:
    isHelper = true;

    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
- void
- Lowerer::GenerateAuxSlotAdjustmentRequiredCheck(
- IR::Instr * instrToInsertBefore,
- IR::RegOpnd * opndInlineCache,
- IR::LabelInstr * labelHelper)
- {
- // regSlotCap = MOV [&(inlineCache->u.local.rawUInt16)] // sized to 16 bits
- IR::RegOpnd * regSlotCap = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
- IR::IndirOpnd * memSlotCap = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.rawUInt16), TyUint16, instrToInsertBefore->m_func);
- InsertMove(regSlotCap, memSlotCap, instrToInsertBefore);
- IR::IntConstOpnd * constSelectorBitCount = IR::IntConstOpnd::New(Js::InlineCache::CacheLayoutSelectorBitCount, TyUint16, instrToInsertBefore->m_func, /* dontEncode = */ true);
- #if _M_ARM64
- IR::Instr * testBranch = InsertBranch(Js::OpCode::TBZ, labelHelper, instrToInsertBefore);
- testBranch->SetSrc1(regSlotCap);
- testBranch->SetSrc2(constSelectorBitCount);
- #else
- // SAR regSlotCap, Js::InlineCache::CacheLayoutSelectorBitCount
- InsertShiftBranch(Js::OpCode::Shr_A, regSlotCap, regSlotCap, constSelectorBitCount, Js::OpCode::BrNeq_A, true, labelHelper, instrToInsertBefore);
- #endif
- }
- void
- Lowerer::GenerateSetObjectTypeFromInlineCache(
- IR::Instr * instrToInsertBefore,
- IR::RegOpnd * opndBase,
- IR::RegOpnd * opndInlineCache,
- bool isTypeTagged)
- {
- // regNewType = MOV [&(inlineCache->u.local.type)]
- IR::RegOpnd * regNewType = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
- IR::IndirOpnd * memNewType = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrToInsertBefore->m_func);
- InsertMove(regNewType, memNewType, instrToInsertBefore);
- // AND regNewType, ~InlineCacheAuxSlotTypeTag
- if (isTypeTagged)
- {
- // On 64-bit platforms IntConstOpnd isn't big enough to hold TyMachReg values.
- IR::IntConstOpnd * constTypeTagComplement = IR::IntConstOpnd::New(~InlineCacheAuxSlotTypeTag, TyMachReg, instrToInsertBefore->m_func, /* dontEncode = */ true);
- InsertAnd(regNewType, regNewType, constTypeTagComplement, instrToInsertBefore);
- }
- // MOV base->type, regNewType
- IR::IndirOpnd * memObjType = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrToInsertBefore->m_func);
- InsertMove(memObjType, regNewType, instrToInsertBefore);
- }
- bool
- Lowerer::GenerateFastStFld(IR::Instr * const instrStFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod, IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd,
- bool* pIsHelper, IR::LabelInstr** pLabelHelper, bool withPutFlags, Js::PropertyOperationFlags flags)
- {
- // Generates:
- //
- // r1 = object->type
- // if (r1 is taggedInt) goto helper
- // Load inline cache
- // if monomorphic
- // r2 = address of the monomorphic inline cache
- // if polymorphic
- // r2 = address of the polymorphic inline cache array
- // r3 = (type >> PIC shift amount) & (PIC size - 1)
- // r2 = r2 + r3
- // Try store property using local cache
- //
- // Loading property using local cache:
- // if (r1 == r2->u.local.type)
- // store value to inline slot r2->u.local.slotIndex on r1
- // goto fallthru
- // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
- // store value to aux slot r2->u.local.slotIndex on r1
- // goto fallthru
- //
- // Storing to an inline slot:
- // [r1 + slotIndex * sizeof(Var)] = value
- //
- // Storing to an aux slot:
- // slotArray = r1->auxSlots
- // [slotArray + slotIndex * sizeof(Var)] = value
- //
- // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
- // store the property to an aux slot before.
- // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
- // store the property to an inline slot before.
- IR::Opnd * opndSrc = instrStFld->GetSrc1();
- IR::Opnd * opndDst = instrStFld->GetDst();
- AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as dst of StFld");
- IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
- PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
- PHASE_PRINT_TESTTRACE(
- Js::ObjTypeSpecPhase,
- this->m_func,
- _u("Field store: %s, property ID: %u, func: %s, cache ID: %d, cloned cache: false\n"),
- Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
- propertySym->m_propertyId,
- this->m_func->GetJITFunctionBody()->GetDisplayName(),
- propertySymOpnd->m_inlineCacheIndex);
- Assert(pIsHelper != nullptr);
- bool& isHelper = *pIsHelper;
- Assert(pLabelHelper != nullptr);
- IR::LabelInstr*& labelHelper = *pLabelHelper;
- bool doStore = true;
- bool doAdd = false;
- bool doInlineSlots = true;
- bool doAuxSlots = true;
- if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrStFld->IsProfiledInstr())
- {
- IR::ProfiledInstr * profiledInstrStFld = instrStFld->AsProfiledInstr();
- if (profiledInstrStFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
- {
- if (!(profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)))
- {
- return false;
- }
- if (!PHASE_OFF(Js::AddFldFastPathPhase, this->m_func))
- {
- // We always try to do the store field fast path, unless the profile specifically says we never set, but always add a property here.
- if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)) == Js::FldInfo_FromLocalWithoutProperty)
- {
- doStore = false;
- }
- // On the other hand, we only emit the add field fast path, if the profile explicitly says we do add properties here.
- if (!!(profiledInstrStFld->u.FldInfo().flags & Js::FldInfo_FromLocalWithoutProperty))
- {
- doAdd = true;
- }
- }
- else
- {
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- #endif
- PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
- _u("AddFldFastPath: function: %s(%s) property ID: %u no fast path, because the phase is off.\n"),
- this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
- propertySym->m_propertyId);
- }
- if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
- {
- // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
- doAuxSlots = false;
- }
- else if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
- {
- // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
- doInlineSlots = false;
- }
- }
- else if (!profiledInstrStFld->u.FldInfo().valueType.IsUninitialized())
- {
- // We have value type info about the field but no flags. This means we shouldn't generate any
- // fast paths for this field store.
- return false;
- }
- }
- Assert(doStore || doAdd);
- if (labelHelper == nullptr)
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- }
- IR::LabelInstr * labelFallThru = instrStFld->GetOrCreateContinueLabel();
- IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;
- if (doAdd)
- {
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- #endif
- PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
- _u("AddFldFastPath: function: %s(%s) property ID: %d %s fast path for %s.\n"),
- this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
- propertySym->m_propertyId,
- usePolymorphicInlineCache ? _u("poly") : _u("mono"), doStore ? _u("store and add") : _u("add only"));
- }
- IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
- if (usePolymorphicInlineCache)
- {
- Lowerer::InsertMove(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrStFld);
- }
- else
- {
- Lowerer::InsertMove(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrStFld, propertySymOpnd, isHelper), instrStFld);
- }
- if (typeOpnd == nullptr)
- {
- typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- GenerateObjectTestAndTypeLoad(instrStFld, opndBase, typeOpnd, labelHelper);
- }
- if (usePolymorphicInlineCache)
- {
- LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrStFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
- }
- IR::LabelInstr * labelNext = nullptr;
- IR::RegOpnd * opndTaggedType = nullptr;
- IR::BranchInstr * lastBranchToNext = nullptr;
- if (doStore)
- {
- if (doInlineSlots)
- {
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext);
- this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
- instrStFld->InsertBefore(labelNext);
- }
- if (doAuxSlots)
- {
- if (opndTaggedType == nullptr)
- {
- opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
- LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
- }
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext);
- this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
- instrStFld->InsertBefore(labelNext);
- }
- }
- if (doAdd)
- {
- if (doInlineSlots)
- {
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext, true);
- GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, false);
- this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
- instrStFld->InsertBefore(labelNext);
- }
- if (doAuxSlots)
- {
- if (opndTaggedType == nullptr)
- {
- opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
- LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
- }
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext, true);
- GenerateAuxSlotAdjustmentRequiredCheck(instrStFld, opndInlineCache, labelHelper);
- GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, true);
- this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
- instrStFld->InsertBefore(labelNext);
- }
- }
- Assert(lastBranchToNext);
- lastBranchToNext->SetTarget(labelHelper);
- labelNext->Remove();
- // $helper:
- // CALL Helper(inlineCache, base, field, src, scriptContext)
- // $fallthru:
- isHelper = true;
- // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
- return false;
- }
- bool Lowerer::GenerateFastStFldForCustomProperty(IR::Instr *const instr, IR::LabelInstr * *const labelHelperRef)
- {
- Assert(instr);
- Assert(labelHelperRef);
- Assert(!*labelHelperRef);
- switch(instr->m_opcode)
- {
- case Js::OpCode::StFld:
- case Js::OpCode::StFldStrict:
- break;
- default:
- return false;
- }
- IR::SymOpnd *const symOpnd = instr->GetDst()->AsSymOpnd();
- PropertySym *const propertySym = symOpnd->m_sym->AsPropertySym();
- if(propertySym->m_propertyId != Js::PropertyIds::lastIndex || !symOpnd->IsPropertySymOpnd())
- {
- return false;
- }
- const ValueType objectValueType(symOpnd->GetPropertyOwnerValueType());
- if(!objectValueType.IsLikelyRegExp())
- {
- return false;
- }
- if(instr->HasBailOutInfo())
- {
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if(!BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) || bailOutKind & IR::BailOutKindBits)
- {
- // Other bailout kinds will likely need bailout checks that would not be generated here. In particular, if a type
- // check is necessary here to guard against downstream property accesses on the same object, the type check will
- // fail and cause a bailout if the object is a RegExp object since the "lastIndex" property accesses are not cached.
- return false;
- }
- }
- Func *const func = instr->m_func;
- IR::RegOpnd *const objectOpnd = symOpnd->CreatePropertyOwnerOpnd(func);
- const IR::AutoReuseOpnd autoReuseObjectOpnd(objectOpnd, func);
- IR::LabelInstr *labelHelper = nullptr;
- if(!objectOpnd->IsNotTaggedValue())
- {
- // test object, 1
- // jnz $helper
- if(!labelHelper)
- {
- *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- m_lowererMD.GenerateObjectTest(objectOpnd, instr, labelHelper);
- }
- if(!objectValueType.IsObject())
- {
- // cmp [object], Js::JavascriptRegExp::vtable
- // jne $helper
- if(!labelHelper)
- {
- *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- InsertCompareBranch(
- IR::IndirOpnd::New(objectOpnd, 0, TyMachPtr, func),
- LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp),
- Js::OpCode::BrNeq_A,
- labelHelper,
- instr);
- objectOpnd->SetValueType(objectValueType.ToDefiniteObject());
- }
- // mov [object + offset(lastIndexVar)], src
- // mov [object + offset(lastIndexOrFlag)], Js::JavascriptRegExp::NotCachedValue
- // jmp $done
- InsertMove(
- IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, func),
- instr->GetSrc1(),
- instr);
- InsertMove(
- IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, func),
- IR::IntConstOpnd::New(Js::JavascriptRegExp::NotCachedValue, TyUint32, func, true),
- instr);
- InsertBranch(Js::OpCode::Br, instr->GetOrCreateContinueLabel(), instr);
- return true;
- }
- IR::RegOpnd *
- Lowerer::GenerateIsBuiltinRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject, IR::LabelInstr *labelContinue, bool isInHelper)
- {
- // CMP [srcReg], Js::DynamicObject::`vtable'
- // JEQ $fallThough
- // MOV r1, [src1 + offset(type)] -- get the type id
- // MOV r1, [r1 + offset(typeId)]
- // ADD r1, ~TypeIds_LastStaticType -- if (typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
- // CMP r1, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
- // JA $helper
- //fallThrough:
- IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
- if (checkObjectAndDynamicObject)
- {
- if (!regOpnd->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
- }
- GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
- }
- IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- IR::IndirOpnd *indirOpnd;
- // MOV typeRegOpnd, [src1 + offset(type)]
- indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- InsertMove(typeRegOpnd, indirOpnd, insertInstr);
- // MOV typeIdRegOpnd, [typeRegOpnd + offset(typeId)]
- indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
- InsertMove(typeIdRegOpnd, indirOpnd, insertInstr);
- // ADD typeIdRegOpnd, ~TypeIds_LastStaticType
- InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd,
- IR::IntConstOpnd::New(~Js::TypeIds_LastStaticType, TyInt32, this->m_func, true), insertInstr);
- // CMP typeIdRegOpnd, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
- InsertCompare(
- typeIdRegOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_LastBuiltinDynamicObject - Js::TypeIds_LastStaticType - 1, TyInt32, this->m_func),
- insertInstr);
- if (labelContinue)
- {
- // On success, go to continuation label.
- InsertBranch(Js::OpCode::BrLe_A, true, labelContinue, insertInstr);
- }
- else
- {
- // On failure, go to helper.
- InsertBranch(Js::OpCode::BrGt_A, true, labelHelper, insertInstr);
- }
- // $fallThrough
- insertInstr->InsertBefore(labelFallthrough);
- return typeRegOpnd;
- }
- void Lowerer::GenerateIsDynamicObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool fContinueLabel)
- {
- // CMP [srcReg], Js::DynamicObject::`vtable'
- InsertCompare(
- IR::IndirOpnd::New(regOpnd, 0, TyMachPtr, m_func),
- LoadVTableValueOpnd(insertInstr, VTableValue::VtableDynamicObject),
- insertInstr);
- if (fContinueLabel)
- {
- // JEQ $fallThough
- Lowerer::InsertBranch(Js::OpCode::BrEq_A, labelHelper, insertInstr);
- }
- else
- {
- // JNE $helper
- Lowerer::InsertBranch(Js::OpCode::BrNeq_A, labelHelper, insertInstr);
- }
- }
- void Lowerer::GenerateIsRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject)
- {
- // CMP [srcReg], Js::DynamicObject::`vtable'
- // JEQ $fallThough
- // MOV r1, [src1 + offset(type)] -- get the type id
- // MOV r1, [r1 + offset(typeId)]
- // ADD r1, ~TypeIds_LastJavascriptPrimitiveType -- if (typeId > TypeIds_LastJavascriptPrimitiveType && typeId <= TypeIds_LastTrueJavascriptObjectType)
- // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
- // JA $helper
- //fallThrough:
- IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- if (checkObjectAndDynamicObject)
- {
- if (!regOpnd->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
- }
- this->GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
- }
- IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- // MOV r1, [src1 + offset(type)]
- InsertMove(typeRegOpnd, IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), insertInstr);
- // MOV r1, [r1 + offset(typeId)]
- InsertMove(typeIdRegOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), insertInstr);
- // ADD r1, ~TypeIds_LastJavascriptPrimitiveType
- InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd, IR::IntConstOpnd::New(~Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func, true), insertInstr);
- // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
- InsertCompare(
- typeIdRegOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_LastTrueJavascriptObjectType - Js::TypeIds_LastJavascriptPrimitiveType - 1, TyInt32, this->m_func),
- insertInstr);
- // JA $helper
- InsertBranch(Js::OpCode::BrGe_A, true, labelHelper, insertInstr);
- // $fallThrough
- insertInstr->InsertBefore(labelFallthrough);
- }
- bool
- Lowerer::GenerateLdThisCheck(IR::Instr * instr)
- {
- //
- // If not a recyclable object, jump to $helper
- // MOV dst, src1 -- return the object itself
- // JMP $fallthrough
- // $helper:
- // (caller generates helper call)
- // $fallthrough:
- //
- IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
- IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- GenerateIsRecyclableObject(src1, instr, helper);
- // MOV dst, src1
- if (instr->GetDst() && !instr->GetDst()->IsEqual(src1))
- {
- InsertMove(instr->GetDst(), src1, instr);
- }
- // JMP $fallthrough
- InsertBranch(Js::OpCode::Br, fallthrough, instr);
- // $helper:
- // (caller generates helper call)
- // $fallthrough:
- instr->InsertBefore(helper);
- instr->InsertAfter(fallthrough);
- return true;
- }
- //
- // TEST src, Js::AtomTag
- // JNE $done
- // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
- // CMP [typeReg + offsetof(Type::typeid)], TypeIds_ActivationObject
- // JEQ $helper
- // $done:
- // MOV dst, src
- // JMP $fallthru
// $helper:
- // MOV dst, undefined
- // $fallthru:
// Fast path for LdThisStrict: in strict mode "this" is passed through
// unchanged unless its type is TypeIds_ActivationObject, in which case the
// helper path replaces it with undefined.
// Returns true (fast path emitted; the caller appends the helper call at $helper).
bool
Lowerer::GenerateLdThisStrict(IR::Instr* instr)
{
    IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    // $done: src is known non-activation, but a copy into dst is still needed.
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*helper*/true);
    // Only emit the dst copy when a dst exists and differs from src1.
    bool assign = instr->GetDst() && !instr->GetDst()->IsEqual(src1);
    if (!src1->IsNotTaggedValue())
    {
        // TEST src1, Js::AtomTag
        // JNE $done
        // Tagged values are never activation objects, so they skip the type
        // check; when no copy is needed, go straight to $fallthru instead.
        this->m_lowererMD.GenerateObjectTest(src1, instr, assign ? done : fallthru);
    }
    // MOV typeReg, [src1 + offsetof(RecyclableObject, type)]
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src1, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
    Lowerer::InsertMove(typeReg, indirOpnd, instr);
    // CMP [typeReg + offsetof(Type, typeId)], TypeIds_ActivationObject
    IR::IndirOpnd * typeID = IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
    IR::Opnd * activationObject = IR::IntConstOpnd::New(Js::TypeIds_ActivationObject, TyMachReg, this->m_func);
    Lowerer::InsertCompare(typeID, activationObject, instr);
    // JEQ $helper
    Lowerer::InsertBranch(Js::OpCode::BrEq_A, helper, instr);
    if (assign)
    {
        // $done:
        instr->InsertBefore(done);
        // MOV dst, src
        Lowerer::InsertMove(instr->GetDst(), src1, instr);
    }
    // JMP $fallthru
    Lowerer::InsertBranch(Js::OpCode::Br, fallthru, instr);
    // $helper: activation object - "this" becomes undefined.
    instr->InsertBefore(helper);
    if (instr->GetDst())
    {
        // MOV dst, undefined
        Lowerer::InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined), instr);
    }
    // $fallthru:
    instr->InsertAfter(fallthru);
    return true;
}
- // given object instanceof function, functionReg is a register with function,
- // objectReg is a register with instance and inlineCache is an InstIsInlineCache.
- // We want to generate:
- //
- // fallback on helper (will patch the inline cache) if function does not match the cache
- // MOV dst, Js::false
- // CMP functionReg, [&(inlineCache->function)]
- // JNE helper
- //
- // fallback if object is a tagged int
- // TEST objectReg, Js::AtomTag
- // JNE done
- //
// return false if object is a primitive
// MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
// CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
// JLE done
// fallback if object's type is not the cached type
// CMP typeReg, [&(inlineCache->type)]
// JNE helper
// use the cached result and fall through
// MOV dst, [&(inlineCache->result)]
// JMP done
- //
- //
- // $helper
- // $done
// Fast path for "instanceof" via the per-site IsInst inline cache:
//  - bail to $helper if the function doesn't match the cached function,
//  - result stays false for tagged ints and other primitive type ids,
//  - bail to $helper if the object's type doesn't match the cached type,
//  - otherwise load the cached result.
// Returns true (caller emits the helper call at $helper).
bool
Lowerer::GenerateFastIsInst(IR::Instr * instr)
{
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd * objectSrc;
    IR::Opnd * functionSrc;
    intptr_t inlineCache;
    IR::Instr * instrArg;
    // We are going to use the extra ArgOut_A instructions to lower the helper call later,
    // so we leave them alone here and clean them up then.
    // src1 is the inline-cache index; src2 chains through the single-def
    // ArgOut_A instructions carrying the object (first) and function (second).
    inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(instr->GetSrc1()->AsIntConstOpnd()->AsUint32());
    Assert(instr->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    objectSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    functionSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2() == nullptr);
    // MOV dst, Js::false -- default result, overwritten only on a cache hit.
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
    IR::RegOpnd * functionReg = GetRegOpnd(functionSrc, instr, m_func, TyMachReg);
    // CMP functionReg, [&(inlineCache->function)]
    {
        IR::Opnd* cacheFunction = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfFunction(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheFunctionRef);
        InsertCompare(functionReg, cacheFunction, instr);
    }
    // JNE helper -- cache was populated for a different function.
    InsertBranch(Js::OpCode::BrNeq_A, helper, instr);
    IR::RegOpnd * objectReg = GetRegOpnd(objectSrc, instr, m_func, TyMachReg);
    // TEST objectReg, Js::AtomTag
    // JNE done  -- tagged int: result remains false.
    m_lowererMD.GenerateObjectTest(objectReg, instr, done);
    // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
    InsertMove(typeReg, IR::IndirOpnd::New(objectReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func), instr);
    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    {
        IR::IndirOpnd * typeId = IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func);
        IR::IntConstOpnd * lastPrimitive = IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, m_func);
        InsertCompare(typeId, lastPrimitive, instr);
    }
    // JLE done -- primitive type id: result remains false.
    InsertBranch(Js::OpCode::BrLe_A, done, instr);
    // CMP typeReg, [&(inlineCache->type]
    {
        IR::Opnd * cacheType = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfType(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheTypeRef);
        InsertCompare(typeReg, cacheType, instr);
    }
    // JNE helper -- type mismatch; helper will patch the cache.
    InsertBranch(Js::OpCode::BrNeq_A, helper, instr);
    // MOV dst, [&(inlineCache->result)] -- cache hit: reuse the stored answer.
    {
        IR::Opnd * cacheResult = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfResult(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheResultRef);
        InsertMove(instr->GetDst(), cacheResult, instr);
    }
    // JMP done
    InsertBranch(Js::OpCode::Br, done, instr);
    // LABEL helper
    instr->InsertBefore(helper);
    instr->InsertAfter(done);
    return true;
}
- void Lowerer::GenerateBooleanNegate(IR::Instr * instr, IR::Opnd * srcBool, IR::Opnd * dst)
- {
- // dst = src
- // dst = dst ^ (true ^ false) (= !src)
- Lowerer::InsertMove(dst, srcBool, instr);
- ScriptContextInfo* sci = instr->m_func->GetScriptContextInfo();
- IR::AddrOpnd* xorval = IR::AddrOpnd::New(sci->GetTrueAddr() ^ sci->GetFalseAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func, true);
- InsertXor(dst, dst, xorval, instr);
- }
- bool Lowerer::GenerateJSBooleanTest(IR::RegOpnd * regSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
- {
- if (regSrc->GetValueType().IsBoolean())
- {
- if (fContinueLabel)
- {
- // JMP $labelTarget
- InsertBranch(Js::OpCode::Br, labelTarget, insertInstr);
- #if DBG
- if (labelTarget->isOpHelper)
- {
- labelTarget->m_noHelperAssert = true;
- }
- #endif
- }
- return false;
- }
- IR::IndirOpnd * vtablePtrOpnd = IR::IndirOpnd::New(regSrc, 0, TyMachPtr, this->m_func);
- IR::Opnd * jsBooleanVTable = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptBoolean);
- InsertCompare(vtablePtrOpnd, jsBooleanVTable, insertInstr);
- if (fContinueLabel)
- {
- // JEQ $labelTarget
- InsertBranch(Js::OpCode::BrEq_A, labelTarget, insertInstr);
- // $helper
- InsertLabel(true, insertInstr);
- }
- else
- {
- // JNE $labelTarget
- InsertBranch(Js::OpCode::BrNeq_A, labelTarget, insertInstr);
- }
- return true;
- }
- bool Lowerer::GenerateFastEqBoolInt(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
- {
- Assert(instr);
- // There's a total of 8 modes for this function, based on these inferred flags
- bool isBranchNotCompare = instr->IsBranchInstr();
- bool isStrict = false;
- bool isNegOp = false;
- switch (instr->m_opcode)
- {
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- case Js::OpCode::CmSrEq_A:
- case Js::OpCode::CmSrNeq_A:
- isStrict = true;
- break;
- default:
- break;
- }
- switch (instr->m_opcode)
- {
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- case Js::OpCode::CmSrEq_A:
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrNotNeq_A:
- case Js::OpCode::CmEq_A:
- isNegOp = false;
- break;
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- case Js::OpCode::CmSrNeq_A:
- case Js::OpCode::BrNeq_A:
- case Js::OpCode::BrNotEq_A:
- case Js::OpCode::CmNeq_A:
- isNegOp = true;
- break;
- default:
- // This opcode is not one of the ones that should be handled here.
- return false;
- break;
- }
- IR::Opnd *src1 = instr->GetSrc1();
- IR::Opnd *src2 = instr->GetSrc2();
- // The instrucions given to this _should_ all be 2-arg.
- Assert(src1 && src2);
- if (!(src1 && src2))
- {
- return false;
- }
- // If it's a branch instruction, we'll want these to be defined
- //IR::BranchInstr *instrBranch = nullptr;
- IR::LabelInstr *targetInstr = nullptr;
- IR::LabelInstr *labelFallthrough = nullptr;
- if (isBranchNotCompare)
- {
- IR::BranchInstr * instrBranch = instr->AsBranchInstr();
- targetInstr = instrBranch->GetTarget();
- labelFallthrough = instrBranch->GetOrCreateContinueLabel(isInHelper);
- }
- // Assume we need the helper until we can show otherwise.
- *pNeedHelper = true;
- // If we don't know the final types well enough at JIT time, a helper block to set
- // the inputs to the correct types will be needed.
- IR::LabelInstr *labelHelper = nullptr;
- // If we're doing a compare and can handle it early, then we want to skip the helper
- IR::LabelInstr *labelDone = instr->GetOrCreateContinueLabel(isInHelper);
- // Normallize for orderings
- IR::Opnd *srcBool = nullptr;
- IR::Opnd *srcInt = nullptr;
- if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyTaggedInt())
- {
- srcBool = src1;
- srcInt = src2;
- }
- else if (src1->GetValueType().IsLikelyTaggedInt() && src2->GetValueType().IsLikelyBoolean())
- {
- srcInt = src1;
- srcBool = src2;
- }
- else
- {
- return false;
- }
- // If either instruction is constant, we can simplify the check. If both are constant, we can eliminate it
- bool srcIntConst = false;
- bool srcIntConstVal = false;
- // If we're comparing with a number that is not 0 or 1, then the two are inequal by default
- bool srcIntIsBoolable = false;
- bool srcBoolConst = false;
- bool srcBoolConstVal = false;
- if (srcInt->IsIntConstOpnd())
- {
- IR::IntConstOpnd * constSrcInt = srcInt->AsIntConstOpnd();
- IntConstType constIntVal = constSrcInt->GetValue();
- srcIntConst = true;
- if (constIntVal == 0)
- {
- srcIntConstVal = false;
- srcIntIsBoolable = true;
- }
- else if (constIntVal == 1)
- {
- srcIntConstVal = true;
- srcIntIsBoolable = true;
- }
- }
- else if (srcInt->IsAddrOpnd())
- {
- IR::AddrOpnd * addrSrcInt = srcInt->AsAddrOpnd();
- if (!(addrSrcInt && addrSrcInt->IsVar() && Js::TaggedInt::Is(addrSrcInt->m_address)))
- {
- return false;
- }
- int32 constIntVal = Js::TaggedInt::ToInt32(addrSrcInt->m_address);
- srcIntConst = true;
- if (constIntVal == 0)
- {
- srcIntConstVal = false;
- srcIntIsBoolable = true;
- }
- else if (constIntVal == 1)
- {
- srcIntConstVal = true;
- srcIntIsBoolable = true;
- }
- }
- else if (srcInt->IsConstOpnd())
- {
- // Not handled yet
- return false;
- }
- if (srcBool->IsIntConstOpnd())
- {
- IR::IntConstOpnd * constSrcBool = srcBool->AsIntConstOpnd();
- IntConstType constIntVal = constSrcBool->GetValue();
- srcBoolConst = true;
- srcBoolConstVal = constIntVal != 0;
- }
- else if (srcBool->IsAddrOpnd())
- {
- IR::AddrOpnd * addrSrcBool = srcInt->AsAddrOpnd();
- if (!(addrSrcBool && addrSrcBool->IsVar() && Js::TaggedInt::Is(addrSrcBool->m_address)))
- {
- return false;
- }
- int32 value = Js::TaggedInt::ToInt32(addrSrcBool->m_address);
- srcBoolConst = true;
- srcBoolConstVal = value != 0;
- }
- else if (srcBool->IsConstOpnd())
- {
- // Not handled yet
- return false;
- }
- // Do these checks here, since that way we avoid emitting instructions before exiting earlier
- if (srcInt->GetValueType().IsTaggedInt() && srcBool->GetValueType().IsBoolean()) {
- // ok, we know the types, so no helper needed
- *pNeedHelper = false;
- }
- else
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- // check the types and jump to the helper if incorrect
- if (!srcInt->IsConstOpnd() && !srcInt->GetValueType().IsTaggedInt())
- {
- this->m_lowererMD.GenerateSmIntTest(srcInt->AsRegOpnd(), instr, labelHelper);
- }
- if (!srcBool->IsConstOpnd() && !srcBool->GetValueType().IsBoolean())
- {
- if (!srcBool->GetValueType().IsObject())
- {
- this->m_lowererMD.GenerateObjectTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
- }
- GenerateJSBooleanTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
- }
- }
- // At this point, we know both which operand is an integer and which is a boolean,
- // whether either operand is constant, and what the constant true/false values are
- // for any constant operands. This should allow us to emit some decent code.
- LibraryValue equalResultValue = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
- LibraryValue inequalResultValue = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
- IR::LabelInstr *equalResultTarget = !isNegOp ? targetInstr : labelFallthrough;
- IR::LabelInstr *inequalResultTarget = !isNegOp ? labelFallthrough : targetInstr;
- // For the Sr instructions, we now know that the types are different, so we can immediately
- // decide what the result will be.
- if (isStrict)
- {
- if (isBranchNotCompare)
- {
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
- #if DBG
- // Since we're not making a non-helper path to one of the branches, we need to tell
- // DbCheckPostLower that we are going to have a non-helper label without non-helper
- // branches.
- // Note: this following line isn't good practice in general
- equalResultTarget->m_noHelperAssert = true;
- #endif
- }
- else
- {
- Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- }
- }
- // Now that we've checked the types, we can lower some instructions to quickly do the check
- // in the case that it's not a type-strict strict equality/inequality check.
- else if (srcIntConst && srcBoolConst)
- {
- // If both arguments are constant, we can statically determine the result.
- bool sameVal = srcIntConstVal == srcBoolConstVal;
- if (isBranchNotCompare)
- {
- // For constant branches, branch to the target
- Assert(instr);
- IR::LabelInstr * target = sameVal && srcIntIsBoolable ? equalResultTarget : inequalResultTarget;
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, target, this->m_func));
- #if DBG
- // Since we're not making a non-helper path to one of the branches, we need to tell
- // DbCheckPostLower that we are going to have a non-helper label without non-helper
- // branches.
- // Note: this following line isn't good practice in general
- (sameVal && srcIntIsBoolable ? inequalResultTarget : equalResultTarget)->m_noHelperAssert = true;
- #endif
- }
- else
- {
- // For constant compares, load the constant result
- Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, sameVal && srcIntIsBoolable ? equalResultValue : inequalResultValue), instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- }
- }
- else if (!srcIntConst && !srcBoolConst)
- {
- // If neither is constant, we can still do a bit better than loading the helper
- IR::LabelInstr * firstFalse = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- IR::LabelInstr * forceInequal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- // We branch based on the zero-ness of the integer argument to two checks against the boolean argument
- this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, firstFalse);
- // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
- // case the two will compare as inequal
- InsertCompareBranch(
- IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func, true),
- srcInt->AsRegOpnd(),
- Js::OpCode::BrNeq_A,
- isBranchNotCompare ? inequalResultTarget : forceInequal, // in the case of branching, we can go straight to the inequal target; for compares, we need to load the value
- instr,
- true);
- if (isBranchNotCompare)
- {
- // if the int evaluates to 1 (true)
- InsertCompareBranch(
- srcBool,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue),
- instr->m_opcode,
- targetInstr,
- instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
- // if the int evaluates to 0 (false)
- instr->InsertBefore(firstFalse);
- InsertCompareBranch(
- srcBool,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
- instr->m_opcode,
- targetInstr,
- instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
- }
- else
- {
- // the int resolves to 1 (true)
- // Load either the bool or its complement into the dst reg, depending on the opcode
- if (isNegOp)
- {
- GenerateBooleanNegate(instr, srcBool, instr->GetDst());
- }
- else
- {
- this->InsertMove(instr->GetDst(), srcBool, instr);
- }
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- // the int resolves to 0 (false)
- // Handle the complement case
- instr->InsertBefore(firstFalse);
- if (!isNegOp)
- {
- GenerateBooleanNegate(instr, srcBool, instr->GetDst());
- }
- else
- {
- this->InsertMove(instr->GetDst(), srcBool, instr);
- }
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- // the int resolves to something other than 0 or 1 (inequal to a bool)
- instr->InsertBefore(forceInequal);
- Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- }
- }
- else if (srcIntConst)
- {
- if (isBranchNotCompare)
- {
- if (srcIntIsBoolable)
- {
- LibraryValue intval = srcIntConstVal ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
- InsertCompareBranch(
- srcBool,
- LoadLibraryValueOpnd(instr, intval),
- instr->m_opcode,
- targetInstr,
- instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
- }
- else
- {
- // Since a constant int that isn't 0 or 1 will always be inequal to bools, just jump to the inequal result
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
- #if DBG
- // Since we're not making a non-helper path to one of the branches, we need to tell
- // DbCheckPostLower that we are going to have a non-helper label without non-helper
- // branches.
- // Note: this following line isn't good practice in general
- equalResultTarget->m_noHelperAssert = true;
- #endif
- }
- }
- else
- {
- if (srcIntIsBoolable)
- {
- bool directPassthrough = isNegOp != srcIntConstVal;
- if (directPassthrough)
- {
- // If this case is hit, the result value is the same as the value in srcBool
- this->InsertMove(instr->GetDst(), srcBool, instr);
- }
- else
- {
- // Otherwise, the result value is the negation of the value in srcBool
- GenerateBooleanNegate(instr, srcBool, instr->GetDst());
- }
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- }
- else
- {
- Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- }
- }
- }
- else if (srcBoolConst)
- {
- if (isBranchNotCompare)
- {
- this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, srcBoolConstVal ? inequalResultTarget : equalResultTarget);
- if (srcBoolConstVal)
- {
- // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
- // case we have an issue.
- InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, inequalResultTarget, instr, true);
- }
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, srcBoolConstVal ? equalResultTarget : inequalResultTarget, this->m_func));
- }
- else
- {
- IR::LabelInstr* isNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- IR::LabelInstr* isZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, isZero);
- if (srcBoolConstVal)
- {
- // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
- // case we have an issue.
- InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, isZero, instr, true);
- }
- instr->InsertBefore(isNonZero);
- Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- instr->InsertBefore(isZero);
- Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, !srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
- instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
- }
- }
- if (*pNeedHelper)
- {
- instr->InsertBefore(labelHelper);
- }
- return true;
- }
// Generate fast path for StrictEquals when one of the source have a definite valuetype
// Emits an inline pointer compare and, for the compare form, materializes the
// true/false library values into dst. *pNeedHelper is cleared when the
// definite type guarantees the helper can never be needed; otherwise it is
// left as the caller set it. Returns false when no fast path applies here,
// true otherwise (caller emits the helper call at labelHelper when needed).
bool Lowerer::GenerateFastBrOrCmEqDefinite(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool *pNeedHelper, bool isBranch, bool isInHelper)
{
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    // At least one side must have a definite value type.
    if (!src1->GetValueType().IsDefinite() && !src2->GetValueType().IsDefinite())
    {
        return false;
    }
    // Identical operands: leave to the general path.
    if (src1->IsEqual(src2))
    {
        return false;
    }
    if (src1->GetValueType().IsDefinite() && src2->GetValueType().IsDefinite())
    {
        // Both definite but one is a tagged value: emit nothing here and
        // return true (no fast path instructions; *pNeedHelper unchanged).
        if (src1->IsTaggedValue() || src2->IsTaggedValue())
        {
            return true;
        }
    }
    IR::LabelInstr * labelBranchSuccess = nullptr;
    IR::LabelInstr * labelBranchFailure = nullptr;
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel();
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    LibraryValue successValueType = ValueInvalid;
    LibraryValue failureValueType = ValueInvalid;
    // definiteSrc drives the type-based dispatch; likelySrc is the other side.
    IR::Opnd * definiteSrc = src1->GetValueType().IsDefinite() ? src1 : src2;
    IR::Opnd * likelySrc = src1->GetValueType().IsDefinite() ? src2 : src1;
    bool isEqual = !instr->IsNeq();
    if (!isBranch)
    {
        // Compare form: success/failure flow into local labels that load the
        // corresponding boolean library value into dst.
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
        labelBranchFailure = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
        successValueType = isEqual ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
        failureValueType = isEqual ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
    }
    else
    {
        // Branch form: wire success/failure straight to the branch target or
        // the fall-through label, depending on eq vs neq.
        labelBranchSuccess = isEqual ? instr->AsBranchInstr()->GetTarget() : labelFallThrough;
        labelBranchFailure = isEqual ? labelFallThrough : instr->AsBranchInstr()->GetTarget();
    }
    Assert(likelySrc->IsRegOpnd());
    if (definiteSrc->GetValueType().IsAnyArray() || definiteSrc->GetValueType().IsSymbol() || definiteSrc->GetValueType().IsBoolean() || definiteSrc->GetValueType().IsPrimitiveOrObject())
    {
        // These types strict-compare by identity, so a pointer compare is
        // complete: no helper needed.
        InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
        IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchFailure, this->m_func);
        instr->InsertBefore(branch);
        *pNeedHelper = false;
    }
    else if (definiteSrc->GetValueType().IsObject() && !CONFIG_FLAG(ESBigInt))
    {
        InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
        if (!likelySrc->GetValueType().IsDefinite())
        {
            // The other side is only "likely": tagged values fail outright,
            // and engine-external objects must go to the helper.
            m_lowererMD.GenerateObjectTest(likelySrc->AsRegOpnd(), instr, labelBranchFailure);
            IR::RegOpnd * likelyTypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
            IR::IndirOpnd * likelyType = IR::IndirOpnd::New(likelySrc->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
            Lowerer::InsertMove(likelyTypeReg, likelyType, instr);
            IR::Opnd *likelyFlags = IR::IndirOpnd::New(likelyTypeReg, Js::Type::GetOffsetOfFlags(), TyInt8, this->m_func);
            InsertTestBranch(likelyFlags, IR::IntConstOpnd::New(TypeFlagMask_EngineExternal, TyInt8, this->m_func), Js::OpCode::BrNeq_A, labelHelper, instr);
        }
        else
        {
            *pNeedHelper = false;
        }
        IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchFailure, this->m_func);
        instr->InsertBefore(branch);
    }
    else if (definiteSrc->IsTaggedInt())
    {
        // Pointer equality proves equal; inequality still needs the helper
        // (the other side may compare equal after conversion).
        InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
        IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
        instr->InsertBefore(branch);
    }
    else
    {
        // No fast path for this definite type; everything goes to the helper.
        return true;
    }
    if (!isBranch)
    {
        // Materialize the compare result into dst.
        instr->InsertBefore(labelBranchSuccess);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
        instr->InsertBefore(labelBranchFailure);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
    }
    instr->InsertBefore(labelHelper);
    return true;
}
- // Generate fast path for Strict Equals when both sources are likely boolean/likely object/likely symbol
- bool Lowerer::GenerateFastBrEqLikely(IR::BranchInstr * instrBranch, bool *pNeedHelper, bool isInHelper)
- {
- IR::Opnd *src1 = instrBranch->GetSrc1();
- IR::Opnd *src2 = instrBranch->GetSrc2();
- IR::LabelInstr *targetInstr = instrBranch->GetTarget();
- IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
- IR::LabelInstr *labelTrue = instrBranch->GetOrCreateContinueLabel(isInHelper);
- IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- *pNeedHelper = true;
- if (!this->GenerateFastBooleanAndObjectEqLikely(instrBranch, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
- {
- return false;
- }
- instrBranch->InsertBefore(labelEqualLikely);
- IR::BranchInstr *newBranch = IR::BranchInstr::New(instrBranch->m_opcode, targetInstr, src1, src2, this->m_func);
- instrBranch->InsertBefore(newBranch);
- this->m_lowererMD.LowerCondBranch(newBranch);
- newBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelTrue, this->m_func);
- instrBranch->InsertBefore(newBranch);
- instrBranch->InsertBefore(labelHelper);
- return true;
- }
// Shared type-screening for the boolean/object/symbol equality fast paths
// (used by both the branch and compare forms). Emits checks that route to:
//   labelHelper      - type assumptions failed; take the slow path
//   labelEqualLikely - types qualify; caller emits the inline compare there
// On return, *pNeedHelper says whether the helper path can still be reached.
// Returns false when the operands' likely types don't fit any screen here.
bool Lowerer::GenerateFastBooleanAndObjectEqLikely(IR::Instr * instr, IR::Opnd *src1, IR::Opnd *src2, IR::LabelInstr * labelHelper, IR::LabelInstr * labelEqualLikely, bool *pNeedHelper, bool isInHelper)
{
    *pNeedHelper = true;
    if (!src1 || !src2)
    {
        return false;
    }
    bool isStrictCompare = false;
    bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();
    // Only the Sr (strict ===/!==) opcodes allow the looser single-operand
    // checks below.
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
        isStrictCompare = true;
        break;
    }
    if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyBoolean())
    {
        //
        // Booleans
        //
        if (isStrictCompare)
        {
            if (!src1->GetValueType().IsBoolean() && !src2->GetValueType().IsBoolean())
            {
                // Strict compare: proving one side is a boolean suffices
                // (a boolean strict-compares by identity).
                this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
                if (GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
                {
                    instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
                }
            }
            else
            {
                // At least one side is definitely boolean: no helper needed.
                *pNeedHelper = false;
            }
        }
        else
        {
            // Non-strict compare: both sides must be proven booleans.
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            GenerateJSBooleanTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            if (GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
            {
                instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
            }
        }
    }
    else if (src1->GetValueType().HasBeenObject() && src2->GetValueType().HasBeenObject())
    {
        //
        // Objects
        //
        IR::LabelInstr *labelTypeIdCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
        if (!isStrictCompare)
        {
            // If not strictBr, verify both sides are dynamic objects
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            GenerateIsDynamicObject(src1->AsRegOpnd(), instr, labelTypeIdCheck, false);
        }
        else
        {
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
        }
        GenerateIsDynamicObject(src2->AsRegOpnd(), instr, labelEqualLikely, true);
        // $typeIdCheck: not a DynamicObject; try the builtin/external screens.
        instr->InsertBefore(labelTypeIdCheck);
        if (isStrictMode)
        {
            // In strict mode, non-dynamic objects always go to the helper.
            labelTypeIdCheck->isOpHelper = true;
            IR::BranchInstr *branchToHelper = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
            instr->InsertBefore(branchToHelper);
        }
        else
        {
            if (!ExternalLowerer::TryGenerateFastExternalEqTest(src1, src2, instr, labelHelper, labelEqualLikely, this, isStrictCompare, isInHelper))
            {
                if (!isStrictCompare)
                {
                    GenerateIsBuiltinRecyclableObject(src1->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
                }
                GenerateIsBuiltinRecyclableObject(src2->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
            }
        }
    }
    else if (src1->GetValueType().IsLikelySymbol() && src2->GetValueType().IsLikelySymbol())
    {
        // Symbols: verify both sides, bailing to the helper on mismatch.
        this->GenerateSymbolTest(src1->AsRegOpnd(), instr, labelHelper, nullptr, true);
        this->GenerateSymbolTest(src2->AsRegOpnd(), instr, labelHelper, nullptr, true);
    }
    else
    {
        return false;
    }
    return true;
}
// Fast path for the compare (Cm*) forms of equality when both sources are
// likely boolean/object/symbol. Uses GenerateFastBooleanAndObjectEqLikely
// for the type screens, then materializes the true/false library value into
// dst on the likely-equal path. Returns false when the types don't qualify;
// *pNeedHelper reports whether the helper block is still required.
bool Lowerer::GenerateFastCmEqLikely(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
{
    *pNeedHelper = false;
    Assert(instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A);
    bool isNegOp = false;
    bool isStrict = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
        isStrict = true;
        break;
    case Js::OpCode::CmSrNeq_A:
        isStrict = true;
        // fall through: CmSrNeq_A is both strict and negated
    case Js::OpCode::CmNeq_A:
        isNegOp = true;
        break;
    }
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!this->GenerateFastBooleanAndObjectEqLikely(instr, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
    {
        return false;
    }
    instr->InsertBefore(labelEqualLikely);
    // $labelEqualLikely
    //
    // Will only come here for
    // if src2 is dynamic object(matches Js::DynamicObject::`vtable'), for non strict cm both src1 and src2 should be dynamic object
    // or if src2 is builtin recyclableobject(typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
    // or if CustomExternalType with no operations usage flags
    //
    // src1->IsEqual(src2)
    // MOV DST SUCCESS
    // JMP $DONE
    // CMP src1, src2
    // MOV DST SUCCESS
    // JEQ $DONE
    // MOV DST FAILURE
    // JMP $DONE
    LibraryValue successValueType = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
    LibraryValue failureValueType = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
    if (src1->IsEqual(src2))
    {
        // Same operand on both sides: result is statically "equal".
        Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
    }
    else
    {
        // Compare and load the success value on equality, failure otherwise.
        IR::LabelInstr *cmEqual = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
        this->InsertCompareBranch(src1, src2, isStrict ? Js::OpCode::BrSrEq_A : Js::OpCode::BrEq_A, cmEqual, instr);
        Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, failureValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
        instr->InsertBefore(cmEqual);
        Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
    }
    instr->InsertBefore(labelHelper);
    instr->InsertAfter(labelDone);
    return true;
}
- // Emits an inline fast path for string (in)equality branches and compares
- // (Br*/Cm*, strict and loose variants). Returns false — leaving the instr for
- // generic lowering — when either source cannot be a string (not a reg opnd,
- // tagged int, or a value type that never had a string tag). Returns true after
- // wiring the fast path; the slow path falls through to $labelHelper.
- bool
- Lowerer::GenerateFastBrOrCmString(IR::Instr* instr)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- if (!srcReg1 ||
- !srcReg2 ||
- srcReg1->IsTaggedInt() ||
- srcReg2->IsTaggedInt() ||
- (!srcReg1->GetValueType().HasHadStringTag() && !srcReg2->GetValueType().IsString()) ||
- (!srcReg2->GetValueType().HasHadStringTag() && !srcReg1->GetValueType().IsString()))
- {
- return false;
- }
- IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- IR::LabelInstr *labelBranchFail = nullptr;
- IR::LabelInstr *labelBranchSuccess = nullptr;
- bool isEqual = false;
- bool isStrict = false;
- bool isBranch = true;
- bool isCmNegOp = false;
- // Classify the opcode: branch vs. compare, strict (===) vs. loose (==),
- // and whether the compare form is negated (Cm*Neq). For branches, the
- // taken/fallthrough labels are chosen so that "success" means "equal".
- switch (instr->m_opcode)
- {
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- isStrict = true;
- // Intentional fallthrough: strict variants share the loose wiring below.
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrNotNeq_A:
- labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- labelBranchSuccess = instr->AsBranchInstr()->GetTarget();
- instr->InsertAfter(labelBranchFail);
- isEqual = true;
- break;
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- isStrict = true;
- // Intentional fallthrough.
- case Js::OpCode::BrNeq_A:
- case Js::OpCode::BrNotEq_A:
- labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- labelBranchFail = instr->AsBranchInstr()->GetTarget();
- instr->InsertAfter(labelBranchSuccess);
- isEqual = false;
- break;
- case Js::OpCode::CmSrEq_A:
- isStrict = true;
- // Intentional fallthrough.
- case Js::OpCode::CmEq_A:
- isEqual = true;
- isBranch = false;
- break;
- case Js::OpCode::CmSrNeq_A:
- isStrict = true;
- // Intentional fallthrough.
- case Js::OpCode::CmNeq_A:
- isEqual = false;
- isBranch = false;
- isCmNegOp = true;
- break;
- default:
- Assume(UNREACHED);
- }
- // Cm forms have no branch target; create local success/fail labels and
- // materialize true/false into the destination below.
- if (!isBranch)
- {
- labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- }
- GenerateFastStringCheck(instr, srcReg1, srcReg2, isEqual, isStrict, labelHelper, labelBranchSuccess, labelBranchFail);
- IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- if (!isBranch)
- {
- // For negated compares (Cm*Neq) the success path loads 'false' and the
- // failure path loads 'true'; otherwise the reverse.
- const LibraryValue successValueType = !isCmNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
- const LibraryValue failureValueType = !isCmNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
- instr->InsertBefore(labelBranchSuccess);
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
- InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- instr->InsertBefore(labelBranchFail);
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
- InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- }
- instr->InsertBefore(labelHelper);
- instr->InsertAfter(labelFallthrough);
- #if DBG
- // The fast-path for strings assumes the case where 2 strings are equal is rare, and marks that path as 'helper'.
- // This breaks the helper label dbchecks as it can result in non-helper blocks be reachable only from helper blocks.
- // Use m_isHelperToNonHelperBranch and m_noHelperAssert to fix this.
- IR::Instr *blockEndInstr;
- if (isEqual)
- {
- blockEndInstr = labelHelper->GetNextBranchOrLabel();
- }
- else
- {
- blockEndInstr = instr->GetNextBranchOrLabel();
- }
- if (blockEndInstr->IsBranchInstr())
- {
- blockEndInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
- }
- labelFallthrough->m_noHelperAssert = true;
- #endif
- return true;
- }
- // Emits the shared inline string-equality check used by both the branch and
- // compare fast paths: string type tests, pointer-identity shortcut, length
- // compare, flat-string (m_pszValue) availability checks, first-character
- // compare, then a wmemcmp helper call for the full contents. Jumps to
- // labelBranchSuccess/labelBranchFail once the result is decided, and to
- // labelHelper when the inline path cannot decide (e.g. a non-flat string).
- bool
- Lowerer::GenerateFastStringCheck(IR::Instr *instr, IR::RegOpnd *srcReg1, IR::RegOpnd *srcReg2, bool isEqual, bool isStrict, IR::LabelInstr *labelHelper, IR::LabelInstr *labelBranchSuccess, IR::LabelInstr *labelBranchFail)
- {
- Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
- instr->m_opcode == Js::OpCode::BrSrNeq_A ||
- instr->m_opcode == Js::OpCode::BrEq_A ||
- instr->m_opcode == Js::OpCode::BrNeq_A ||
- instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
- instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
- instr->m_opcode == Js::OpCode::BrNotEq_A ||
- instr->m_opcode == Js::OpCode::BrNotNeq_A ||
- instr->m_opcode == Js::OpCode::CmEq_A ||
- instr->m_opcode == Js::OpCode::CmNeq_A ||
- instr->m_opcode == Js::OpCode::CmSrEq_A ||
- instr->m_opcode == Js::OpCode::CmSrNeq_A);
- // if src1 is not string
- // generate object test, if not equal jump to $helper
- // compare type check to string, if not jump to $helper
- //
- // if strict mode generate string test as above for src2 and jump to $failure if failed any time
- // else if not strict generate string test as above for src2 and jump to $helper if failed any time
- //
- // Compare length of src1 and src2 if not equal goto $failure
- //
- // if src1 is not flat string jump to $helper
- //
- // if src1 and src2 m_pszValue pointer match goto $success
- //
- // if src2 is not flat string jump to $helper
- //
- // if first character of src1 and src2 doesn't match goto $failure
- //
- // shift left by 1 length of src1 (length*2)
- //
- // wmemcmp src1 and src2 flat strings till length * 2
- //
- // test eax (result of wmemcmp)
- // if equal jump to $success else to $failure
- //
- // $success
- // jmp to $fallthrough
- // $failure
- // jmp to $fallthrough
- // $helper
- //
- // $fallthrough
- // Generates:
- // GenerateObjectTest(src1);
- // CMP srcReg1, srcReg2
- // JEQ $success
- // MOV s1, [srcReg1 + offset(Type)]
- // CMP type, static_string_type
- // JNE $helper
- // GenerateObjectTest(src2);
- // MOV s2, [srcReg2 + offset(Type)]
- // CMP type, static_string_type
- // JNE $fail ; if src1 is string but not src2, src1 !== src2 if isStrict
- // MOV s3, [srcReg1,offset(m_charLength)]
- // CMP [srcReg2,offset(m_charLength)], s3
- // JNE $fail <--- length check done
- // MOV s4, [srcReg1,offset(m_pszValue)]
- // CMP s4, 0
- // JEQ $helper
- // MOV s5, [srcReg2,offset(m_pszValue)]
- // CMP s5, 0
- // JEQ $helper
- // MOV s6,[s4]
- // CMP [s5], s6 -First character comparison
- // JNE $fail
- // SHL length, 1
- // eax = wmemcmp(src1String, src2String, length*2)
- // TEST eax, eax
- // JEQ $success
- // JMP $fail
- IR::Instr* instrInsert = instr;
- GenerateStringTest(srcReg1, instrInsert, labelHelper);
- // Same symbol on both sides: trivially equal, no content comparison needed.
- if (srcReg1->IsEqual(srcReg2))
- {
- InsertBranch(Js::OpCode::Br, labelBranchSuccess, instrInsert);
- #if DBG
- if (instr->IsBranchInstr())
- {
- // we might have other cases on helper path which will generate branch to the target
- instr->AsBranchInstr()->GetTarget()->m_noHelperAssert = true;
- }
- #endif
- return true;
- }
- // CMP srcReg1, srcReg2 - Ptr comparison
- // JEQ $branchSuccess
- InsertCompareBranch(srcReg1, srcReg2, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);
- // Strict: src1 is a string, so a non-string src2 means definitely not equal.
- // Loose: a non-string src2 may still coerce equal, so defer to the helper.
- if (isStrict)
- {
- GenerateStringTest(srcReg2, instrInsert, labelBranchFail);
- }
- else
- {
- GenerateStringTest(srcReg2, instrInsert, labelHelper);
- }
- // Strict compare against a known-empty string: only the other operand's
- // length needs testing (equal iff its length is 0).
- if (isStrict && (srcReg1->m_sym->m_isStrEmpty || srcReg2->m_sym->m_isStrEmpty))
- {
- IR::RegOpnd* otherOpnd = srcReg1->m_sym->m_isStrEmpty ? srcReg2 : srcReg1;
- InsertCompareBranch(IR::IndirOpnd::New(otherOpnd, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), IR::IntConstOpnd::New(0, TyUint32, this->m_func, true), Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);
- return true;
- }
- // MOV s3, [srcReg1,offset(m_charLength)]
- // CMP [srcReg2,offset(m_charLength)], s3
- // JNE $branchfail
- IR::RegOpnd * src1LengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
- InsertMove(src1LengthOpnd, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), instrInsert);
- InsertCompareBranch(IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), src1LengthOpnd, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);
- // MOV s4, [src1,offset(m_pszValue)]
- // CMP s4, 0
- // JEQ $helper
- // MOV s5, [src2,offset(m_pszValue)]
- // CMP s5, 0
- // JEQ $helper
- // A null m_pszValue means the string is not flat (e.g. a rope); the inline
- // memcmp path cannot handle that, so bail to the helper.
- IR::RegOpnd * src1FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
- InsertMove(src1FlatString, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
- InsertCompareBranch(src1FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);
- IR::RegOpnd * src2FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
- InsertMove(src2FlatString, IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
- InsertCompareBranch(src2FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);
- // MOV s6,[s4]
- // CMP [s5], s6 -First character comparison
- // JNE $branchfail
- // Cheap early-out before the full helper call: most unequal same-length
- // strings differ in the first char16.
- IR::RegOpnd * src1FirstChar = IR::RegOpnd::New(TyUint16, m_func);
- InsertMove(src1FirstChar, IR::IndirOpnd::New(src1FlatString, 0, TyUint16, m_func), instrInsert);
- InsertCompareBranch(IR::IndirOpnd::New(src2FlatString, 0, TyUint16, m_func), src1FirstChar, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);
- // eax = wmemcmp(src1String, src2String, length)
- // NOTE(review): src1LengthOpnd is the char16 count; the WMemCmp helper is
- // presumably element-count based (unlike the byte-count sketch above) — the
- // helper's signature is not visible here.
- m_lowererMD.LoadHelperArgument(instr, src1LengthOpnd);
- m_lowererMD.LoadHelperArgument(instr, src1FlatString);
- m_lowererMD.LoadHelperArgument(instr, src2FlatString);
- IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(IR::HelperWMemCmp, m_func), m_func);
- instr->InsertBefore(instrCall);
- m_lowererMD.LowerCall(instrCall, 3);
- // TEST eax, eax
- // JEQ success
- InsertTestBranch(dstOpnd, dstOpnd, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);
- // JMP fail
- InsertBranch(Js::OpCode::Br, labelBranchFail, instrInsert);
- return true;
- }
- // Lowers BrTrue_A/BrFalse_A with a sequence of inline truthiness fast paths,
- // ordered by the source's profiled value type: null, undefined, tagged int,
- // float, boolean, string, then object. Each path either resolves the branch
- // inline (removing the instr and returning false to skip generic lowering) or
- // narrows srcValueType and falls through to the next check. Anything left
- // over lands on the JavascriptConversion::ToBoolean helper call at the end.
- // Always returns false: the helper call, when needed, is lowered here too.
- bool Lowerer::GenerateFastBrBool(IR::BranchInstr *const instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
- Func *const func = instr->m_func;
- if(!instr->GetSrc1()->IsRegOpnd())
- {
- LowererMD::ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
- }
- IR::RegOpnd *const src = instr->GetSrc1()->Copy(func)->AsRegOpnd();
- const IR::AutoReuseOpnd autoReuseSrc(src, func);
- const ValueType srcOriginalValueType(src->GetValueType());
- ValueType srcValueType(srcOriginalValueType);
- IR::LabelInstr *const labelTarget = instr->GetTarget();
- IR::LabelInstr *const labelFallthrough = instr->GetOrCreateContinueLabel();
- if(labelTarget == labelFallthrough)
- {
- // Nothing to do
- instr->Remove();
- return false;
- }
- const bool branchOnFalse = instr->m_opcode == Js::OpCode::BrFalse_A;
- IR::LabelInstr *const labelFalse = branchOnFalse ? labelTarget : labelFallthrough;
- IR::LabelInstr *const labelTrue = branchOnFalse ? labelFallthrough : labelTarget;
- const Js::OpCode compareWithFalseBranchToTargetOpCode = branchOnFalse ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A;
- IR::LabelInstr *lastLabelBeforeHelper = nullptr;
- /// Typespec'd float
- if (instr->GetSrc1()->GetType() == TyFloat64)
- {
- // Type-specialized double: truthiness is just a zero/NaN check.
- InsertFloatCheckForZeroOrNanBranch(instr->GetSrc1(), branchOnFalse, labelTarget, labelFallthrough, instr);
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- instr->Remove();
- return false;
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Null fast path
- if (srcValueType.HasBeenNull() || srcOriginalValueType.IsUninitialized())
- {
- if(srcValueType.IsNull())
- {
- // jmp $false
- InsertBranch(Js::OpCode::Br, labelFalse, instr);
- // Skip lowering call to helper
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // cmp src, null
- // je $false
- InsertCompareBranch(
- src,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
- Js::OpCode::BrEq_A,
- labelFalse,
- instr);
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Null));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Undefined fast path
- if(srcValueType.HasBeenUndefined() || srcOriginalValueType.IsUninitialized())
- {
- if(srcValueType.IsUndefined())
- {
- // jmp $false
- InsertBranch(Js::OpCode::Br, labelFalse, instr);
- // Skip lowering call to helper
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // cmp src, undefined
- // je $false
- InsertCompareBranch(
- src,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
- Js::OpCode::BrEq_A,
- labelFalse,
- instr);
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Undefined));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Tagged int fast path
- const bool isNotInt = src->IsNotInt();
- bool checkedForTaggedInt = isNotInt;
- if( (
- srcValueType.HasBeenInt() ||
- srcValueType.HasBeenUnknownNumber() ||
- srcOriginalValueType.IsUninitialized()
- ) && !isNotInt)
- {
- checkedForTaggedInt = true;
- IR::LabelInstr *notTaggedIntLabel = nullptr;
- if(!src->IsTaggedInt())
- {
- // test src, 1
- // jz $notTaggedInt
- notTaggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- m_lowererMD.GenerateSmIntTest(src, instr, notTaggedIntLabel);
- }
- // cmp src, tag(0)
- // je/jne $target
- // A tagged int is falsy only when its payload is zero.
- m_lowererMD.GenerateTaggedZeroTest(src, instr);
- Lowerer::InsertBranch(compareWithFalseBranchToTargetOpCode, labelTarget, instr);
- if(src->IsTaggedInt())
- {
- // Skip lowering call to helper
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // jmp $fallthrough
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- // $notTaggedInt:
- if(notTaggedIntLabel)
- {
- instr->InsertBefore(notTaggedIntLabel);
- lastLabelBeforeHelper = notTaggedIntLabel;
- }
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Float fast path
- bool generateFloatTest = srcValueType.IsLikelyFloat();
- #ifdef _M_IX86
- if (!AutoSystemInfo::Data.SSE2Available())
- {
- generateFloatTest = false;
- }
- #endif
- bool checkedForTaggedFloat =
- #if FLOATVAR
- srcValueType.IsNotNumber();
- #else
- true; // there are no tagged floats, indicate that it has been checked
- #endif
- if (generateFloatTest)
- {
- // if(srcValueType.IsFloat()) // skip tagged int check?
- //
- // ValueType::IsFloat() does not guarantee that the storage is not in a tagged int.
- // The tagged int check is necessary. It does, however, guarantee that as long as the value is not
- // stored in a tagged int, that it is definitely stored in a JavascriptNumber/TaggedFloat.
- IR::LabelInstr *const notFloatLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- if(!checkedForTaggedInt)
- {
- checkedForTaggedInt = true;
- m_lowererMD.GenerateSmIntTest(src, instr, notFloatLabel, nullptr, true);
- }
- // cmp [src], JavascriptNumber::vtable
- // jne $notFloat
- #if FLOATVAR
- checkedForTaggedFloat = true;
- IR::RegOpnd *const floatOpnd = m_lowererMD.CheckFloatAndUntag(src, instr, notFloatLabel);
- #else
- m_lowererMD.GenerateFloatTest(src, instr, notFloatLabel);
- IR::IndirOpnd *const floatOpnd = IR::IndirOpnd::New(src, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, func);
- #endif
- // cmp src, 0.0
- // jp $false
- // je/jne $target
- // jmp $fallthrough
- InsertFloatCheckForZeroOrNanBranch(floatOpnd, branchOnFalse, labelTarget, labelFallthrough, instr);
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- // $notFloat:
- instr->InsertBefore(notFloatLabel);
- lastLabelBeforeHelper = notFloatLabel;
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::AnyNumber));
- }
- IR::LabelInstr *labelHelper = nullptr;
- // The object test (to rule out remaining tagged values before dereferencing
- // src) is emitted lazily, at most once, by the first path that needs it.
- bool _didObjectTest = checkedForTaggedInt && checkedForTaggedFloat;
- const auto EnsureObjectTest = [&]()
- {
- if(_didObjectTest)
- {
- return;
- }
- if(!labelHelper)
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- m_lowererMD.GenerateObjectTest(src, instr, labelHelper);
- _didObjectTest = true;
- };
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Boolean fast path
- if (srcValueType.HasBeenBoolean() || srcOriginalValueType.IsUninitialized())
- {
- IR::LabelInstr *notBooleanLabel = nullptr;
- if (!srcValueType.IsBoolean())
- {
- EnsureObjectTest();
- // cmp [src], JavascriptBoolean::vtable
- // jne $notBoolean
- notBooleanLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- InsertCompareBranch(
- IR::IndirOpnd::New(src, 0, TyMachPtr, func),
- LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptBoolean),
- Js::OpCode::BrNeq_A,
- notBooleanLabel,
- instr);
- }
- // cmp src, false
- // je/jne $target
- InsertCompareBranch(
- src,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
- compareWithFalseBranchToTargetOpCode,
- labelTarget,
- instr);
- if (srcValueType.IsBoolean())
- {
- // Skip lowering call to helper
- Assert(!labelHelper);
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // jmp $fallthrough
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- if (notBooleanLabel)
- {
- instr->InsertBefore(notBooleanLabel);
- lastLabelBeforeHelper = notBooleanLabel;
- }
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Boolean));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // String fast path
- if(srcValueType.HasBeenString())
- {
- IR::LabelInstr *notStringLabel = nullptr;
- if(!srcValueType.IsString())
- {
- EnsureObjectTest();
- notStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- GenerateStringTest(src, instr, notStringLabel, nullptr, false);
- }
- // cmp [src + offset(length)], 0
- // jeq/jne $target
- // A string is falsy iff it is empty (char length 0).
- InsertCompareBranch(
- IR::IndirOpnd::New(src, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func),
- IR::IntConstOpnd::New(0, TyUint32, func, true),
- compareWithFalseBranchToTargetOpCode,
- labelTarget,
- instr);
- if(srcValueType.IsString())
- {
- // Skip lowering call to helper
- Assert(!labelHelper);
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // jmp $fallthrough
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- if(notStringLabel)
- {
- instr->InsertBefore(notStringLabel);
- lastLabelBeforeHelper = notStringLabel;
- }
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::String));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Object fast path
- if (srcValueType.IsLikelyObject())
- {
- if(srcValueType.IsObject())
- {
- if(srcValueType.GetObjectType() > ObjectType::Object)
- {
- // Specific object types that are tracked are equivalent to 'true'
- // jmp $true
- InsertBranch(Js::OpCode::Br, labelTrue, instr);
- // Skip lowering call to helper
- Assert(!labelHelper);
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- }
- else
- {
- EnsureObjectTest();
- }
- // mov srcType, [src + offset(type)] -- load type
- IR::RegOpnd *const srcType = IR::RegOpnd::New(TyMachPtr, func);
- const IR::AutoReuseOpnd autoReuseR1(srcType, func);
- InsertMove(srcType, IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func), instr);
- // test [srcType + offset(flags)], TypeFlagMask_IsFalsy -- check if falsy
- // jnz $false
- InsertTestBranch(
- IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfFlags(), TyUint8, func),
- IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyUint8, func),
- Js::OpCode::BrNeq_A,
- labelFalse,
- instr);
- // cmp [srcType + offset(typeId)], TypeIds_LastJavascriptPrimitiveType -- check base TypeIds_LastJavascriptPrimitiveType
- // ja $true
- InsertCompareBranch(
- IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfTypeId(), TyInt32, func),
- IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, func),
- Js::OpCode::BrGt_A,
- true /* isUnsigned */,
- labelTrue,
- instr);
- if(!labelHelper)
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- // The object path flows directly into the helper; the preceding label (if
- // any) must not be marked as a helper label in this case.
- lastLabelBeforeHelper = nullptr;
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Helper call
- // $helper:
- if(lastLabelBeforeHelper)
- {
- Assert(instr->m_prev == lastLabelBeforeHelper);
- lastLabelBeforeHelper->isOpHelper = true;
- }
- if (labelHelper)
- {
- Assert(labelHelper->isOpHelper);
- instr->InsertBefore(labelHelper);
- }
- // call JavascriptConversion::ToBoolean
- IR::RegOpnd *const toBoolDst = IR::RegOpnd::New(TyInt32, func);
- const IR::AutoReuseOpnd autoReuseToBoolDst(toBoolDst, func);
- IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, toBoolDst, instr->GetSrc1(), func);
- instr->InsertBefore(callInstr);
- LowerUnaryHelperMem(callInstr, IR::HelperConv_ToBoolean);
- // test eax, eax
- InsertTest(toBoolDst, toBoolDst, instr);
- // je/jne $target
- // Reuse the original branch instruction as the final conditional jump on
- // the helper's result instead of creating a new one.
- Assert(instr->IsBranchInstr());
- instr->FreeSrc1();
- instr->m_opcode = LowererMD::MDBranchOpcode(compareWithFalseBranchToTargetOpCode);
- Assert(instr->AsBranchInstr()->GetTarget() == labelTarget);
- // Skip lowering another call to helper
- return false;
- }
- // Helper method used in LowerMD by all platforms.
- // Creates HelperCallOpnd or DiagHelperCallOpnd, based on helperMethod and state.
- // Returns a DiagHelperCallOpnd only when jitting in debug mode with the
- // continue-after-exception wrappers enabled AND the helper can throw;
- // otherwise a plain HelperCallOpnd. helperArgCount is only consumed by the
- // diagnostic variant.
- // static
- IR::HelperCallOpnd*
- Lowerer::CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func)
- {
- Assert(func);
- IR::HelperCallOpnd* helperCallOpnd;
- if (CONFIG_FLAG(EnableContinueAfterExceptionWrappersForHelpers) &&
- func->IsJitInDebugMode() &&
- HelperMethodAttributes::CanThrow(helperMethod))
- {
- // Create DiagHelperCallOpnd to indicate that it's needed to wrap original helper with try-catch wrapper,
- // so that we can ignore exception and bailout to next stmt in debugger.
- // For details, see: Lib\Runtime\Debug\DiagHelperMethodWrapper.{h,cpp}.
- helperCallOpnd = IR::DiagHelperCallOpnd::New(helperMethod, func, helperArgCount);
- }
- else
- {
- helperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
- }
- return helperCallOpnd;
- }
- // Pattern-matches "(typeof x) ==/=== <string literal>" (and the Br variants),
- // possibly with intervening flow-graph-peep Ld_A copies, and replaces the
- // Typeof + compare pair with a direct typeId check via GenerateFastBrTypeOf /
- // GenerateFastCmTypeOf. Also handles "typeof a == typeof b" by comparing the
- // resulting string pointers directly. Returns true when a fast path was
- // emitted; *prev is set to the instruction to resume lowering from, and
- // *pfNoLower tells the caller whether generic lowering is still needed.
- bool
- Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool isNeqOp, bool *pfNoLower)
- {
- Assert(prev);
- Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
- instr->m_opcode == Js::OpCode::BrSrNeq_A ||
- instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
- instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
- instr->m_opcode == Js::OpCode::CmSrEq_A ||
- instr->m_opcode == Js::OpCode::CmSrNeq_A ||
- instr->m_opcode == Js::OpCode::BrEq_A ||
- instr->m_opcode == Js::OpCode::BrNeq_A ||
- instr->m_opcode == Js::OpCode::BrNotEq_A ||
- instr->m_opcode == Js::OpCode::BrNotNeq_A ||
- instr->m_opcode == Js::OpCode::CmEq_A ||
- instr->m_opcode == Js::OpCode::CmNeq_A);
- //
- // instr - (Br/Cm)(Sr)(N(ot))eq_A
- // instr->m_prev - typeOf
- //
- IR::Instr *instrLd = instr->GetPrevRealInstrOrLabel();
- bool skippedLoads = false;
- //Skip intermediate Ld_A which might be inserted by flow graph peeps
- while (instrLd && instrLd->m_opcode == Js::OpCode::Ld_A )
- {
- if (!(instrLd->GetDst()->IsRegOpnd() && instrLd->GetDst()->AsRegOpnd()->m_fgPeepTmp))
- {
- return false;
- }
- if (instrLd->HasBailOutInfo())
- {
- return false;
- }
- instrLd = instrLd->GetPrevRealInstrOrLabel();
- skippedLoads = true;
- }
- IR::Instr *typeOf = instrLd;
- IR::RegOpnd *instrSrc1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *instrSrc2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- if (typeOf && (typeOf->m_opcode == Js::OpCode::Typeof))
- {
- IR::RegOpnd *typeOfDst = typeOf->GetDst()->IsRegOpnd() ? typeOf->GetDst()->AsRegOpnd() : nullptr;
- if (typeOfDst && instrSrc1 && instrSrc2)
- {
- // do/while(false): 'break' abandons this pattern and falls through to
- // the typeof-vs-typeof check below; 'return false' aborts entirely.
- do
- {
- IR::RegOpnd *typeOpnd = nullptr;
- IR::RegOpnd *idOpnd = nullptr;
- // Work out which compare operand is the Typeof result and which is
- // the expected type-name string.
- if (instrSrc1->m_sym == typeOfDst->m_sym)
- {
- typeOpnd = instrSrc1;
- idOpnd = instrSrc2;
- }
- else if (instrSrc2->m_sym == typeOfDst->m_sym)
- {
- typeOpnd = instrSrc2;
- idOpnd = instrSrc1;
- }
- else
- {
- // Neither source turned out to be the typeOpnd
- break;
- }
- if (!typeOpnd->m_isTempLastUse)
- {
- break;
- }
- if (!(idOpnd->m_sym->m_isSingleDef && idOpnd->m_sym->m_isStrConst))
- {
- return false;
- }
- // The second argument to [Cm|Br]TypeOf is the typeid.
- IR::IntConstOpnd *typeIdOpnd = nullptr;
- Assert(idOpnd->m_sym->m_isSingleDef);
- Assert(idOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd());
- // We can't optimize non-javascript type strings.
- JITJavascriptString *typeNameJsString = JITJavascriptString::FromVar(idOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_localAddress);
- const char16 *typeName = typeNameJsString->GetString();
- Js::InternalString typeNameString(typeName, typeNameJsString->GetLength());
- // Map the literal type-name string to its TypeId; any other literal
- // (e.g. "symbol" here, or a typo) falls back to generic lowering.
- if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::UndefinedTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::ObjectTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Object, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::BooleanTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Boolean, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::NumberTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::StringTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_String, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::FunctionTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, instr->m_func);
- }
- else
- {
- return false;
- }
- if (skippedLoads)
- {
- //validate none of dst of Ld_A overlaps with typeof src or dst
- IR::Opnd* typeOfSrc = typeOf->GetSrc1();
- instrLd = typeOf->GetNextRealInstr();
- while (instrLd != instr)
- {
- if (instrLd->GetDst()->IsEqual(typeOfDst) || instrLd->GetDst()->IsEqual(typeOfSrc))
- {
- return false;
- }
- instrLd = instrLd->GetNextRealInstr();
- }
- // Safe to move the Typeof next to the compare so the pair can be
- // lowered as one unit.
- typeOf->Unlink();
- instr->InsertBefore(typeOf);
- }
- // The first argument to [Cm|Br]TypeOf is the first arg to the TypeOf instruction.
- IR::Opnd *objectOpnd = typeOf->GetSrc1();
- Assert(objectOpnd->IsRegOpnd());
- // Now emit this instruction and remove the ldstr and typeOf.
- *prev = typeOf->m_prev;
- *pfNoLower = false;
- if (instr->IsBranchInstr())
- {
- GenerateFastBrTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
- }
- else
- {
- GenerateFastCmTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
- }
- return true;
- } while (false);
- }
- }
- // Second pattern: "typeof a ==/=== typeof b" (or typeof vs. a string const).
- // typeof always yields an interned type-name string, so pointer equality of
- // the results is sufficient — no helper call needed.
- if (instrSrc1 && instrSrc1->GetStackSym()->IsSingleDef() && instrSrc2 && instrSrc2->GetStackSym()->IsSingleDef() &&
- (
- ((instrSrc1->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) &&
- ((instrSrc2->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) || instrSrc2->GetStackSym()->GetIsStrConst()))
- ||
- ((instrSrc2->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) &&
- ((instrSrc1->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) || instrSrc1->GetStackSym()->GetIsStrConst()))
- )
- )
- {
- *pfNoLower = true;
- if (instr->IsBranchInstr())
- {
- InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, instr->AsBranchInstr()->GetTarget(), instr);
- instr->Remove();
- }
- else
- {
- if (instrSrc1->IsEqual(instrSrc2))
- {
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, isNeqOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue), instr);
- }
- else
- {
- // t1 = typeof o1
- // t2 = typeof o2
- // dst = t1 == t2
- // MOV dst, true
- // CMP t1, t2
- // x86, amd64
- // CMOVNE dst, false
- // arm
- // BEQ $done
- // MOV dst, false
- // $done
- // If dst aliases a source, hoist the source into a fresh register
- // first, since dst is written before the compare.
- if (instr->GetDst()->IsEqual(instrSrc1))
- {
- IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
- instrSrc1 = hoistInstr->GetDst()->AsRegOpnd();
- }
- if (instr->GetDst()->IsEqual(instrSrc2))
- {
- IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
- instrSrc2 = hoistInstr->GetDst()->AsRegOpnd();
- }
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
- #if defined(_M_ARM32_OR_ARM64)
- IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
- InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, doneLabel, instr);
- InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
- instr->InsertBefore(doneLabel);
- #else
- InsertCompare(instrSrc1, instrSrc2, instr);
- LowererMD::InsertCmovCC(isNeqOp ? Js::OpCode::CMOVE : Js::OpCode::CMOVNE, instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
- #endif
- }
- instr->Remove();
- }
- return true;
- }
- return false;
- }
- // Emits a test of the object's type flags against TypeFlagMask_IsFalsy and
- // branches to falsyLabel when any falsy flag is set. typeOpnd must already
- // hold the object's Js::Type pointer.
- void
- Lowerer::GenerateFalsyObjectTest(IR::Instr * insertInstr, IR::RegOpnd * typeOpnd, IR::LabelInstr * falsyLabel)
- {
- IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
- InsertTestBranch(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), Js::OpCode::BrNeq_A, falsyLabel, insertInstr);
- }
- // Falsy-object check for the typeof fast path: a falsy object reports
- // typeof == "undefined", so when comparing against TypeIds_Undefined a set
- // falsy flag branches toward 'target' (equal), while for any other typeId it
- // branches toward 'done' (not equal); isNeqOp swaps the two. Emits nothing
- // when the typeId under test can never be falsy on this thread context.
- void
- Lowerer::GenerateFalsyObjectTest(IR::Instr *insertInstr, IR::RegOpnd *typeOpnd, Js::TypeId typeIdToCheck, IR::LabelInstr* target, IR::LabelInstr* done, bool isNeqOp)
- {
- if (!this->m_func->GetThreadContextInfo()->CanBeFalsy(typeIdToCheck) && typeIdToCheck != Js::TypeIds_Undefined)
- {
- // Don't need the check for falsy, the typeId we are looking for doesn't care
- return;
- }
- IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
- InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), insertInstr);
- if (typeIdToCheck == Js::TypeIds_Undefined)
- {
- //Falsy object returns true for undefined ((typeof falsyObj) == "undefined")
- InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp ? done : target, insertInstr);
- }
- else
- {
- //Falsy object returns false for all other types ((typeof falsyObj) != "function")
- InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp? target : done , insertInstr);
- }
- }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastBrTypeOf
///
/// Emits an inline fast path for a branch whose condition is
/// (typeof object ==/!= "<type>"), where typeIdOpnd holds the TypeId that the
/// typeof string literal was mapped to. Only the "object"/"function" cases may
/// still need the Op_Typeof helper (proxies / host dispatch objects); all
/// other cases are lowered fully inline and *pfNoLower is set.
///
///     branch     - the Br(Sr)(Not)Eq/Neq_A branch being lowered
///     object     - operand whose typeof result is being tested
///     typeIdOpnd - constant TypeId for the typeof string
///     typeOf     - the original Typeof instruction; moved into the helper
///                  path or removed entirely
///     pfNoLower  - set to true when no further lowering of the branch is needed
///     isNeqOp    - true for the "!=" flavors; swaps $target/$done throughout
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastBrTypeOf(IR::Instr *branch, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
{
    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *target = branch->AsBranchInstr()->GetTarget();
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

    // Only the equality/inequality branch flavors are expected here.
    switch(branch->m_opcode)
    {
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        break;

    default:
        Assert(UNREACHED);
        __assume(UNREACHED);
    }

    // A tagged (non-object) value has typeof "number", so a failed object test
    // goes to $target only when comparing against TypeIds_Number.
    // JNE/BNE (typeId == Js::TypeIds_Number) ? $target : $done
    IR::LabelInstr *label = (typeId == Js::TypeIds_Number) ? target : done;
    if (isNeqOp)
        label = (label == target) ? done : target;

    m_lowererMD.GenerateObjectTest(object, branch, label);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               branch);

    // Falsy objects lie about their typeof; handled out of line by this helper.
    GenerateFalsyObjectTest(branch, typeRegOpnd, typeId, target, done, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               branch);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // Any TypeId >= TypeIds_Object reports typeof "object".
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        // Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
        // via a single range check after biasing by TypeIds_FirstNumberType.
        InsertSub(false, objTypeIdOpnd, objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);

        InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_LastNumberType - Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);

        InsertBranch(isNeqOp ? Js::OpCode::BrGt_A : Js::OpCode::BrLe_A, true, target, branch);
    }
    else
    {
        InsertCompare(objTypeIdOpnd, typeIdOpnd, branch);
        InsertBranch(isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, target, branch);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $done : $target
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(branch, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? done : target,
                            branch);
    }

    branch->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            branch);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? target : done,
                            branch);

        // Now emit Typeof and lower it like we would've for the helper call.
        {
            branch->InsertBefore(helper);
            typeOf->Unlink();
            branch->InsertBefore(typeOf);

            // Propagate the branch's implicit-call bailout onto the helper call
            // when the Typeof doesn't already carry an equivalent one.
            if (branch->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(branch->GetBailOutKind()) &&
                (!typeOf->HasBailOutInfo() || !BailOutInfo::IsBailOutOnImplicitCalls(typeOf->GetBailOutKind())))
            {
                typeOf = AddBailoutToHelperCallInstr(typeOf, branch->GetBailOutInfo(), branch->GetBailOutKind(), branch);
            }

            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        branch->Remove();
        *pfNoLower = true;
    }
    // $done:
}
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastCmTypeOf
///
/// Emits an inline fast path for a compare (typeof object ==/!= "<type>")
/// that materializes a boolean into dst: dst is preloaded with 'true' and
/// the emitted control flow falls into $movFalse when the comparison fails.
/// Mirrors GenerateFastBrTypeOf, with $done/$movFalse playing the roles of
/// the branch targets. Only "object"/"function" may need the Op_Typeof
/// helper (proxies / host dispatch objects); other cases set *pfNoLower.
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastCmTypeOf(IR::Instr *compare, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
{
    Assert(compare->m_opcode == Js::OpCode::CmSrEq_A ||
           compare->m_opcode == Js::OpCode::CmEq_A ||
           compare->m_opcode == Js::OpCode::CmSrNeq_A ||
           compare->m_opcode == Js::OpCode::CmNeq_A);

    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *movFalse = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper= IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *dst = compare->GetDst()->IsRegOpnd() ? compare->GetDst()->AsRegOpnd() : nullptr;
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

    Assert(dst);

    if (dst->IsEqual(object))
    {
        // dst same as the src of typeof. As we need to move true to dst first we need to save the src to a new opnd
        IR::RegOpnd *newObject = IR::RegOpnd::New(object->GetType(), m_func);
        InsertMove(newObject, object, compare); // Save src
        object = newObject;
    }

    // mov dst, 'true'
    InsertMove(dst,
               LoadLibraryValueOpnd(compare, LibraryValue::ValueTrue),
               compare);

    // TEST object, 1
    // JNE (typeId == Js::TypeIds_Number) ? $done : $movFalse
    // (a tagged value has typeof "number"; isNeqOp inverts the outcome)
    IR::LabelInstr *target = (typeId == Js::TypeIds_Number) ? done : movFalse;
    if (isNeqOp)
    {
        target = (target == done) ? movFalse : done;
    }

    m_lowererMD.GenerateObjectTest(object, compare, target);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               compare);

    // Falsy objects lie about their typeof; handled out of line by this helper.
    GenerateFalsyObjectTest(compare, typeRegOpnd, typeId, done, movFalse, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               compare);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // Any TypeId >= TypeIds_Object reports typeof "object".
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        // Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_LastNumberType, TyInt32, compare->m_func),
                            Js::OpCode::BrGt_A,
                            isNeqOp ? done : movFalse,
                            compare);

        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, compare->m_func),
                            isNeqOp? Js::OpCode::BrLt_A : Js::OpCode::BrGe_A,
                            done,
                            compare);
    }
    else
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, done, compare);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $movFalse : $done
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(compare, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? movFalse : done,
                            compare);
    }

    compare->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            compare);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? done : movFalse,
                            compare);

        // Now emit Typeof like we would've for the helper call.
        {
            compare->InsertBefore(helper);
            typeOf->Unlink();
            compare->InsertBefore(typeOf);

            // Propagate the compare's implicit-call bailout onto the helper call
            // when the Typeof doesn't already carry an equivalent one.
            if (compare->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(compare->GetBailOutKind()) &&
                (!typeOf->HasBailOutInfo() || !BailOutInfo::IsBailOutOnImplicitCalls(typeOf->GetBailOutKind())))
            {
                typeOf = AddBailoutToHelperCallInstr(typeOf, compare->GetBailOutInfo(), compare->GetBailOutKind(), compare);
            }

            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }

        // JMP/B $done  -- skip the $movFalse store on the helper path
        InsertBranch(Js::OpCode::Br, done, done);
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        // Detach dst before removing the compare so the $movFalse store below
        // still has a valid operand to write.
        dst = compare->UnlinkDst()->AsRegOpnd();
        compare->Remove();
        *pfNoLower = true;
    }

    // $movFalse: (insert before $done)
    done->InsertBefore(movFalse);

    // MOV dst, 'false'
    InsertMove(dst, LoadLibraryValueOpnd(done, LibraryValue::ValueFalse), done);

    // $done:
}
- void
- Lowerer::GenerateCheckForCallFlagNew(IR::Instr* instrInsert)
- {
- Func *func = instrInsert->m_func;
- IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- Assert(!func->IsInlinee());
- // MOV s1, [ebp + 4] // s1 = call info
- // AND s2, s1, Js::CallFlags_New // s2 = s1 & Js::CallFlags_New
- // CMP s2, 0
- // JNE $Done
- // CALL RuntimeTypeError
- // $Done
- IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
- Assert(Js::CallInfo::ksizeofCount == 24);
- IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyMachReg, func);
- InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyMachReg, func, true), instrInsert);
- InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelDone, instrInsert);
- IR::Instr *throwInstr = IR::Instr::New(
- Js::OpCode::RuntimeTypeError,
- IR::RegOpnd::New(TyMachReg, m_func),
- IR::IntConstOpnd::New(SCODE_CODE(JSERR_ClassConstructorCannotBeCalledWithoutNew), TyInt32, m_func),
- m_func);
- instrInsert->InsertBefore(throwInstr);
- this->LowerUnaryHelperMem(throwInstr, IR::HelperOp_RuntimeTypeError);
- instrInsert->InsertBefore(labelDone);
- instrInsert->Remove();
- }
// Emits the inline equivalent of JavascriptOperators::IsConstructor(instance):
// unwraps proxy chains, then branches to labelReturnTrue if the instance is a
// function without the ErrorOnNew attribute, else to labelReturnFalse.
// Both labels are external to the emitted sequence. instanceRegOpnd is
// clobbered (overwritten with the proxy target while unwrapping).
void
Lowerer::GenerateJavascriptOperatorsIsConstructorGotoElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnTrue, IR::LabelInstr *labelReturnFalse)
{
    //  $ProxyLoop:
    //  // if (!VarIs<RecyclableObject>(instance)) { goto $ReturnFalse }; // omitted: VarIs<RecyclableObject>(instance) always true
    //  MOV s0, instance->type
    //  MOV s1, s0->typeId
    //  CMP s1, TypeIds_Proxy
    //  JNE $NotProxy
    //
    //  MOV instance, instance->target
    //  JMP $ProxyLoop
    //
    //  $NotProxy:
    //  CMP s1, TypeIds_Function
    //  JNE $ReturnFalse   // external
    //
    //  MOV s0, instance->functionInfo
    //  MOV s1, s0->attributes
    //  TEST s1, ErrorOnNew
    //  JNE $ReturnFalse   // external
    //
    //  JMP $ReturnTrue    // external

    Func *func = instrInsert->m_func;

    IR::LabelInstr *labelProxyLoop = InsertLoopTopLabel(instrInsert);
    IR::LabelInstr *labelNotProxy = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    IR::RegOpnd *indir0RegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *indir1RegOpnd = IR::RegOpnd::New(TyUint32, func);

    // instance is redefined on the back edge (proxy unwrap), so the register
    // allocator must keep it live across it.
    Loop * loop = labelProxyLoop->GetLoop();
    loop->regAlloc.liveOnBackEdgeSyms->Set(instanceRegOpnd->m_sym->m_id);

    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    Lowerer::InsertMove(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    Lowerer::InsertMove(indir1RegOpnd, indirOpnd, instrInsert);

    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNotProxy, instrInsert);

    // Proxy: replace instance with its target and re-test from the loop top.
    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptProxy::GetOffsetOfTarget(), TyMachPtr, func);
    Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);

    InsertBranch(Js::OpCode::Br, labelProxyLoop, instrInsert);

    instrInsert->InsertBefore(labelNotProxy);

    // Non-functions are never constructors.
    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Function, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, func);
    Lowerer::InsertMove(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::FunctionInfo::GetAttributesOffset(), TyUint32, func);
    Lowerer::InsertMove(indir1RegOpnd, indirOpnd, instrInsert);

    // Functions flagged ErrorOnNew (e.g. ones that may not be new'ed) fail too.
    InsertTestBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::FunctionInfo::Attributes::ErrorOnNew, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    InsertBranch(Js::OpCode::Br, labelReturnTrue, instrInsert);
}
- void
- Lowerer::GenerateRecyclableObjectGetPrototypeNullptrGoto(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnNullptr)
- {
- // MOV instance, instance->type
- // MOV flags, instance->flags
- // TEST flags, TypeFlagMask_HasSpecialPrototype
- // JNE $ReturnNullptr // external, bypassing nullptr check
- // MOV instance, instance->prototype
- Func *func = instrInsert->m_func;
- IR::RegOpnd *flagsRegOpnd = IR::RegOpnd::New(TyUint32, func);
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
- Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);
- indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfFlags(), TyUint32, func);
- Lowerer::InsertMove(flagsRegOpnd, indirOpnd, instrInsert);
- InsertTestBranch(flagsRegOpnd, IR::IntConstOpnd::New(TypeFlagMask_HasSpecialPrototype, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnNullptr, instrInsert);
- indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfPrototype(), TyMachPtr, func);
- Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);
- }
// Branches to labelFalse when instanceRegOpnd holds a tagged value rather than
// a RecyclableObject pointer (i.e. the inline negation of
// VarIs<RecyclableObject>). Falls through when the value is a real pointer.
void
Lowerer::GenerateRecyclableObjectIsElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelFalse)
{
    Func *func = instrInsert->m_func;

#if INT32VAR
    // 64-bit tagging scheme: any of the top 16 bits set marks a tagged
    // (non-pointer) Var.
    InsertTestBranch(instanceRegOpnd, IR::AddrOpnd::New((Js::Var)0xffff000000000000, IR::AddrOpndKindConstantVar, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
#else
    // 32-bit tagging scheme: the AtomTag low bit marks a tagged int.
    InsertTestBranch(instanceRegOpnd, IR::IntConstOpnd::New(Js::AtomTag, TyUint32, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
#endif
}
// Lowers LdHomeObj: loads the current function's home object into dst, or
// undefined when the function has none. The home object field lives at a
// different offset depending on the concrete ScriptFunction layout, so the
// function's vtable is compared against the known variants.
void
Lowerer::GenerateLdHomeObj(IR::Instr* instr)
{
    //  MOV dst, undefined
    //  MOV instance, functionObject   // functionObject through stack params or src1
    //  CMP [instance], VtableStackScriptFunction
    //  JE  $Done
    //  MOV instance, instance->homeObj
    //  TEST instance, instance
    //  JZ  $Done
    //  MOV dst, instance
    //  $Done:

    Func *func = instr->m_func;

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelInlineFunc = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *testLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *scriptFuncLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);

    IR::Opnd *dstOpnd = instr->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    // Default result: undefined.
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instr);

    IR::Opnd * functionObjOpnd = nullptr;
    m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
    Lowerer::InsertMove(instanceRegOpnd, functionObjOpnd, instr);

    // Stack (unboxed) script functions are skipped entirely.
    IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableStackScriptFunction);
    IR::BranchInstr* branchInstr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
        Js::OpCode::BrEq_A, true, labelDone, instr);
    InsertObjectPoison(instanceRegOpnd, branchInstr, instr, false);

    if (func->GetJITFunctionBody()->HasHomeObj())
    {
        // Is this an function with inline cache and home obj??
        IR::Opnd * vtableAddressInlineFuncHomObjOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableScriptFunctionWithInlineCacheAndHomeObj);
        IR::BranchInstr* inlineFuncHomObjOpndBr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressInlineFuncHomObjOpnd, Js::OpCode::BrNeq_A, labelInlineFunc, instr);
        InsertObjectPoison(instanceRegOpnd, inlineFuncHomObjOpndBr, instr, false);
        IR::IndirOpnd *indirInlineFuncHomeObjOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::FunctionWithHomeObj<Js::ScriptFunctionWithInlineCache>::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirInlineFuncHomeObjOpnd, instr);
        InsertBranch(Js::OpCode::Br, testLabel, instr);

        instr->InsertBefore(labelInlineFunc);

        // Is this a function with inline cache, home obj and computed name??
        IR::Opnd * vtableAddressInlineFuncHomObjCompNameOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableScriptFunctionWithInlineCacheHomeObjAndComputedName);
        IR::BranchInstr* inlineFuncHomObjCompNameBr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressInlineFuncHomObjCompNameOpnd, Js::OpCode::BrNeq_A, scriptFuncLabel, instr);
        InsertObjectPoison(instanceRegOpnd, inlineFuncHomObjCompNameBr, instr, false);
        IR::IndirOpnd *indirInlineFuncHomeObjCompNameOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::FunctionWithComputedName<Js::FunctionWithHomeObj<Js::ScriptFunctionWithInlineCache>>::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirInlineFuncHomeObjCompNameOpnd, instr);
        InsertBranch(Js::OpCode::Br, testLabel, instr);

        instr->InsertBefore(scriptFuncLabel);
        // Fallback: plain ScriptFunctionWithHomeObj layout.
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::ScriptFunctionWithHomeObj::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instr);
    }
    else
    {
        // Even if the function does not have home object in eval cases we still have the LdHomeObj opcode
        InsertBranch(Js::OpCode::Br, labelDone, instr);
    }

    instr->InsertBefore(testLabel);

    // A null home object leaves dst as undefined.
    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);

    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);

    instr->InsertBefore(labelDone);
    instr->Remove();
}
// Lowers LdHomeObjProto: dst = prototype of the home object (src1), used for
// 'super' references. Produces undefined when src1 is null/tagged/non-object;
// throws JSERR_BadSuperReference when the home object is the null or
// undefined value.
void
Lowerer::GenerateLdHomeObjProto(IR::Instr* instr)
{
    //  MOV dst, undefined
    //  MOV instance, src1          // homeObj
    //  TEST instance, instance
    //  JZ  $Done
    //
    //  if (!VarIs<RecyclableObject>(instance)) goto $Done
    //  MOV type, [instance+Offset(type)]
    //  MOV typeId, [type+Offset(typeId)]
    //  CMP typeId, TypeIds_Null
    //  JEQ $Err
    //  CMP typeId, TypeIds_Undefined
    //  JNE $NoErr
    //
    //  $Err:
    //  ThrowRuntimeReferenceError(JSERR_BadSuperReference);
    //
    //  $NoErr:
    //  instance = ((RecyclableObject*)instance)->GetPrototype();
    //  if (instance == nullptr) goto $Done;
    //
    //  if (!VarIs<RecyclableObject>(instance)) goto $Done
    //
    //  MOV dst, instance
    //  $Done:

    Func *func = instr->m_func;

    IR::Opnd *src1Opnd = instr->UnlinkSrc1();

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelNoErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeIdRegOpnd = IR::RegOpnd::New(TyUint32, func);

    IR::Opnd *dstOpnd = instr->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    // Default result: undefined.
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instr);
    Lowerer::InsertMove(instanceRegOpnd, src1Opnd, instr);

    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);

    // Tagged values can't be a home object; keep undefined.
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);

    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    Lowerer::InsertMove(typeRegOpnd, indirOpnd, instr);

    indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    Lowerer::InsertMove(typeIdRegOpnd, indirOpnd, instr);

    // null/undefined home object is an invalid super reference.
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func, true), Js::OpCode::BrEq_A, labelErr, instr);
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Undefined, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNoErr, instr);

    instr->InsertBefore(labelErr);
    this->GenerateRuntimeError(instr, JSERR_BadSuperReference, IR::HelperOp_RuntimeReferenceError);

    instr->InsertBefore(labelNoErr);

    this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, labelDone);
    // A tagged prototype also leaves dst as undefined.
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);

    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);

    instr->InsertBefore(labelDone);
    instr->Remove();
}
- void
- Lowerer::GenerateLdFuncObj(IR::Instr* instr)
- {
- // MOV dst, functionObject // functionObject through stack params or src1
- IR::Opnd *dstOpnd = instr->GetDst();
- IR::Opnd *functionObjOpnd = nullptr;
- m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
- Lowerer::InsertMove(dstOpnd, functionObjOpnd, instr);
- instr->Remove();
- }
- void
- Lowerer::GenerateLdFuncObjProto(IR::Instr* instr)
- {
- // MOV instance, src1
- //
- // instance = ((RecyclableObject*)instance)->GetPrototype();
- // if (instance == nullptr) goto $ThrowTypeError;
- //
- // MOV dst, instance
- //
- // if (!JavascriptOperators::IsConstructor(instance))
- // goto $ThrowTypeError;
- // else
- // goto $Done;
- //
- // $helperLabelThrowTypeError:
- // ThrowRuntimeTypeError(JSERR_NotAConstructor);
- //
- // $Done:
- Func *func = instr->m_func;
- IR::Opnd *src1Opnd = instr->UnlinkSrc1();
- IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- IR::LabelInstr *helperLabelThrowTypeError = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
- IR::Opnd *dstOpnd = instr->GetDst();
- Lowerer::InsertMove(instanceRegOpnd, src1Opnd, instr);
- this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, helperLabelThrowTypeError);
- Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);
- this->GenerateJavascriptOperatorsIsConstructorGotoElse(instr, instanceRegOpnd, labelDone, helperLabelThrowTypeError);
- instr->InsertBefore(helperLabelThrowTypeError);
- this->GenerateRuntimeError(instr, JSERR_NotAConstructor, IR::HelperOp_RuntimeTypeError);
- instr->InsertBefore(labelDone);
- instr->Remove();
- }
// Lowers the load of new.target for the current (non-inlined) frame:
// when the frame was called with an explicit new.target it is the hidden last
// argument; when called as 'new' without one it is the function object itself;
// otherwise it is undefined. Coroutines just get undefined here.
void
Lowerer::GenerateLoadNewTarget(IR::Instr* instrInsert)
{
    Func *func = instrInsert->m_func;

    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr * labelLoadArgNewTarget = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);

    Assert(!func->IsInlinee());

    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        // Generators/async functions: just produce undefined.
        instrInsert->SetSrc1(opndUndefAddress);
        LowererMD::ChangeToAssign(instrInsert);
        return;
    }

    //  MOV dst, undefined                         // dst = undefined
    //  MOV s1, callInfo                           // s1 = callInfo
    //  TEST s1, Js::CallFlags_NewTarget << 24     // if (callInfo.Flags & Js::CallFlags_NewTarget)
    //  JNE $LoadLastArgument                      //     goto $LoadLastArgument
    //  TEST s1, Js::CallFlags_New << 24           // if (!(callInfo.Flags & Js::CallFlags_New))
    //  JE $Done                                   //     goto $Done
    //  MOV dst, functionObject                    // dst = functionObject
    //  JMP $Done                                  // goto $Done
    //  $LoadLastArgument
    //  AND s1, s1, (0x00FFFFFF)                   // s2 = callInfo.Count == arguments.length + 2
    //  MOV dst, [ebp + (s1 - 1) * sizeof(Var) + formalParamOffset * sizeof(Var) ]   // points to new.target
    //  $Done

    IR::Opnd *dstOpnd = instrInsert->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instrInsert);

    IR::SymOpnd *callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    // The flag shifts below assume the count occupies the low 24 bits of CallInfo.
    Assert(Js::CallInfo::ksizeofCount == 24);

    IR::RegOpnd *s1 = IR::RegOpnd::New(TyUint32, func);
    Lowerer::InsertMove(s1, callInfoOpnd, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_NewTarget << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrNeq_A, labelLoadArgNewTarget, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrEq_A, labelDone, instrInsert);

    // Called as 'new' without an explicit new.target: new.target is the
    // function object itself.
    IR::Instr* loadFuncInstr = IR::Instr::New(Js::OpCode::AND, func);
    loadFuncInstr->SetDst(instrInsert->GetDst());
    LoadFuncExpression(loadFuncInstr);
    instrInsert->InsertBefore(loadFuncInstr);
    InsertBranch(Js::OpCode::Br, labelDone, instrInsert);

    instrInsert->InsertBefore(labelLoadArgNewTarget);

    InsertAnd(s1, s1, IR::IntConstOpnd::New(0x00FFFFFF, TyUint32, func, true), instrInsert); // callInfo.Count

    // [formalOffset (4) + callInfo.Count] points to 'new.target' - see diagram in GenerateLoadStackArgumentByIndex()
    GenerateLoadStackArgumentByIndex(dstOpnd, s1, instrInsert, 0, m_func);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
- void
- Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
- {
- Func * func = this->m_func;
- IR::Instr * insertBeforeInstr = instr->m_next;
- IR::RegOpnd * functionObjectOpnd = instr->GetDst()->AsRegOpnd();
- IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction);
- IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- IR::BranchInstr *branchInstr = InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
- Js::OpCode::BrNeq_A, true, labelDone, insertBeforeInstr);
- InsertObjectPoison(functionObjectOpnd, branchInstr, insertBeforeInstr, false);
- IR::RegOpnd * boxedFunctionObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
- InsertMove(boxedFunctionObjectOpnd, IR::IndirOpnd::New(functionObjectOpnd,
- Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertBeforeInstr);
- InsertTestBranch(boxedFunctionObjectOpnd, boxedFunctionObjectOpnd, Js::OpCode::BrEq_A, true, labelDone, insertBeforeInstr);
- InsertMove(functionObjectOpnd, boxedFunctionObjectOpnd, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(labelDone);
- }
// Returns a register operand holding the inline cache to hand to a runtime
// helper: the per-function-object cache at propSymOpnd->m_inlineCacheIndex
// when the function object carries inline caches, else the JIT-time
// m_runtimeInlineCache address from the property sym.
// isHelper marks the emitted labels as helper (cold) blocks.
IR::Opnd *
Lowerer::GetInlineCacheFromFuncObjectForRuntimeUse(IR::Instr * instr, IR::PropertySymOpnd * propSymOpnd, bool isHelper)
{
    //  MOV s1, [ebp + 8]                   //s1 = function object
    //  MOV s2, [s1 + offset(hasInlineCaches)]
    //  TEST s2, s2
    //  JE $L1
    //  MOV s3, [s1 + offset(m_inlineCaches)] //s3 = inlineCaches from function object
    //  MOV s4, [s3 + index*scale]          //s4 = inlineCaches[index]
    //  JMP $L2
    //  $L1
    //  MOV s3, propSym->m_runtimeCache
    //  $L2

    byte indirScale = this->m_lowererMD.GetDefaultIndirScale();

    IR::RegOpnd * funcObjOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * funcObjInstr = IR::Instr::New(Js::OpCode::Ld_A, funcObjOpnd, instr->m_func);
    instr->InsertBefore(funcObjInstr);
    LoadFuncExpression(funcObjInstr);

    IR::RegOpnd * funcObjHasInlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    this->InsertMove(funcObjHasInlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), TyUint8, instr->m_func), instr);

    IR::LabelInstr * inlineCachesNullLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertTestBranch(funcObjHasInlineCachesOpnd, funcObjHasInlineCachesOpnd, Js::OpCode::BrEq_A, inlineCachesNullLabel, instr);

    IR::RegOpnd * inlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    Lowerer::InsertMove(inlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunctionWithInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, instr->m_func), instr);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
    int inlineCacheOffset;
    // When index * sizeof(InlineCache*) fits in an int32 (Mul reports no
    // overflow), address the slot with a constant offset; otherwise fall back
    // to a scaled-index addressing mode.
    if (!Int32Math::Mul(sizeof(Js::InlineCache *), propSymOpnd->m_inlineCacheIndex, &inlineCacheOffset))
    {
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, inlineCacheOffset, TyMachPtr, instr->m_func), instr);
    }
    else
    {
        Lowerer::InsertMove(indexOpnd, IR::IntConstOpnd::New(propSymOpnd->m_inlineCacheIndex, TyUint32, instr->m_func), instr);
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, indexOpnd, indirScale, TyMachPtr, instr->m_func), instr);
    }

    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertBranch(LowererMD::MDUncondBranchOpcode, continueLabel, instr);

    // $L1: no caches on the function object - use the JIT-time cache address.
    IR::Instr * ldCacheFromPropSymOpndInstr = this->InsertMove(inlineCacheOpnd, IR::AddrOpnd::New(propSymOpnd->m_runtimeInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func), instr);
    ldCacheFromPropSymOpndInstr->InsertBefore(inlineCachesNullLabel);
    ldCacheFromPropSymOpndInstr->InsertAfter(continueLabel);

    return inlineCacheOpnd;
}
- IR::Instr *
- Lowerer::LowerInitClass(IR::Instr * instr)
- {
- // scriptContext
- IR::Instr * prevInstr = LoadScriptContext(instr);
- // extends
- if (instr->GetSrc2() != nullptr)
- {
- IR::Opnd * extendsOpnd = instr->UnlinkSrc2();
- m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
- }
- else
- {
- IR::AddrOpnd* extendsOpnd = IR::AddrOpnd::NewNull(this->m_func);
- m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
- }
- // constructor
- IR::Opnd * ctorOpnd = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, ctorOpnd);
- // call
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperOP_InitClass);
- return prevInstr;
- }
- void
- Lowerer::LowerNewConcatStrMulti(IR::Instr * instr)
- {
- IR::IntConstOpnd * countOpnd = instr->UnlinkSrc1()->AsIntConstOpnd();
- IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
- uint8 count = (uint8)countOpnd->GetValue();
- Assert(dstOpnd->GetValueType().IsString());
- GenerateRecyclerAlloc(IR::HelperAllocMemForConcatStringMulti, Js::ConcatStringMulti::GetAllocSize(count), dstOpnd, instr);
- GenerateRecyclerMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, VTableValue::VtableConcatStringMulti), instr);
- GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfType(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueStringTypeStatic), instr);
- GenerateRecyclerMemInitNull(dstOpnd, Js::ConcatStringMulti::GetOffsetOfpszValue(), instr);
- GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), 0, instr);
- GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfSlotCount(), countOpnd->AsUint32(), instr);
- instr->Remove();
- }
// Lowers the back-end concat-string pattern by walking the def chain of
// SetConcatStrMultiItemBE instructions linked through src2, emitting one
// SetConcatStrMultiItem store per item (highest index first), and finally
// lowering the allocation itself via LowerNewConcatStrMulti.
void
Lowerer::LowerNewConcatStrMultiBE(IR::Instr * instr)
{
    // Lower
    //      t1 = SetConcatStrMultiBE s1
    //      t2 = SetConcatStrMultiBE s2, t1
    //      t3 = SetConcatStrMultiBE s3, t2
    //      s  = NewConcatStrMultiBE 3, t3
    // to
    //      s = new concat string
    //      s+0 = s1
    //      s+1 = s2
    //      s+2 = s3
    Assert(instr->GetSrc1()->IsConstOpnd());
    Assert(instr->GetDst()->IsRegOpnd());

    IR::RegOpnd * newString = instr->GetDst()->AsRegOpnd();
    IR::Opnd * newConcatItemOpnd = nullptr;
    // Slots are filled from the last item down to index 0.
    uint index = instr->GetSrc1()->AsIntConstOpnd()->AsUint32() - 1;
    IR::Instr * concatItemInstr = nullptr;
    IR::Opnd * linkOpnd = instr->GetSrc2();
    while (linkOpnd)
    {
        Assert(linkOpnd->IsRegOpnd());
        concatItemInstr = linkOpnd->GetStackSym()->GetInstrDef();
        Assert(concatItemInstr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE);

        IR::Opnd * concatItemOpnd = concatItemInstr->GetSrc1();
        Assert(concatItemOpnd->IsRegOpnd());

        // If one of the concat items is equal to the dst of the concat expressions (s = s + a + b),
        // hoist the load of that item to before the setting of the new string to the dst.
        if (concatItemOpnd->IsEqual(newString))
        {
            if (!newConcatItemOpnd)
            {
                IR::Instr * hoistSrcInstr = concatItemInstr->HoistSrc1(Js::OpCode::Ld_A);
                newConcatItemOpnd = hoistSrcInstr->GetDst();
            }
            concatItemOpnd = newConcatItemOpnd;
        }
        else
        {
            // If only some of the SetConcatStrMultiItemBE instructions were CSE'd and the rest, along with the NewConcatStrMultiBE
            // instruction, were in a loop, the strings on the CSE'd Set*BE instructions will become live on back edge. Add them to
            // addToLiveOnBackEdgeSyms here and clear when we reach the Set*BE instruction.
            // Note that we are doing this only for string opnds which are not the same as the dst of the concat expression. Reasoning
            // behind this is that if a loop has a concat expression with one of its sources same as the dst, the Set*BE instruction
            // for the dst wouldn't have been CSE'd as the dst's value is changing in the loop and the backward pass should have set the
            // symbol as live on backedge.
            this->addToLiveOnBackEdgeSyms->Set(concatItemOpnd->GetStackSym()->m_id);
        }

        // Stores are inserted AFTER 'instr' (in reverse discovery order), so
        // they execute only once the new string has been allocated.
        IR::Instr * newConcatItemInstr = IR::Instr::New(Js::OpCode::SetConcatStrMultiItem,
                                                        IR::IndirOpnd::New(newString, index, TyVar, instr->m_func),
                                                        concatItemOpnd,
                                                        instr->m_func);
        instr->InsertAfter(newConcatItemInstr);
        this->LowerSetConcatStrMultiItem(newConcatItemInstr);

        linkOpnd = concatItemInstr->GetSrc2();
        index--;
    }
    // index is unsigned, so after filling slot 0 it wraps around; the -1 here
    // converts to UINT_MAX for the comparison, making this a wraparound check.
    Assert(index == -1);

    this->LowerNewConcatStrMulti(instr);
}
// Lowers SetConcatStrMultiItem: stores one source string into the indicated
// slot of a ConcatStringMulti and accumulates the concat string's running
// character length, raising OOM if the combined length overflows uint32.
// The dst is an indir whose offset is the slot index (rescaled here to bytes).
void
Lowerer::LowerSetConcatStrMultiItem(IR::Instr * instr)
{
    Func * func = this->m_func;
    IR::IndirOpnd * dstOpnd = instr->GetDst()->AsIndirOpnd();
    IR::RegOpnd * concatStrOpnd = dstOpnd->GetBaseOpnd();
    IR::RegOpnd * srcOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    Assert(concatStrOpnd->GetValueType().IsString());
    Assert(srcOpnd->GetValueType().IsString());
    // Compound strings are mutable; clone them so the concat slot holds an
    // immutable (or script-unreferenced) string.
    srcOpnd = GenerateGetImmutableOrScriptUnreferencedString(srcOpnd, instr, IR::HelperOp_CompoundStringCloneForConcat);
    instr->SetSrc1(srcOpnd);
    // dstLength += srcLength (with unsigned-overflow check below).
    IR::IndirOpnd * dstLength = IR::IndirOpnd::New(concatStrOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func);
    IR::Opnd * srcLength;
    if (srcOpnd->m_sym->m_isStrConst)
    {
        // Constant string: its length is known at JIT time.
        srcLength = IR::IntConstOpnd::New(JITJavascriptString::FromVar(srcOpnd->m_sym->GetConstAddress(true))->GetLength(), TyUint32, func);
    }
    else
    {
        srcLength = IR::RegOpnd::New(TyUint32, func);
        InsertMove(srcLength, IR::IndirOpnd::New(srcOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func), instr);
    }
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertAddWithOverflowCheck(false, dstLength, dstLength, srcLength, instr, &onOverflowInsertBeforeInstr);
    // On overflow: call the OOM helper (does not return a value).
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperOp_OutOfMemoryError, func));
    instr->InsertBefore(onOverflowInsertBeforeInstr);
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);
    // Rescale the slot index into a byte offset within the slot array.
    dstOpnd->SetOffset(dstOpnd->GetOffset() * sizeof(Js::JavascriptString *) + Js::ConcatStringMulti::GetOffsetOfSlots());
    // The store writes a GC pointer into recycler memory: needs a write barrier.
    LowererMD::ChangeToWriteBarrierAssign(instr, func);
}
- IR::RegOpnd *
- Lowerer::GenerateGetImmutableOrScriptUnreferencedString(IR::RegOpnd * strOpnd, IR::Instr * insertBeforeInstr, IR::JnHelperMethod helperMethod, bool reloadDst)
- {
- if (strOpnd->m_sym->m_isStrConst)
- {
- return strOpnd;
- }
- Func * const func = this->m_func;
- IR::RegOpnd *dstOpnd = reloadDst == true ? IR::RegOpnd::New(TyVar, func) : strOpnd;
- IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- if (!strOpnd->IsNotTaggedValue())
- {
- this->m_lowererMD.GenerateObjectTest(strOpnd, insertBeforeInstr, doneLabel);
- }
- // CMP [strOpnd], Js::CompoundString::`vtable'
- // JEQ $helper
- InsertCompareBranch(
- IR::IndirOpnd::New(strOpnd, 0, TyMachPtr, func),
- this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
- Js::OpCode::BrEq_A,
- helperLabel,
- insertBeforeInstr);
- if (reloadDst)
- {
- InsertMove(dstOpnd, strOpnd, insertBeforeInstr);
- }
- InsertBranch(Js::OpCode::Br, doneLabel, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(helperLabel);
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, strOpnd);
- IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dstOpnd, func);
- callInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
- insertBeforeInstr->InsertBefore(callInstr);
- this->m_lowererMD.LowerCall(callInstr, 0);
- insertBeforeInstr->InsertBefore(doneLabel);
- return dstOpnd;
- }
// Common lowering for the string-conversion opcodes (ConvStr / CoerseStr /
// ConvPrimStr): emits a fast path that copies the source straight to the dst
// when it is already a string, and otherwise falls through to 'helper'
// (called with src1, the script context, and an optional src2).
void
Lowerer::LowerConvStrCommon(IR::JnHelperMethod helper, IR::Instr * instr)
{
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->GetValueType().IsNotString())
    {
        // Source may be a string: test it inline and skip the helper if so.
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        this->GenerateStringTest(src1Opnd, instr, helperLabel);
        InsertMove(instr->GetDst(), src1Opnd, instr);
        InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(helperLabel);
        instr->InsertAfter(doneLabel);
    }
    // Helper arguments are pushed in reverse order: optional src2 first,
    // then the script context, then src1.
    if (instr->GetSrc2())
    {
        this->m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    }
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd);
    this->m_lowererMD.ChangeToHelperCall(instr, helper);
}
// Lowers Conv_Str via the common string-conversion path using HelperOp_ConvString.
void
Lowerer::LowerConvStr(IR::Instr * instr)
{
    LowerConvStrCommon(IR::HelperOp_ConvString, instr);
}
// Lowers CoerseStr (coerce-to-string; spelling matches the opcode/helper names)
// via the common string-conversion path using HelperOp_CoerseString.
void
Lowerer::LowerCoerseStr(IR::Instr* instr)
{
    LowerConvStrCommon(IR::HelperOp_CoerseString, instr);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerCoerseStrOrRegex - This method is used for String.Replace(arg1, arg2)
///                                  where arg1 is regex or string
///                                  if arg1 is not regex, then do String.Replace(CoerseStr(arg1), arg2);
///
///     CoerseStrOrRegex arg1
///
///     if (value == regex) goto :done
///     else
/// helper:
///     ConvStr value
/// done:
///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseStrOrRegex(IR::Instr* instr)
{
    IR::RegOpnd * src1Opnd = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // if (value == regex) goto :done
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged (non-object) values can't be a regex: go convert to string.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    // Identify a regex by its vtable; anything else takes the helper path.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    // helper: ConvStr value
    LowerConvStr(instr);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerCoerseRegex - This method is used for String.Match(arg1)
///                             if arg1 is regex, then pass CreateRegEx(arg1) to String.Match
///
///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseRegex(IR::Instr* instr)
{
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged (non-object) values can't be a regex: take the helper path.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    // Fast path: already a JavascriptRegExp (identified by vtable) — use as-is.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    // Helper path: Op_CoerseRegex(src1, nullptr /*options*/, scriptContext).
    // Arguments are pushed in reverse order.
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::NewNull(instr->m_func)); // option
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd); // regex
    this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CoerseRegex);
}
// Lowers Conv_PrimStr (convert to primitive string) via the common
// string-conversion path using HelperOp_ConvPrimitiveString.
void
Lowerer::LowerConvPrimStr(IR::Instr * instr)
{
    LowerConvStrCommon(IR::HelperOp_ConvPrimitiveString, instr);
}
- void
- Lowerer::GenerateRecyclerAlloc(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
- {
- size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
- this->GenerateRecyclerAllocAligned(allocHelper, alignedSize, newObjDst, insertionPointInstr, inOpHelper);
- }
- void
- Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- IRType type = TyInt32;
- if (isZeroed)
- {
- if (value == 0)
- {
- // Recycler memory are zero initialized
- return;
- }
- if (value > 0 && value <= USHORT_MAX)
- {
- // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
- type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
- }
- }
- Func * func = this->m_func;
- InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
- }
- void
- Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- IRType type = TyUint32;
- if (isZeroed)
- {
- if (value == 0)
- {
- // Recycler memory are zero initialized
- return;
- }
- if (value <= USHORT_MAX)
- {
- // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
- type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
- }
- }
- Func * func = this->m_func;
- InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
- }
- void
- Lowerer::GenerateMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- if (isZeroed)
- {
- return;
- }
- GenerateMemInit(opnd, offset, IR::AddrOpnd::NewNull(m_func), insertBeforeInstr);
- }
- void
- Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- IRType type = value->GetType();
- Func * func = this->m_func;
- InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
- }
- void
- Lowerer::GenerateMemInit(IR::RegOpnd * opnd, IR::RegOpnd * offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- IRType type = value->GetType();
- Func * func = this->m_func;
- InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
- }
// The GenerateRecyclerMemInit* wrappers initialize fields of freshly
// recycler-allocated memory. They forward to the GenerateMemInit* overloads
// with isZeroed = true, allowing zero/null stores to be elided (recycler
// memory is zero-initialized) and small constants to use narrow stores.
void
Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr)
{
    GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
}
void
Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr)
{
    GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
}
void
Lowerer::GenerateRecyclerMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr)
{
    GenerateMemInitNull(opnd, offset, insertBeforeInstr, true);
}
void
Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr)
{
    GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
}
- void
- Lowerer::GenerateMemCopy(IR::Opnd * dst, IR::Opnd * src, uint32 size, IR::Instr * insertBeforeInstr)
- {
- Func * func = this->m_func;
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::IntConstOpnd::New(size, TyUint32, func));
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, src);
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, dst);
- IR::Instr * memcpyInstr = IR::Instr::New(Js::OpCode::Call, func);
- memcpyInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperMemCpy, func));
- insertBeforeInstr->InsertBefore(memcpyInstr);
- m_lowererMD.LowerCall(memcpyInstr, 3);
- }
- bool
- Lowerer::GenerateSimplifiedInt4Rem(
- IR::Instr *const remInstr,
- IR::LabelInstr *const skipBailOutLabel) const
- {
- Assert(remInstr);
- Assert(remInstr->m_opcode == Js::OpCode::Rem_I4 || remInstr->m_opcode == Js::OpCode::RemU_I4);
- auto *dst = remInstr->GetDst(), *src1 = remInstr->GetSrc1(), *src2 = remInstr->GetSrc2();
- Assert(src1 && src2);
- Assert(dst->IsRegOpnd());
- bool isModByPowerOf2 = (remInstr->HasBailOutInfo() && remInstr->GetBailOutKind() == IR::BailOnModByPowerOf2);
- if (PHASE_OFF(Js::Phase::MathFastPathPhase, remInstr->m_func->GetTopFunc()) && !isModByPowerOf2)
- return false;
- if (!(src2->IsIntConstOpnd() && Math::IsPow2(src2->AsIntConstOpnd()->AsInt32())) && !isModByPowerOf2)
- {
- return false;
- }
- // We have:
- // s3 = s1 % s2 , where s2 = +2^i
- //
- // Generate:
- // test s1, s1
- // js $slowPathLabel
- // s3 = and s1, 0x00..fff (2^i - 1)
- // jmp $doneLabel
- // $slowPathLabel:
- // (Slow path)
- // (Neg zero check)
- // (Bailout code)
- // $doneLabel:
- IR::LabelInstr *doneLabel = skipBailOutLabel, *slowPathLabel;
- if (!doneLabel)
- {
- doneLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func);
- remInstr->InsertAfter(doneLabel);
- }
- slowPathLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func, isModByPowerOf2);
- remInstr->InsertBefore(slowPathLabel);
- // test s1, s1
- InsertTest(src1, src1, slowPathLabel);
- // jsb $slowPathLabel
- InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
- // s3 = and s1, 0x00..fff (2^i - 1)
- IR::Opnd* maskOpnd;
- if(isModByPowerOf2)
- {
- Assert(isModByPowerOf2);
- maskOpnd = IR::RegOpnd::New(TyInt32, remInstr->m_func);
- // mov maskOpnd, s2
- InsertMove(maskOpnd, src2, slowPathLabel);
- // dec maskOpnd
- InsertSub(/*needFlags*/ true, maskOpnd, maskOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func, /*dontEncode*/true), slowPathLabel);
- // maskOpnd < 0 goto $slowPath
- InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
- // TEST src2, maskOpnd
- InsertTestBranch(src2, maskOpnd, Js::OpCode::BrNeq_A, slowPathLabel, slowPathLabel);
- }
- else
- {
- Assert(src2->IsIntConstOpnd());
- int32 mask = src2->AsIntConstOpnd()->AsInt32() - 1;
- maskOpnd = IR::IntConstOpnd::New(mask, TyInt32, remInstr->m_func);
- }
- // dst = src1 & maskOpnd
- InsertAnd(dst, src1, maskOpnd, slowPathLabel);
- // jmp $doneLabel
- InsertBranch(Js::OpCode::Br, doneLabel, slowPathLabel);
- return true;
- }
#if DBG
// Debug-only sanity check run after lowering: returns true if instr's opcode
// is still legitimate in the post-lower IR stream. Machine-dependent opcodes
// (> MDStart) are always valid; a small set of machine-independent opcodes
// legitimately survives lowering, some only until a later phase (peeps /
// final lower) removes them — those cases consult the phase flags on 'func'.
bool
Lowerer::ValidOpcodeAfterLower(IR::Instr* instr, Func * func)
{
    Js::OpCode opcode = instr->m_opcode;
    if (opcode > Js::OpCode::MDStart)
    {
        // Machine-dependent opcode: always valid after lowering.
        return true;
    }
    switch (opcode)
    {
    case Js::OpCode::Ret:
    case Js::OpCode::Label:
    case Js::OpCode::StatementBoundary:
    case Js::OpCode::DeletedNonHelperBranch:
    case Js::OpCode::FunctionEntry:
    case Js::OpCode::FunctionExit:
    case Js::OpCode::TryCatch:
    case Js::OpCode::TryFinally:
    case Js::OpCode::Catch:
    case Js::OpCode::GeneratorResumeJumpTable:
    case Js::OpCode::Break:
#ifdef _M_X64
    case Js::OpCode::PrologStart:
    case Js::OpCode::PrologEnd:
#endif
#ifdef _M_IX86
    case Js::OpCode::BailOutStackRestore:
#endif
        return true;
    case Js::OpCode::RestoreOutParam:
        Assert(func->isPostRegAlloc);
        return true;
        // These may be removed by peep
    case Js::OpCode::StartCall:
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::Nop:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
        return func && !func->isPostPeeps;
    case Js::OpCode::InlineeStart:
    case Js::OpCode::InlineeEnd:
        return instr->m_func->m_hasInlineArgsOpt;
#ifdef _M_X64
    case Js::OpCode::LdArgSize:
    case Js::OpCode::LdSpillSize:
        return func && !func->isPostFinalLower;
#endif
    case Js::OpCode::Leave:
        Assert(!func->IsLoopBodyInTry());
        Assert(func->HasTry() && func->DoOptimizeTry());
        return func && !func->isPostFinalLower; //Lowered in FinalLower phase
    case Js::OpCode::LazyBailOutThunkLabel:
        return func && func->HasLazyBailOut() && func->isPostFinalLower; //Lowered in FinalLower phase
    } // (stray ';' after the switch removed)
    return false;
}
#endif
// Lowers a profiled BeginSwitch (simple-JIT profiling build): records the
// switch value via HelperSimpleProfiledSwitch. Arguments are pushed in
// reverse order: switch value, profileId, then function body.
void Lowerer::LowerProfiledBeginSwitch(IR::JitProfilingInstr* instr)
{
    Assert(instr->isBeginSwitch);
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfiledSwitch, m_func));
    m_lowererMD.LowerCall(instr, 0);
}
// Lowers a profiled binary operation to a call to the given profiling helper
// 'meth'. Arguments are pushed in reverse order: src2, src1, profileId, then
// function body.
void Lowerer::LowerProfiledBinaryOp(IR::JitProfilingInstr* instr, IR::JnHelperMethod meth)
{
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
    instr->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
    m_lowererMD.LowerCall(instr, 0);
}
// Emits code that nulls out the frame pointer stored on the generator object,
// signalling completion to JavascriptGenerator::CallGenerator.
void Lowerer::GenerateNullOutGeneratorFrame(IR::Instr* insertInstr)
{
    // null out frame pointer on generator object to signal completion to JavascriptGenerator::CallGenerator
    // s = MOV prm1
    // s[offset of JavascriptGenerator::frame] = MOV nullptr
    // Load the generator object from the first formal parameter slot.
    StackSym *symSrc = StackSym::NewImplicitParamSym(3, m_func);
    m_func->SetArgOffset(symSrc, LowererMD::GetFormalParamOffset() * MachPtr);
    IR::SymOpnd *srcOpnd = IR::SymOpnd::New(symSrc, TyMachPtr, m_func);
    IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    InsertMove(dstOpnd, srcOpnd, insertInstr);
    // Store nullptr into the generator's frame field.
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(dstOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, m_func);
    IR::AddrOpnd *addrOpnd = IR::AddrOpnd::NewNull(m_func);
    InsertMove(indirOpnd, addrOpnd, insertInstr);
}
// Lowers FunctionExit: for coroutines (generators), nulls out the generator
// frame to mark completion; for simple-JIT dynamic-profile builds, also emits
// a call to clean the implicit-call flags before returning.
void Lowerer::LowerFunctionExit(IR::Instr* funcExit)
{
    if (m_func->GetJITFunctionBody()->IsCoroutine())
    {
        GenerateNullOutGeneratorFrame(funcExit->m_prev);
    }
    if (!m_func->DoSimpleJitDynamicProfile())
    {
        return;
    }
    // SimpleCleanImplicitCallFlags(functionBody) — emitted just before exit.
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleCleanImplicitCallFlags, m_func));
    funcExit->m_prev->InsertBefore(callInstr);
    m_lowererMD.LoadHelperArgument(callInstr, CreateFunctionBodyOpnd(funcExit->m_func));
    m_lowererMD.LowerCall(callInstr, 0);
}
// Lowers FunctionEntry: emits the call-count update (used to trigger full-JIT
// transition from simple JIT), and for simple-JIT dynamic-profile builds also
// emits argument profiling and clears the implicit-call flags.
void Lowerer::LowerFunctionEntry(IR::Instr* funcEntry)
{
    Assert(funcEntry->m_opcode == Js::OpCode::FunctionEntry);
    //Don't do a body call increment for loops or asm.js
    if (m_func->IsLoopBody() || m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        return;
    }
    IR::Instr *const insertBeforeInstr = this->m_func->GetFunctionEntryInsertionPoint();
    LowerFunctionBodyCallCountChange(insertBeforeInstr);
    if (m_func->DoSimpleJitDynamicProfile())
    {
        // Only generate the argument profiling if the function expects to have some arguments to profile and only if
        // it has implicit ArgIns (the latter is a restriction imposed by the Interpreter, so it is mirrored in SimpleJit)
        if (m_func->GetJITFunctionBody()->GetInParamsCount() > 1 && m_func->GetJITFunctionBody()->HasImplicitArgIns())
        {
            // Call out to the argument profiling helper
            IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
            callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfileParameters, m_func));
            insertBeforeInstr->InsertBefore(callInstr);
            m_lowererMD.LoadHelperArgument(callInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LowerCall(callInstr, 0);
        }
        // Clear existing ImplicitCallFlags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), insertBeforeInstr);
    }
}
// Emits the per-call counter update at function entry. In full JIT the counter
// is simply incremented; in simple JIT it is decremented with underflow
// protection, and on reaching the limit TransitionFromSimpleJit is called to
// promote the function to full JIT.
void Lowerer::LowerFunctionBodyCallCountChange(IR::Instr *const insertBeforeInstr)
{
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    const bool isSimpleJit = func->IsSimpleJit();
    // If full JIT is disabled there is nothing to count down toward.
    if ((isSimpleJit && PHASE_OFF(Js::FullJitPhase, m_func)))
    {
        return;
    }
    // mov countAddress, <countAddress>
    IR::RegOpnd *const countAddressOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseCountAddressOpnd(countAddressOpnd, func);
    InsertMove(
        countAddressOpnd,
        IR::AddrOpnd::New((Js::Var)func->GetWorkItem()->GetCallsCountAddress(), IR::AddrOpndKindDynamicMisc, func, true),
        insertBeforeInstr);
    IR::IndirOpnd *const countOpnd = IR::IndirOpnd::New(countAddressOpnd, 0, TyUint32, func);
    const IR::AutoReuseOpnd autoReuseCountOpnd(countOpnd, func);
    if(!isSimpleJit)
    {
        // Full JIT: just count the call.
        InsertAdd(false, countOpnd, countOpnd, IR::IntConstOpnd::New(1, TyUint32, func), insertBeforeInstr);
        return;
    }
    // Simple JIT: count down without underflowing; on reaching the threshold,
    // transition to full JIT.
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertDecUInt32PreventOverflow(
        countOpnd,
        countOpnd,
        insertBeforeInstr,
        &onOverflowInsertBeforeInstr);
    // ($overflow:)
    //     TransitionFromSimpleJit(framePointer)
    m_lowererMD.LoadHelperArgument(onOverflowInsertBeforeInstr, IR::Opnd::CreateFramePointerOpnd(func));
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperTransitionFromSimpleJit, func));
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
// Returns a memory operand referencing the thread context's implicit-call
// flags for the current function.
IR::Opnd*
Lowerer::GetImplicitCallFlagsOpnd()
{
    return GetImplicitCallFlagsOpnd(m_func);
}
// Returns a memory operand referencing the thread context's implicit-call
// flags for the given func.
IR::Opnd*
Lowerer::GetImplicitCallFlagsOpnd(Func * func)
{
    return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetImplicitCallFlagsAddr(), GetImplicitCallFlagsType(), func);
}
// Returns the constant (ImplicitCall_None) used to reset the implicit-call
// flags.
IR::Opnd*
Lowerer::CreateClearImplicitCallFlagsOpnd()
{
    return IR::IntConstOpnd::New(Js::ImplicitCall_None, GetImplicitCallFlagsType(), m_func);
}
// Emits a test of the inline cache's accessor flags, branching to labelNext
// when none of the relevant getter/setter bits are set. Which bits are tested
// depends on which of the inline-getter/setter phases are disabled.
void
Lowerer::GenerateFlagInlineCacheCheckForGetterSetter(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelNext)
{
    uint accessorFlagMask;
    if (PHASE_OFF(Js::InlineGettersPhase, insertBeforeInstr->m_func))
    {
        // Getters not inlined: only a setter hit counts.
        accessorFlagMask = Js::InlineCache::GetSetterFlagMask();
    }
    else if (PHASE_OFF(Js::InlineSettersPhase, insertBeforeInstr->m_func))
    {
        // Setters not inlined: only a getter hit counts.
        accessorFlagMask = Js::InlineCache::GetGetterFlagMask();
    }
    else
    {
        accessorFlagMask = Js::InlineCache::GetGetterSetterFlagMask();
    }
    // Generate:
    //
    //      TEST [&(inlineCache->u.accessor.flags)], Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag
    //      JEQ $next
    // NOTE(review): the indir uses insertBeforeInstr->m_func while the const
    // uses this->m_func — presumably equivalent here; confirm for inlinees.
    IR::Opnd * flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
    IR::Opnd * accessorOpnd = IR::IntConstOpnd::New(accessorFlagMask, TyInt8, this->m_func);
    InsertTestBranch(flagsOpnd, accessorOpnd, Js::OpCode::BrEq_A, labelNext, insertBeforeInstr);
}
- IR::BranchInstr *
- Lowerer::GenerateLocalInlineCacheCheck(
- IR::Instr * instrLdSt,
- IR::RegOpnd * opndType,
- IR::RegOpnd * inlineCache,
- IR::LabelInstr * labelNext,
- bool checkTypeWithoutProperty)
- {
- // Generate:
- //
- // CMP s1, [&(inlineCache->u.local.type/typeWithoutProperty)]
- // JNE $next
- IR::Opnd* typeOpnd;
- if (checkTypeWithoutProperty)
- {
- typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.typeWithoutProperty), TyMachReg, instrLdSt->m_func);
- }
- else
- {
- typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrLdSt->m_func);
- }
- InsertCompare(opndType, typeOpnd, instrLdSt);
- return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
- }
- IR::BranchInstr *
- Lowerer::GenerateProtoInlineCacheCheck(
- IR::Instr * instrLdSt,
- IR::RegOpnd * opndType,
- IR::RegOpnd * inlineCache,
- IR::LabelInstr * labelNext)
- {
- // Generate:
- //
- // CMP s1, [&(inlineCache->u.proto.type)]
- // JNE $next
- IR::Opnd* typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.type), TyMachReg, instrLdSt->m_func);
- InsertCompare(opndType, typeOpnd, instrLdSt);
- return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
- }
// Emits the accessor (flags) inline-cache type check:
//
//     CMP opndType, [&(inlineCache->u.accessor.type)]
//     JNE $next
void
Lowerer::GenerateFlagInlineCacheCheck(
    IR::Instr * instrLdSt,
    IR::RegOpnd * opndType,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelNext)
{
    // Generate:
    //
    //      CMP s1, [&(inlineCache->u.accessor.type)]
    //      JNE $next
    IR::Opnd* typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.type), TyMachReg, instrLdSt->m_func);
    // CMP s1, [&(inlineCache->u.flag.type)]
    InsertCompareBranch(opndType, typeOpnd, Js::OpCode::BrNeq_A, labelNext, instrLdSt);
}
- void
- Lowerer::GenerateLdFldFromLocalInlineCache(
- IR::Instr * instrLdFld,
- IR::RegOpnd * opndBase,
- IR::Opnd * opndDst,
- IR::RegOpnd * opndInlineCache,
- IR::LabelInstr * labelFallThru,
- bool isInlineSlot)
- {
- // Generate:
- //
- // s1 = MOV base->slots -- load the slot array
- // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
- // dst = MOV [s1 + s2 * Scale] -- load the value directly from the slot
- // JMP $fallthru
- IR::IndirOpnd * opndIndir = nullptr;
- IR::RegOpnd * opndSlotArray = nullptr;
- if (!isInlineSlot)
- {
- opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
- opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
- InsertMove(opndSlotArray, opndIndir, instrLdFld);
- }
- // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
- IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
- opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrLdFld->m_func);
- InsertMove(opndReg2, opndIndir, instrLdFld);
- if (isInlineSlot)
- {
- // dst = MOV [base + s2 * Scale] -- load the value directly from the slot
- opndIndir = IR::IndirOpnd::New(opndBase, opndReg2, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
- InsertMove(opndDst, opndIndir, instrLdFld);
- }
- else
- {
- // dst = MOV [s1 + s2 * Scale] -- load the value directly from the slot
- opndIndir = IR::IndirOpnd::New(opndSlotArray, opndReg2, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
- InsertMove(opndDst, opndIndir, instrLdFld);
- }
- // JMP $fallthru
- InsertBranch(Js::OpCode::Br, labelFallThru, instrLdFld);
- }
- void
- Lowerer::GenerateLdFldFromProtoInlineCache(
- IR::Instr * instrLdFld,
- IR::RegOpnd * opndBase,
- IR::Opnd * opndDst,
- IR::RegOpnd * inlineCache,
- IR::LabelInstr * labelFallThru,
- bool isInlineSlot)
- {
- // Generate:
- //
- // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
- // s1 = MOV [&s1->slots] -- load the slot array
- // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
- // dst = MOV [s1 + s2*4]
- // JMP $fallthru
- IR::IndirOpnd * opndIndir = nullptr;
- IR::RegOpnd * opndProtoSlots = nullptr;
- // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
- IR::RegOpnd * opndProto = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
- opndIndir = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.prototypeObject), TyMachReg, instrLdFld->m_func);
- InsertMove(opndProto, opndIndir, instrLdFld);
- if (!isInlineSlot)
- {
- // s1 = MOV [&s1->slots] -- load the slot array
- opndProtoSlots = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
- opndIndir = IR::IndirOpnd::New(opndProto, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
- InsertMove(opndProtoSlots, opndIndir, instrLdFld);
- }
- // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
- IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
- opndIndir = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.slotIndex), TyUint16, instrLdFld->m_func);
- InsertMove(opndSlotIndex, opndIndir, instrLdFld);
- if (isInlineSlot)
- {
- // dst = MOV [s1 + s2*4]
- opndIndir = IR::IndirOpnd::New(opndProto, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
- InsertMove(opndDst, opndIndir, instrLdFld);
- }
- else
- {
- // dst = MOV [s1 + s2*4]
- opndIndir = IR::IndirOpnd::New(opndProtoSlots, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
- InsertMove(opndDst, opndIndir, instrLdFld);
- }
- // JMP $fallthru
- InsertBranch(Js::OpCode::Br, labelFallThru, instrLdFld);
- }
- void
- Lowerer::GenerateLdFldFromFlagInlineCache(
- IR::Instr * insertBeforeInstr,
- IR::RegOpnd * opndBase,
- IR::Opnd * opndDst,
- IR::RegOpnd * opndInlineCache,
- IR::LabelInstr * labelFallThru,
- bool isInlineSlot)
- {
- // Generate:
- //
- // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
- // s1 = MOV [&s1->slots] -- load the slot array
- // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
- // dst = MOV [s1 + s2 * 4]
- // JMP $fallthru
- IR::IndirOpnd * opndIndir = nullptr;
- IR::RegOpnd * opndObjSlots = nullptr;
- // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
- IR::RegOpnd * opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
- opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);
- InsertMove(opndObject, opndIndir, insertBeforeInstr);
- if (!isInlineSlot)
- {
- // s1 = MOV [&s1->slots] -- load the slot array
- opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
- opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
- InsertMove(opndObjSlots, opndIndir, insertBeforeInstr);
- }
- // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
- IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
- opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
- InsertMove(opndSlotIndex, opndIndir, insertBeforeInstr);
- if (isInlineSlot)
- {
- // dst = MOV [s1 + s2 * 4]
- opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
- InsertMove(opndDst, opndIndir, insertBeforeInstr);
- }
- else
- {
- // dst = MOV [s1 + s2 * 4]
- opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
- InsertMove(opndDst, opndIndir, insertBeforeInstr);
- }
- // JMP $fallthru
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- }
- void
- Lowerer::LowerSpreadArrayLiteral(IR::Instr *instr)
- {
- LoadScriptContext(instr);
- IR::Opnd *src2Opnd = instr->UnlinkSrc2();
- m_lowererMD.LoadHelperArgument(instr, src2Opnd);
- IR::Opnd *src1Opnd = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src1Opnd);
- this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperSpreadArrayLiteral);
- }
// Lowers a call with spread arguments to the SpreadCall helper (or its
// profiled NewScObjArray variant). Splices the LdSpreadIndices instruction out
// of the arg chain, lowers the remaining args normally, then prepends the
// helper-specific arguments. Returns the lowered call instruction.
IR::Instr *
Lowerer::LowerSpreadCall(IR::Instr *instr, Js::CallFlags callFlags, bool setupProfiledVersion)
{
    // Get the target function object, and emit function object test.
    IR::RegOpnd * functionObjOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    functionObjOpnd->m_isCallArg = true;
    if (!(callFlags & Js::CallFlags_New) && !setupProfiledVersion)
    {
        IR::LabelInstr* continueAfterExLabel = InsertContinueAfterExceptionLabelForDebugger(m_func, instr, false);
        this->m_lowererMD.GenerateFunctionObjectTest(instr, functionObjOpnd, false, continueAfterExLabel);
    }
    IR::Instr *spreadIndicesInstr;
    spreadIndicesInstr = GetLdSpreadIndicesInstr(instr);
    Assert(spreadIndicesInstr->m_opcode == Js::OpCode::LdSpreadIndices);
    // Get AuxArray
    IR::Opnd *spreadIndicesOpnd = spreadIndicesInstr->UnlinkSrc1();
    // Remove LdSpreadIndices from the argument chain
    instr->ReplaceSrc2(spreadIndicesInstr->UnlinkSrc2());
    // Emit the normal args
    if (!(callFlags & Js::CallFlags_New))
    {
        // Non-constructor calls: record whether the result is used.
        callFlags = (Js::CallFlags)(callFlags | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
    }
    // Profiled helper call requires three more parameters, ArrayProfileId, profileId, and the frame pointer.
    // This is just following the convention of HelperProfiledNewScObjArray call.
    const unsigned short extraArgsCount = setupProfiledVersion ? 5 : 2; // function object and AuxArray
    int32 argCount = this->m_lowererMD.LowerCallArgs(instr, (ushort)callFlags, extraArgsCount);
    // Emit our extra (first) args for the Spread helper in reverse order
    if (setupProfiledVersion)
    {
        IR::JitProfilingInstr* jitInstr = (IR::JitProfilingInstr*)instr;
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
    }
    m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
    m_lowererMD.LoadHelperArgument(instr, spreadIndicesOpnd);
    // Change the call target to our helper
    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(setupProfiledVersion ? IR::HelperProfiledNewScObjArraySpread : IR::HelperSpreadCall, this->m_func);
    instr->SetSrc1(helperOpnd);
    return this->m_lowererMD.LowerCall(instr, (Js::ArgSlot)argCount);
}
// Common asm.js lowering for Div_I4/Rem_I4 (signed and unsigned variants):
// emits explicit guards so that division by zero yields 0, and for signed
// operations INT_MIN / -1 yields src1 for div (i.e. INT_MIN) and 0 for rem,
// instead of faulting in the machine divide.
void
Lowerer::LowerDivI4Common(IR::Instr * instr)
{
    Assert(instr);
    Assert((instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::Div_I4) ||
        (instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4));
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());

    const bool isRem = instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4;

    // MIN_INT/-1 path is only needed for signed operations
    //
    //       TEST src2, src2
    //       JEQ $div0
    //       CMP src1, MIN_INT
    //       JEQ $minInt
    //       JMP $div
    // $div0: [helper]
    //       MOV dst, 0
    //       JMP $done
    // $minInt: [helper]
    //       CMP src2, -1
    //       JNE $div
    // dst = MOV src1 / 0
    //       JMP $done
    // $div:
    // dst = IDIV src2, src1
    // $done:
    IR::LabelInstr * div0Label = InsertLabel(true, instr);
    IR::LabelInstr * divLabel = InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    bool isWasm = m_func->GetJITFunctionBody()->IsWasmFunction();
    // Only Rem reaches this path for wasm functions.
    Assert(!isWasm || isRem);
    if (!isWasm)
    {
        // Divide-by-zero check: result is 0 when the divisor is 0.
        InsertTestBranch(src2, src2, Js::OpCode::BrEq_A, div0Label, div0Label);
        InsertMove(dst, IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
        InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
    }
    if (instr->GetSrc1()->IsSigned())
    {
        IR::LabelInstr * minIntLabel = nullptr;
        // we need to check for INT_MIN/-1 if divisor is either -1 or variable, and dividend is either INT_MIN or variable
        int64 intMin = IRType_IsInt64(src1->GetType()) ? LONGLONG_MIN : INT_MIN;
        bool needsMinOverNeg1Check = !(src2->IsImmediateOpnd() && src2->GetImmediateValue(m_func) != -1);
        if (src1->IsImmediateOpnd())
        {
            if (needsMinOverNeg1Check && src1->GetImmediateValue(m_func) == intMin)
            {
                // Dividend is known to be INT_MIN: jump straight to the -1 check.
                minIntLabel = InsertLabel(true, divLabel);
                InsertBranch(Js::OpCode::Br, minIntLabel, div0Label);
            }
            else
            {
                // Dividend is a constant other than INT_MIN: no overflow possible.
                needsMinOverNeg1Check = false;
            }
        }
        else if(needsMinOverNeg1Check)
        {
            // Runtime dividend: compare against INT_MIN.
            minIntLabel = InsertLabel(true, divLabel);
            InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrEq_A, minIntLabel, div0Label);
        }
        if (needsMinOverNeg1Check)
        {
            Assert(minIntLabel);
            Assert(!src2->IsImmediateOpnd() || src2->GetImmediateValue(m_func) == -1);
            if (!src2->IsImmediateOpnd())
            {
                // Only trap case when the divisor is also -1.
                InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, divLabel, divLabel);
            }
            // INT_MIN / -1: div yields src1 (INT_MIN); rem yields 0.
            InsertMove(dst, !isRem ? src1 : IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
            InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
        }
    }
    InsertBranch(Js::OpCode::Br, divLabel, div0Label);
    m_lowererMD.EmitInt4Instr(instr);
}
- void
- Lowerer::LowerRemI4(IR::Instr * instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4);
- //Generate fast path for const divisors
- if (m_lowererMD.GenerateFastDivAndRem(instr))
- {
- return;
- }
- if (m_func->GetJITFunctionBody()->IsAsmJsMode())
- {
- LowerDivI4Common(instr);
- }
- else
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- }
- void
- Lowerer::LowerTrapIfZero(IR::Instr * const instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::TrapIfZero);
- Assert(instr->GetSrc1());
- Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
- IR::Opnd * src1 = instr->GetSrc1();
- if (src1->IsImmediateOpnd())
- {
- if (src1->GetImmediateValue(m_func) == 0)
- {
- GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), instr);
- }
- }
- else
- {
- IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
- InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(0, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
- InsertLabel(true, doneLabel);
- GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), doneLabel);
- }
- LowererMD::ChangeToAssign(instr);
- }
// TrapIfUnalignedAccess (wasm atomics): copy src1 into dst, then check that
// (src1 & (accessSize - 1)) equals src2's immediate value; when it does not,
// throw WASMERR_UnalignedAtomicAccess. Returns the not-yet-lowered compare
// branch so the main lowering loop can finish it.
IR::Instr*
Lowerer::LowerTrapIfUnalignedAccess(IR::Instr * const instr)
{
    IR::Opnd* dst = instr->UnlinkDst();
    IR::Opnd* src1 = instr->UnlinkSrc1();
    IR::Opnd* src2 = instr->GetSrc2();
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::TrapIfUnalignedAccess);
    Assert(src1 && !src1->IsVar());
    Assert(src2 && src2->IsImmediateOpnd());
    Assert(src2->GetSize() > 1);
    // Low bits that must match for an access of this operand size.
    uint32 mask = src2->GetSize() - 1;
    uint32 cmpValue = (uint32)src2->GetImmediateValue(m_func);
    InsertMove(dst, src1, instr);
    // maskedOpnd = src1 & mask
    IR::IntConstOpnd* maskOpnd = IR::IntConstOpnd::New(mask, src1->GetType(), m_func);
    IR::RegOpnd* maskedOpnd = IR::RegOpnd::New(src1->GetType(), m_func);
    IR::Instr* maskInstr = IR::Instr::New(Js::OpCode::And_I4, maskedOpnd, src1, maskOpnd, m_func);
    instr->InsertBefore(maskInstr);
    // Branch past the throw when the masked bits match the expected value.
    IR::IntConstOpnd* cmpOpnd = IR::IntConstOpnd::New(cmpValue, maskedOpnd->GetType(), m_func, true);
    IR::LabelInstr* alignedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::Instr* branch = IR::BranchInstr::New(Js::OpCode::BrEq_I4, alignedLabel, maskedOpnd, cmpOpnd, m_func);
    instr->InsertBefore(branch);
    // Fall-through (helper) path: the access is unaligned, so throw.
    InsertLabel(true, instr);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_UnalignedAtomicAccess), TyInt32, m_func), instr);
    instr->InsertBefore(alignedLabel);
    instr->Remove();
    // The check and branch are not fully lowered yet, let them go in the lower loop.
    return branch;
}
// TrapIfMinIntOverNegOne (wasm): throw VBSERR_Overflow when src1 == INT_MIN
// (or LONGLONG_MIN for 64-bit) and src2 == -1 — the one signed-division case
// that overflows. When either operand is a constant that rules the trap out,
// the checks are skipped entirely and the instruction becomes a plain assign.
void
Lowerer::LowerTrapIfMinIntOverNegOne(IR::Instr * const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::TrapIfMinIntOverNegOne);
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());

    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    int64 intMin = src1->IsInt64() ? LONGLONG_MIN : INT_MIN;
    if (src1->IsImmediateOpnd())
    {
        if (src1->GetImmediateValue(m_func) != intMin)
        {
            // Const value not min int, will not trap
            doneLabel->Remove();
            src2->Free(m_func);
            LowererMD::ChangeToAssign(instr);
            return;
        }
        // Is min int no need to do check
    }
    else
    {
        // Runtime dividend: skip the trap when it is not INT_MIN.
        InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
    }
    if (src2->IsImmediateOpnd())
    {
        if (src2->GetImmediateValue(m_func) != -1)
        {
            // Const value not -1, will not trap
            doneLabel->Remove();
            src2->Free(m_func);
            LowererMD::ChangeToAssign(instr);
            return;
        }
        // Is -1 no need to do check
        src2->Free(m_func);
    }
    else
    {
        // Runtime divisor: skip the trap when it is not -1.
        InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
    }
    // Both checks fell through: INT_MIN / -1, throw overflow.
    InsertLabel(true, doneLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), doneLabel);
    LowererMD::ChangeToAssign(instr);
}
- void
- Lowerer::GenerateThrow(IR::Opnd* errorCode, IR::Instr * instr)
- {
- IR::Instr *throwInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, IR::RegOpnd::New(TyMachReg, m_func), errorCode, m_func);
- instr->InsertBefore(throwInstr);
- const bool isWasm = m_func->GetJITFunctionBody() && m_func->GetJITFunctionBody()->IsWasmFunction();
- LowerUnaryHelperMem(throwInstr, isWasm ? IR::HelperOp_WebAssemblyRuntimeError : IR::HelperOp_RuntimeTypeError);
- }
- void
- Lowerer::LowerDivI4(IR::Instr * instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::Div_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
- #ifdef _M_IX86
- if (
- instr->GetDst() && instr->GetDst()->IsInt64() ||
- instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
- instr->GetSrc2() && instr->GetSrc2()->IsInt64()
- )
- {
- m_lowererMD.EmitInt64Instr(instr);
- return;
- }
- #endif
- Assert(instr->GetSrc2());
- if (m_func->GetJITFunctionBody()->IsWasmFunction())
- {
- if (!m_lowererMD.GenerateFastDivAndRem(instr))
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- return;
- }
- if (m_func->GetJITFunctionBody()->IsAsmJsMode())
- {
- if (!m_lowererMD.GenerateFastDivAndRem(instr))
- {
- LowerDivI4Common(instr);
- }
- return;
- }
- if(!instr->HasBailOutInfo())
- {
- if (!m_lowererMD.GenerateFastDivAndRem(instr))
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- return;
- }
- Assert(!(instr->GetBailOutKind() & ~(IR::BailOnDivResultNotInt | IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero | IR::BailOutOnDivOfMinInt)));
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- // Split out and generate the bailout instruction
- const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
- instr->TransferTo(nonBailOutInstr);
- instr->InsertBefore(nonBailOutInstr);
- IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
- instr->InsertAfter(doneLabel);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- IR::LabelInstr * bailOutLabel = GenerateBailOut(instr);
- IR::Opnd * denominatorOpnd = nonBailOutInstr->GetSrc2();
- IR::Opnd * nominatorOpnd = nonBailOutInstr->GetSrc1();
- bool isFastDiv = false;
- if (bailOutKind & IR::BailOutOnDivOfMinInt)
- {
- // Bailout if numerator is MIN_INT (could also check for denominator being -1
- // before bailing out, but does not seem worth the extra code..)
- InsertCompareBranch(nominatorOpnd, IR::IntConstOpnd::New(INT32_MIN, TyInt32, this->m_func, true), Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
- }
- if (denominatorOpnd->IsIntConstOpnd() && Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
- {
- Assert((bailOutKind & (IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero)) == 0);
- if (Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
- {
- int pow2 = denominatorOpnd->AsIntConstOpnd()->AsInt32();
- InsertTestBranch(nominatorOpnd, IR::IntConstOpnd::New(pow2 - 1, TyInt32, this->m_func),
- Js::OpCode::BrNeq_A, bailOutLabel, nonBailOutInstr);
- nonBailOutInstr->m_opcode = Js::OpCode::Shr_A;
- nonBailOutInstr->ReplaceSrc2(IR::IntConstOpnd::New(Math::Log2(pow2), TyInt32, this->m_func));
- LowererMD::ChangeToShift(nonBailOutInstr, false);
- LowererMD::Legalize(nonBailOutInstr);
- isFastDiv = true;
- }
- else
- {
- isFastDiv = m_lowererMD.GenerateFastDivAndRem(nonBailOutInstr, bailOutLabel);
- }
- }
- if (!isFastDiv)
- {
- if (bailOutKind & IR::BailOutOnDivByZero)
- {
- // Bailout if denominator is 0
- InsertTestBranch(denominatorOpnd, denominatorOpnd, Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
- }
- // Lower the div and bailout if there is a reminder (machine specific)
- IR::Instr * insertBeforeInstr = m_lowererMD.LowerDivI4AndBailOnReminder(nonBailOutInstr, bailOutLabel);
- IR::Opnd * resultOpnd = nonBailOutInstr->GetDst();
- if (bailOutKind & IR::BailOutOnNegativeZero)
- {
- // TEST result, result
- // JNE skipNegDenominatorCheckLabel // Result not 0
- // TEST denominator, denominator
- // JNSB/BMI bailout // bail if negative
- // skipNegDenominatorCheckLabel:
- IR::LabelInstr * skipNegDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- // Skip negative denominator check if the result is not 0
- InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrNeq_A, skipNegDenominatorCheckLabel, insertBeforeInstr);
- IR::LabelInstr * negDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- insertBeforeInstr->InsertBefore(negDenominatorCheckLabel);
- // Jump to done if the denominator is not negative
- InsertTestBranch(denominatorOpnd, denominatorOpnd,
- LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), bailOutLabel, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(skipNegDenominatorCheckLabel);
- }
- }
- // We are all fine, jump around the bailout to done
- InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabel);
- }
- void
- Lowerer::LowerRemR8(IR::Instr * instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::Rem_A);
- Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
- m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
- m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperOp_Rem_Double, m_func));
- m_lowererMD.LowerCall(instr, 0);
- }
// Lower NewScopeSlots: allocate the scope-slot array (from the recycler, or
// already stack-allocated when doStackSlots) and initialize it — the encoded
// slot count, the function-info slot, and undefined in every user slot.
// Small slot counts are initialized with straight-line stores; larger counts
// use an unrolled loop (loopUnrollCount stores per iteration).
void
Lowerer::LowerNewScopeSlots(IR::Instr * instr, bool doStackSlots)
{
    Func * func = m_func;
    if (PHASE_OFF(Js::NewScopeSlotFastPathPhase, func))
    {
        // Fast path disabled: fall back to the helper.
        this->LowerUnaryHelperMemWithFunctionInfo(instr, IR::HelperOP_NewScopeSlots);
        return;
    }
    uint const count = instr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    uint const allocSize = count * sizeof(Js::Var);
    // Slots before FirstSlotIndex are header slots (count/metadata), not user slots.
    uint const actualSlotCount = count - Js::ScopeSlots::FirstSlotIndex;
    IR::RegOpnd * dst = instr->UnlinkDst()->AsRegOpnd();
    // dst = RecyclerAlloc(allocSize)
    // dst[EncodedSlotCountSlotIndex] = min(actualSlotCount, MaxEncodedSlotCount);
    // dst[ScopeMetadataSlotIndex] = FunctionBody;
    // mov undefinedOpnd, undefined
    // dst[FirstSlotIndex..count] = undefinedOpnd;
    // Note: stack allocation of both scope slots and frame display are done together
    // in lowering of NewStackFrameDisplay
    if (!doStackSlots)
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, allocSize, dst, instr);
    }
    m_lowererMD.GenerateMemInit(dst, Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
        (size_t)min<uint>(actualSlotCount, Js::ScopeSlots::MaxEncodedSlotCount), instr, !doStackSlots);
    IR::Opnd * functionInfoOpnd = this->LoadFunctionInfoOpnd(instr);
    GenerateMemInit(dst, Js::ScopeSlots::ScopeMetadataSlotIndex * sizeof(Js::Var),
        functionInfoOpnd, instr, !doStackSlots);
    IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    const IR::AutoReuseOpnd autoReuseUndefinedOpnd(undefinedOpnd, func);
    // avoid using a register for the undefined pointer if we are going to assign 1 or 2
    if (actualSlotCount > 2)
    {
        undefinedOpnd = GetRegOpnd(undefinedOpnd, instr, func, TyVar);
    }
    int const loopUnrollCount = 8;
    if (actualSlotCount <= loopUnrollCount * 2)
    {
        // Just generate all the assignment in straight line code
        //  mov[dst + Js::FirstSlotIndex], undefinedOpnd
        // ...
        //  mov[dst + count - 1], undefinedOpnd
        for (unsigned int i = Js::ScopeSlots::FirstSlotIndex; i < count; i++)
        {
            GenerateMemInit(dst, sizeof(Js::Var) * i, undefinedOpnd, instr, !doStackSlots);
        }
    }
    else
    {
        // Just generate all the assignment in loop of loopUnrollCount and the rest as straight line code
        //
        //      lea currOpnd, [dst + sizeof(Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount)];
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 1] , undefinedOpnd
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 2] , undefinedOpnd
        //      ...
        //      mov [currOpnd + loopUnrollCount], undefinedOpnd
        // $LoopTop:
        //      mov [currOpnd + loopUnrollCount - 1], undefinedOpnd
        //      mov [currOpnd + loopUnrollCount - 2], undefinedOpnd
        //      ...
        //      mov [currOpnd], undefinedOpnd
        //      lea currOpnd, [currOpnd - loopUnrollCount]
        //      cmp dst, currOpnd
        //      jlt $Looptop
        uint nLoop = actualSlotCount / loopUnrollCount;
        uint loopAssignCount = nLoop * loopUnrollCount;
        uint leftOverAssignCount = actualSlotCount - loopAssignCount; // The left over assignments
        IR::RegOpnd * currOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseCurrOpnd(currOpnd, m_func);
        InsertLea(
            currOpnd,
            IR::IndirOpnd::New(
                dst,
                sizeof(Js::Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount),
                TyMachPtr,
                func),
            instr);
        // Straight-line stores for the slots that don't fill a whole unroll.
        for (unsigned int i = 0; i < leftOverAssignCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount + leftOverAssignCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        IR::LabelInstr * loopTop = InsertLoopTopLabel(instr);
        Loop * loop = loopTop->GetLoop();
        // One unrolled iteration: loopUnrollCount stores, walking down in memory.
        for (unsigned int i = 0; i < loopUnrollCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        InsertLea(currOpnd, IR::IndirOpnd::New(currOpnd, -((int)sizeof(Js::Var) * loopUnrollCount), TyMachPtr, func), instr);
        InsertCompareBranch(dst, currOpnd, Js::OpCode::BrLt_A, true, loopTop, instr);
        // Keep the loop-carried registers alive across the back edge for the allocator.
        loop->regAlloc.liveOnBackEdgeSyms->Set(currOpnd->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(dst->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(undefinedOpnd->AsRegOpnd()->m_sym->m_id);
    }
    if (!doStackSlots)
    {
        // Publish the new slot array as the function's local closure.
        InsertMove(IR::RegOpnd::New(instr->m_func->GetLocalClosureSym(), TyMachPtr, func), dst, instr);
    }
    instr->Remove();
}
- void Lowerer::LowerLdInnerFrameDisplay(IR::Instr *instr)
- {
- bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
- if (isStrict)
- {
- if (instr->GetSrc2())
- {
- this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplay);
- }
- else
- {
- #if DBG
- instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
- #endif
- this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplayNoParent);
- }
- }
- else
- {
- if (instr->GetSrc2())
- {
- this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplay);
- }
- else
- {
- #if DBG
- instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
- #endif
- this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplayNoParent);
- }
- }
- }
// Lower LdFrameDisplay: build a frame display of length envDepth+1 whose
// first scope is src1 (the current frame) and whose remaining scopes are
// copied from the parent environment (src2). Falls back to helper calls when
// the environment depth is unknown or the fast path is off; optionally
// allocates the display and scope slots on the stack for stack-nested
// functions (doStackFrameDisplay).
void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
{
    bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
    uint16 envDepth = instr->m_func->GetJITFunctionBody()->GetEnvDepth();
    Func *func = this->m_func;
    // envDepth of -1 indicates unknown depth (eval expression or HTML event handler).
    // We could still fast-path these by generating a loop over the (dynamically loaded) scope chain length,
    // but I doubt it's worth it.
    // If the dst opnd is a byte code temp, that indicates we're prepending a block scope or some such and
    // shouldn't attempt to do this.
    if (envDepth == (uint16)-1 ||
        (!doStackFrameDisplay && (instr->isNonFastPathFrameDisplay || instr->GetDst()->AsRegOpnd()->m_sym->IsTempReg(instr->m_func))) ||
        PHASE_OFF(Js::FrameDisplayFastPathPhase, func))
    {
        // Helper fallback: pick the helper by strict mode and presence of a parent env.
        if (isStrict)
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplayNoParent);
            }
        }
        else
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplayNoParent);
            }
        }
        return;
    }
    uint16 frameDispLength = envDepth + 1;
    Assert(frameDispLength > 0);
    IR::RegOpnd *dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::RegOpnd *currentFrameOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    uint allocSize = sizeof(Js::FrameDisplay) + (frameDispLength * sizeof(Js::Var));
    if (doStackFrameDisplay)
    {
        IR::Instr *insertInstr = func->GetFunctionEntryInsertionPoint();
        // Initialize stack pointers for scope slots and frame display together at the top of the function
        // (in case we bail out before executing the instructions).
        IR::LabelInstr *labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // Check whether stack functions have been disabled since we jitted.
        // If they have, then we must allocate closure memory on the heap.
        InsertTestBranch(IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, m_func),
                         IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, m_func, true),
                         Js::OpCode::BrEq_A, labelNoStackFunc, insertInstr);
        // allocSize is greater than TyMachPtr and hence changing the initial size to TyMisc
        StackSym * stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, allocSize);
        InsertLea(dstOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);
        uint scopeSlotAllocSize =
            (m_func->GetJITFunctionBody()->GetScopeSlotArraySize() + Js::ScopeSlots::FirstSlotIndex) * sizeof(Js::Var);
        stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, scopeSlotAllocSize);
        InsertLea(currentFrameOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);
        InsertBranch(Js::OpCode::Br, labelDone, insertInstr);
        // Stack functions disabled: allocate both blocks from the recycler instead.
        insertInstr->InsertBefore(labelNoStackFunc);
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, insertInstr, true);
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, scopeSlotAllocSize, currentFrameOpnd, insertInstr, true);
        insertInstr->InsertBefore(labelDone);
        // Record both pointers in their dedicated local slots.
        InsertMove(IR::SymOpnd::New(m_func->GetLocalFrameDisplaySym(), 0, TyMachReg, m_func), dstOpnd, insertInstr);
        InsertMove(IR::SymOpnd::New(m_func->GetLocalClosureSym(), 0, TyMachReg, m_func), currentFrameOpnd, insertInstr);
    }
    else
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
    }
    // Copy contents of environment
    // Work back to front to leave the head element(s) in cache
    if (envDepth > 0)
    {
        IR::RegOpnd *envOpnd = instr->UnlinkSrc2()->AsRegOpnd();
        for (uint16 i = envDepth; i >= 1; i--)
        {
            // Parent scope i-1 becomes slot i in the new display (slot 0 is the new frame).
            IR::Opnd *scopeOpnd = IR::RegOpnd::New(TyMachReg, func);
            IR::Opnd *envLoadOpnd =
                IR::IndirOpnd::New(envOpnd, Js::FrameDisplay::GetOffsetOfScopes() + ((i - 1) * sizeof(Js::Var)), TyMachReg, func);
            InsertMove(scopeOpnd, envLoadOpnd, instr);
            IR::Opnd *dstStoreOpnd =
                IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes() + (i * sizeof(Js::Var)), TyMachReg, func);
            InsertMove(dstStoreOpnd, scopeOpnd, instr);
        }
    }
    // Assign current element.
    InsertMove(
        IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, func),
        currentFrameOpnd,
        instr);
    // Combine tag, strict mode flag, and length
    uintptr_t bits = 1 |
        (isStrict << (Js::FrameDisplay::GetOffsetOfStrictMode() * 8)) |
        (frameDispLength << (Js::FrameDisplay::GetOffsetOfLength() * 8));
    InsertMove(
        IR::IndirOpnd::New(dstOpnd, 0, TyMachReg, func),
        IR::IntConstOpnd::New(bits, TyMachReg, func, true),
        instr);
    instr->Remove();
}
- IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Func *const func) const
- {
- return IR::AddrOpnd::New(func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, m_func, true);
- }
- IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Js::FunctionBody *const functionBody) const
- {
- // TODO: OOP JIT, CreateFunctionBodyOpnd
- Assert(!m_func->IsOOPJIT());
- return IR::AddrOpnd::New(functionBody, IR::AddrOpndKindDynamicFunctionBody, m_func, true);
- }
- bool
- Lowerer::GenerateRecyclerOrMarkTempAlloc(IR::Instr * instr, IR::RegOpnd * dstOpnd, IR::JnHelperMethod allocHelper, size_t allocSize, IR::SymOpnd ** tempObjectSymOpnd)
- {
- if (instr->dstIsTempObject)
- {
- *tempObjectSymOpnd = GenerateMarkTempAlloc(dstOpnd, allocSize, instr);
- return false;
- }
- this->GenerateRecyclerAlloc(allocHelper, allocSize, dstOpnd, instr);
- *tempObjectSymOpnd = nullptr;
- return true;
- }
// Stack-allocate a mark-temp object of allocSize bytes plus one extra pointer
// slot placed just below the object (used to hold the boxed instance), and
// LEA the object's address into dstOpnd. Returns the sym opnd addressing the
// object itself (offset past the extra slot).
IR::SymOpnd *
Lowerer::GenerateMarkTempAlloc(IR::RegOpnd *const dstOpnd, const size_t allocSize, IR::Instr *const insertBeforeInstr)
{
    Assert(dstOpnd);
    Assert(allocSize != 0);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    // Allocate stack space for the reg exp instance, and a slot for the boxed value
    StackSym *const tempObjectSym = StackSym::New(TyMisc, func);
    m_func->StackAllocate(tempObjectSym, (int)(allocSize + sizeof(void *)));
    // Offset by one pointer so dstOpnd points at the object, not the boxed-value slot.
    IR::SymOpnd * tempObjectOpnd = IR::SymOpnd::New(tempObjectSym, sizeof(void *), TyVar, func);
    InsertLea(dstOpnd, tempObjectOpnd, insertBeforeInstr);
    // Initialize the boxed instance slot
    if (this->outerMostLoopLabel == nullptr)
    {
        // Not inside a loop: null out the slot right here.
        GenerateMemInit(dstOpnd, -(int)sizeof(void *), IR::AddrOpnd::NewNull(func), insertBeforeInstr, false);
    }
    else if (!PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func))
    {
        // Inside a loop: hoist the one-time init to the outermost loop label.
        InsertMove(IR::SymOpnd::New(tempObjectSym, TyMachPtr, func), IR::AddrOpnd::NewNull(func), this->outerMostLoopLabel, false);
    }
    return tempObjectOpnd;
}
- void Lowerer::LowerBrFncCachedScopeEq(IR::Instr *instr)
- {
- Assert(instr->m_opcode == Js::OpCode::BrFncCachedScopeEq || instr->m_opcode == Js::OpCode::BrFncCachedScopeNeq);
- Js::OpCode opcode = (instr->m_opcode == Js::OpCode::BrFncCachedScopeEq ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A);
- IR::RegOpnd *src1Reg = instr->UnlinkSrc1()->AsRegOpnd();
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1Reg, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), TyMachReg, this->m_func);
- this->InsertCompareBranch(indirOpnd, instr->UnlinkSrc2(), opcode, false, instr->AsBranchInstr()->GetTarget(), instr->m_next);
- instr->Remove();
- }
- IR::Instr* Lowerer::InsertLoweredRegionStartMarker(IR::Instr* instrToInsertBefore)
- {
- AssertMsg(instrToInsertBefore->m_prev != nullptr, "Can't insert lowered region start marker as the first instr in the func.");
- IR::LabelInstr* startMarkerLabel = IR::LabelInstr::New(Js::OpCode::Label, instrToInsertBefore->m_func);
- instrToInsertBefore->InsertBefore(startMarkerLabel);
- return startMarkerLabel;
- }
- IR::Instr* Lowerer::RemoveLoweredRegionStartMarker(IR::Instr* startMarkerInstr)
- {
- AssertMsg(startMarkerInstr->m_prev != nullptr, "Lowered region start marker became the first instruction in the func after lowering?");
- IR::Instr* prevInstr = startMarkerInstr->m_prev;
- startMarkerInstr->Remove();
- return prevInstr;
- }
- IR::Instr* Lowerer::GetLdSpreadIndicesInstr(IR::Instr *instr)
- {
- IR::Opnd *src2 = instr->GetSrc2();
- if (!src2->IsSymOpnd())
- {
- return nullptr;
- }
- IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
- StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
- Assert(argLinkSym->IsSingleDef());
- return argLinkSym->m_instrDef;
- }
- bool Lowerer::IsSpreadCall(IR::Instr *instr)
- {
- IR::Instr *lastInstr = GetLdSpreadIndicesInstr(instr);
- return lastInstr && lastInstr->m_opcode == Js::OpCode::LdSpreadIndices;
- }
- // When under debugger, generate a new label to be used as safe place to jump after ignore exception,
- // insert it after insertAfterInstr, and return the label inserted.
- // Returns nullptr/NoOP for non-debugger code path.
- //static
- IR::LabelInstr* Lowerer::InsertContinueAfterExceptionLabelForDebugger(Func* func, IR::Instr* insertAfterInstr, bool isHelper)
- {
- Assert(func);
- Assert(insertAfterInstr);
- IR::LabelInstr* continueAfterExLabel = nullptr;
- if (func->IsJitInDebugMode())
- {
- continueAfterExLabel = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
- insertAfterInstr->InsertAfter(continueAfterExLabel);
- }
- return continueAfterExLabel;
- }
// Lower a switch over single-character strings into a jump-table lookup:
// send strings whose length != 1 to the default target, materialize the
// string buffer (calling the GetSz helper when the cached pointer is null),
// load the character, rebase it by the table's base case value, range-check
// against the last case, and dispatch through the jump table.
void Lowerer::GenerateSingleCharStrJumpTableLookup(IR::Instr * instr)
{
    IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
    Func * func = instr->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    // MOV strLengthOpnd, str->length
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
    // CMP strLengthOpnd, 1
    // JNE defaultLabel
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)multiBrInstr->GetBranchJumpTable()->defaultTarget;
    InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    // MOV strBuffer, str->psz
    IR::RegOpnd * strBufferOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(strBufferOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, func), instr);
    // TST strBuffer, strBuffer
    // JNE $continue
    InsertTestBranch(strBufferOpnd, strBufferOpnd, Js::OpCode::BrNeq_A, continueLabel, instr);
    // $helper:
    //      PUSH str
    //      CALL JavascriptString::GetSzHelper
    //      MOV strBuffer, eax
    // $continue:
    instr->InsertBefore(helperLabel);
    m_lowererMD.LoadHelperArgument(instr, instr->GetSrc1());
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, strBufferOpnd, IR::HelperCallOpnd::New(IR::HelperString_GetSz, func), func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 0);
    instr->InsertBefore(continueLabel);
    // MOV charOpnd, [strBuffer]  (single UTF-16 code unit, zero-extended)
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(charOpnd, IR::IndirOpnd::New(strBufferOpnd, 0, TyUint16, func), instr);
    if (multiBrInstr->m_baseCaseValue != 0)
    {
        // SUB charOpnd, baseIndex -- rebase so the jump table starts at 0
        InsertSub(false, charOpnd, charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_baseCaseValue, TyUint32, func), instr);
    }
    // CMP charOpnd, lastCaseIndex - baseCaseIndex
    // JA defaultLabel  (unsigned compare also catches chars below the base)
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_lastCaseValue - multiBrInstr->m_baseCaseValue, TyUint32, func),
        Js::OpCode::BrGt_A, true, defaultLabelInstr, instr);
    instr->UnlinkSrc1();
    LowerJumpTableMultiBranch(multiBrInstr, charOpnd);
}
// Lower a string switch: first filter on the string length (an exact compare
// when every case has the same length, or a 32-bit length bitmask when all
// case lengths are < 32), then fall through to the dictionary-based helper
// lookup for the surviving strings.
void Lowerer::GenerateSwitchStringLookup(IR::Instr * instr)
{
    /* Collect information about string length in all the case*/
    charcount_t minLength = UINT_MAX;
    charcount_t maxLength = 0;
    BVUnit32 bvLength;
    instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->dictionary.Map([&](JITJavascriptString * str, void *)
    {
        charcount_t len = str->GetLength();
        minLength = min(minLength, str->GetLength());
        maxLength = max(maxLength, str->GetLength());
        if (len < 32)
        {
            // Record this case length in the bit filter.
            bvLength.Set(len);
        }
    });
    Func * func = instr->m_func;
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->defaultTarget;
    if (minLength == maxLength)
    {
        // Generate single length filter
        InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(minLength, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else if (maxLength < 32)
    {
        // Generate bit filter
        // Jump to default label if the bit is not on for the length % 32
        IR::IntConstOpnd * lenBitMaskOpnd = IR::IntConstOpnd::New(bvLength.GetWord(), TyUint32, func);
        InsertBitTestBranch(lenBitMaskOpnd, strLengthOpnd, false, defaultLabelInstr, instr);
        // Jump to default label if the length is >= 32 (any bit above the mask set)
        InsertTestBranch(strLengthOpnd, IR::IntConstOpnd::New(UINT32_MAX ^ 31, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else
    {
        // CONSIDER: Generate range filter
    }
    this->LowerMultiBr(instr, IR::HelperOp_SwitchStringLookUp);
}
- IR::Instr *
- Lowerer::LowerGetCachedFunc(IR::Instr *instr)
- {
- // src1 is an ActivationObjectEx, and we want to get the function object identified by the index (src2)
- // dst = MOV (src1)->GetFuncCacheEntry(src2)->func
- //
- // => [src1 + (offsetof(src1, cache) + (src2 * sizeof(FuncCacheEntry)) + offsetof(FuncCacheEntry, func))]
- IR::IntConstOpnd *src2Opnd = instr->UnlinkSrc2()->AsIntConstOpnd();
- IR::RegOpnd *src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
- IR::Instr *instrPrev = instr->m_prev;
- instr->SetSrc1(IR::IndirOpnd::New(src1Opnd, int32((src2Opnd->GetValue() * sizeof(Js::FuncCacheEntry)) + Js::ActivationObjectEx::GetOffsetOfCache() + offsetof(Js::FuncCacheEntry, func)), TyVar, this->m_func));
- this->m_lowererMD.ChangeToAssign(instr);
- src2Opnd->Free(this->m_func);
- return instrPrev;
- }
IR::Instr *
Lowerer::LowerCommitScope(IR::Instr *instrCommit)
{
    // Lowers CommitScope: sets the activation object's commit flag and then
    // initializes all of its local var slots to undefined.
    IR::Instr *instrPrev = instrCommit->m_prev;
    IR::RegOpnd *baseOpnd = instrCommit->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd *opnd;
    IR::Instr * insertInstr = instrCommit->m_next;

    // Write undef to all the local var slots.

    // First, turn the commit instruction itself into the flag store:
    //   [base + offset(commitFlag)] = 1
    opnd = IR::IndirOpnd::New(baseOpnd, Js::ActivationObjectEx::GetOffsetOfCommitFlag(), TyInt8, this->m_func);
    instrCommit->SetDst(opnd);
    instrCommit->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, this->m_func));
    LowererMD::ChangeToAssign(instrCommit);

    const Js::PropertyIdArray *propIds = instrCommit->m_func->GetJITFunctionBody()->GetFormalsPropIdArray();

    // Slots before firstVarSlot hold formals; only var slots get undef stores.
    uint firstVarSlot = (uint)Js::ActivationObjectEx::GetFirstVarSlot(propIds);
    if (firstVarSlot < propIds->count)
    {
        // Instead of re-using the address of "undefined" for each store, put the address in a register and re-use that.
        IR::RegOpnd *undefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        InsertMove(undefOpnd, LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueUndefined), insertInstr);

        IR::RegOpnd *slotBaseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        // Load a pointer to the aux slots. We assume that all ActivationObject's have only aux slots.
        opnd = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        InsertMove(slotBaseOpnd, opnd, insertInstr);

        // One machine-word store per var slot: [slotBase + i * slotSize] = undef
        for (uint i = firstVarSlot; i < propIds->count; i++)
        {
            opnd = IR::IndirOpnd::New(slotBaseOpnd, i << this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
            InsertMove(opnd, undefOpnd, insertInstr);
        }
    }

    return instrPrev;
}
- IR::Instr *
- Lowerer::LowerTry(IR::Instr* instr, bool tryCatch)
- {
- if (this->m_func->hasBailout)
- {
- this->EnsureBailoutReturnValueSym();
- }
- this->EnsureHasBailedOutSym();
- IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
- IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), this->m_func);
- instr->InsertBefore(setInstr);
- LowererMD::Legalize(setInstr);
- return m_lowererMD.LowerTry(instr, tryCatch ? IR::HelperOp_TryCatch : ((this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout))? IR::HelperOp_TryFinally : IR::HelperOp_TryFinallyNoOpt));
- }
- IR::Instr *
- Lowerer::LowerCatch(IR::Instr * instr)
- {
- // t1 = catch => t2 = catch
- // => t1 = t2
- IR::Opnd *catchObj = instr->UnlinkDst();
- IR::RegOpnd *catchParamReg = IR::RegOpnd::New(TyMachPtr, this->m_func);
- catchParamReg->SetReg(CATCH_OBJ_REG);
- instr->SetDst(catchParamReg);
- IR::Instr * mov = IR::Instr::New(Js::OpCode::Ld_A, catchObj, catchParamReg, this->m_func);
- this->m_lowererMD.ChangeToAssign(mov);
- instr->InsertAfter(mov);
- return instr->m_prev;
- }
- IR::Instr *
- Lowerer::LowerLeave(IR::Instr * leaveInstr, IR::LabelInstr * targetInstr, bool fromFinalLower, bool isOrphanedLeave)
- {
- if (isOrphanedLeave)
- {
- Assert(this->m_func->IsLoopBodyInTry());
- leaveInstr->m_opcode = LowererMD::MDUncondBranchOpcode;
- return leaveInstr->m_prev;
- }
- IR::Instr * instrPrev = leaveInstr->m_prev;
- IR::LabelOpnd *labelOpnd = IR::LabelOpnd::New(targetInstr, this->m_func);
- m_lowererMD.LowerEHRegionReturn(leaveInstr, labelOpnd);
- if (fromFinalLower)
- {
- instrPrev = leaveInstr->m_prev;
- }
- leaveInstr->Remove();
- return instrPrev;
- }
- void
- Lowerer::EnsureBailoutReturnValueSym()
- {
- if (this->m_func->m_bailoutReturnValueSym == nullptr)
- {
- this->m_func->m_bailoutReturnValueSym = StackSym::New(TyVar, this->m_func);
- this->m_func->StackAllocate(this->m_func->m_bailoutReturnValueSym, sizeof(Js::Var));
- }
- }
- void
- Lowerer::EnsureHasBailedOutSym()
- {
- if (this->m_func->m_hasBailedOutSym == nullptr)
- {
- this->m_func->m_hasBailedOutSym = StackSym::New(TyUint32, this->m_func);
- this->m_func->StackAllocate(this->m_func->m_hasBailedOutSym, MachRegInt);
- }
- }
void
Lowerer::InsertReturnThunkForRegion(Region* region, IR::LabelInstr* restoreLabel)
{
    // Emits (once per region) the "bailout return thunk" after the function's
    // exit: a stub that forwards an EH-region return either to the enclosing
    // region's thunk or, at the outermost level, to restoreLabel.
    Assert(this->m_func->isPostLayout);
    Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch || region->GetType() == RegionTypeFinally);

    if (!region->returnThunkEmitted)
    {
        this->m_func->m_exitInstr->InsertAfter(region->GetBailoutReturnThunkLabel());

        bool newLastInstrInserted = false;
        IR::Instr * insertBeforeInstr = region->GetBailoutReturnThunkLabel()->m_next;
        if (insertBeforeInstr == nullptr)
        {
            // The thunk label became the last instruction; append a temporary
            // Nop so there is an insertion point for the thunk body.
            Assert(this->m_func->m_exitInstr == this->m_func->m_tailInstr);
            insertBeforeInstr = IR::Instr::New(Js::OpCode::Nop, this->m_func);
            newLastInstrInserted = true;
            region->GetBailoutReturnThunkLabel()->InsertAfter(insertBeforeInstr);
            this->m_func->m_tailInstr = insertBeforeInstr;
        }

        IR::LabelOpnd * continuationAddr;
        // We insert return thunk to the region's parent return thunk label
        // For non exception finallys, we do not need a return thunk
        // Because, we are not calling non exception finallys from within amd64_callWithFakeFrame
        // But a non exception finally may be within other eh regions that need a return thunk
        if (region->IsNonExceptingFinally())
        {
            // Skip over non-excepting-finally ancestors to find the nearest
            // region that actually owns a thunk.
            Assert(region->GetParent()->GetType() != RegionTypeRoot);
            Region *ancestor = region->GetParent()->GetFirstAncestorOfNonExceptingFinallyParent();
            Assert(ancestor && !ancestor->IsNonExceptingFinally());
            if (ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->IsNonExceptingFinally())
        {
            Region *ancestor = region->GetFirstAncestorOfNonExceptingFinally();
            if (ancestor && ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->GetType() != RegionTypeRoot)
        {
            continuationAddr = IR::LabelOpnd::New(region->GetParent()->GetBailoutReturnThunkLabel(), this->m_func);
        }
        else
        {
            // Outermost region: continue at the return-value restore code.
            continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
        }

        IR::Instr * lastInstr = m_lowererMD.LowerEHRegionReturn(insertBeforeInstr, continuationAddr);
        if (newLastInstrInserted)
        {
            // Drop the placeholder Nop; the thunk body is the new tail.
            Assert(this->m_func->m_tailInstr == insertBeforeInstr);
            insertBeforeInstr->Remove();
            this->m_func->m_tailInstr = lastInstr;
        }

        region->returnThunkEmitted = true;
    }
}
- void
- Lowerer::SetHasBailedOut(IR::Instr * bailoutInstr)
- {
- Assert(this->m_func->isPostLayout);
- IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
- IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func);
- bailoutInstr->InsertBefore(setInstr);
- LowererMD::Legalize(setInstr);
- }
IR::Instr*
Lowerer::EmitEHBailoutStackRestore(IR::Instr * bailoutInstr)
{
    // On x86, pops the out-param area of any calls in flight (plus alignment
    // padding) after an EH bailout, so esp is back where the EH code expects.
    // Returns the last instruction emitted (or bailoutInstr when nothing is
    // needed / on other architectures).
    Assert(this->m_func->isPostLayout);

#ifdef _M_IX86
    BailOutInfo * bailoutInfo = bailoutInstr->GetBailOutInfo();
    uint totalLiveArgCount = 0;
    if (bailoutInfo->startCallCount != 0)
    {
        uint totalStackToBeRestored = 0;
        uint stackAlignmentAdjustment = 0;
        for (uint i = 0; i < bailoutInfo->startCallCount; i++)
        {
            // Orphaned calls never pushed their args; count them as zero.
            uint startCallLiveArgCount = bailoutInfo->startCallInfo[i].isOrphanedCall ? 0 : bailoutInfo->GetStartCallOutParamCount(i);
            // A call whose arg area is not a multiple of the stack alignment
            // carries one extra padding slot that must also be popped.
            if ((Math::Align<int32>(startCallLiveArgCount * MachPtr, MachStackAlignment) - (startCallLiveArgCount * MachPtr)) != 0)
            {
                stackAlignmentAdjustment++;
            }
            totalLiveArgCount += startCallLiveArgCount;
        }
        totalStackToBeRestored = (totalLiveArgCount + stackAlignmentAdjustment) * MachPtr;

        // LEA esp, [esp + totalStackToBeRestored]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegStackPointer(), TyMachReg, this->m_func);
        IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, totalStackToBeRestored, TyMachReg, this->m_func);
        IR::Instr * stackRestoreInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);

        bailoutInstr->InsertAfter(stackRestoreInstr);
        return stackRestoreInstr;
    }
#endif
    return bailoutInstr;
}
- void
- Lowerer::EmitSaveEHBailoutReturnValueAndJumpToRetThunk(IR::Instr * insertAfterInstr)
- {
- Assert(this->m_func->isPostLayout);
- // After the CALL SaveAllRegistersAndBailout instruction, emit
- //
- // MOV bailoutReturnValueSym, eax
- // JMP $currentRegion->bailoutReturnThunkLabel
- IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
- IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
- IR::Instr * movInstr = IR::Instr::New(LowererMD::GetStoreOp(TyVar), bailoutReturnValueSymOpnd, eaxOpnd, this->m_func);
- insertAfterInstr->InsertAfter(movInstr);
- LowererMD::Legalize(movInstr);
- IR::BranchInstr * jumpInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, this->currentRegion->GetBailoutReturnThunkLabel(), this->m_func);
- movInstr->InsertAfter(jumpInstr);
- }
void
Lowerer::EmitRestoreReturnValueFromEHBailout(IR::LabelInstr * restoreLabel, IR::LabelInstr * epilogLabel)
{
    Assert(this->m_func->isPostLayout);
    // Emit, just ahead of the epilog:
    //
    //         JMP $epilog        ; non-bailout paths skip the restore
    //     $restore:
    //         MOV eax, bailoutReturnValueSym
    //     $epilog:
    IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
    IR::RegOpnd * eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr * movInstr = IR::Instr::New(LowererMD::GetLoadOp(TyVar), eaxOpnd, bailoutReturnValueSymOpnd, this->m_func);

    epilogLabel->InsertBefore(restoreLabel);
    epilogLabel->InsertBefore(movInstr);
    LowererMD::Legalize(movInstr);
    // Fall-through code must jump over the restore so it doesn't clobber the
    // return register with the saved bailout value.
    restoreLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, epilogLabel, this->m_func));
}
void
Lowerer::InsertBitTestBranch(IR::Opnd * bitMaskOpnd, IR::Opnd * bitIndex, bool jumpIfBitOn, IR::LabelInstr * targetLabel, IR::Instr * insertBeforeInstr)
{
    // Branch to targetLabel based on whether bit `bitIndex` of bitMaskOpnd is
    // set (jumpIfBitOn == true) or clear, using the best sequence for each
    // target architecture.
#if defined(_M_IX86) || defined(_M_AMD64)
    // Generate bit test and branch
    //      BT bitMaskOpnd, bitIndex
    //      JB/JAE targetLabel       (BT puts the tested bit in CF)
    Func * func = this->m_func;
    IR::Instr * instr = IR::Instr::New(Js::OpCode::BT, func);
    instr->SetSrc1(bitMaskOpnd);
    instr->SetSrc2(bitIndex);
    insertBeforeInstr->InsertBefore(instr);

    // BT's first operand must be r/m; hoist immediates into a register.
    if (!(bitMaskOpnd->IsRegOpnd() || bitMaskOpnd->IsIndirOpnd() || bitMaskOpnd->IsMemRefOpnd()))
    {
        instr->HoistSrc1(Js::OpCode::MOV);
    }
    InsertBranch(jumpIfBitOn ? Js::OpCode::JB : Js::OpCode::JAE, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM)
    // ARM doesn't have a bit test instruction, so generate
    //      MOV r1, 1
    //      SHL r1, bitIndex
    //      TEST bitMaskOpnd, r1
    //      BEQ/BNEQ targetLabel
    Func * func = this->m_func;
    IR::RegOpnd * lenBitOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(lenBitOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    InsertShift(Js::OpCode::Shl_I4, false, lenBitOpnd, lenBitOpnd, bitIndex, insertBeforeInstr);
    InsertTestBranch(lenBitOpnd, bitMaskOpnd, jumpIfBitOn ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM64)
    if (bitIndex->IsImmediateOpnd())
    {
        // TBZ/TBNZ bitMaskOpnd, bitIndex, targetLabel
        IR::Instr* branchInstr = InsertBranch(jumpIfBitOn ? Js::OpCode::TBNZ : Js::OpCode::TBZ, targetLabel, insertBeforeInstr);
        branchInstr->SetSrc1(bitMaskOpnd);
        branchInstr->SetSrc2(bitIndex);
    }
    else
    {
        // TBZ/TBNZ require an immediate for the bit to test, so shift the mask to place the bit we want to test at bit zero, and then test bit zero.
        Func * func = this->m_func;
        IR::RegOpnd * maskOpnd = IR::RegOpnd::New(TyUint32, func);
        InsertShift(Js::OpCode::Shr_I4, false, maskOpnd, bitMaskOpnd, bitIndex, insertBeforeInstr);
        IR::Instr* branchInstr = InsertBranch(jumpIfBitOn ? Js::OpCode::TBNZ : Js::OpCode::TBZ, targetLabel, insertBeforeInstr);
        branchInstr->SetSrc1(maskOpnd);
        branchInstr->SetSrc2(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
    }
#else
    AssertMsg(false, "Not implemented");
#endif
}
//
// Generates an object test and then a string test with the static string type
//
void
Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
{
    Assert(srcReg);
    // No test needed when the value type already proves this is a string.
    if (!srcReg->GetValueType().IsString())
    {
        if (generateObjectCheck && !srcReg->IsNotTaggedValue())
        {
            this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
        }

        // CMP [regSrcStr + offset(type)] , static string type -- check base string type
        // BrEq $continueLabel / BrNeq $labelHelper
        IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
        IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
        IR::BranchInstr* branchInstr = nullptr;
        if (continueLabel)
        {
            // Caller supplied a success target: jump there on a type match.
            branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
        }
        else
        {
            // No success target: fall through on a match, jump to the helper otherwise.
            branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
        }
        // Poison srcReg on the mispredicted side of the type-check branch
        // (speculation hardening).
        InsertObjectPoison(srcReg, branchInstr, insertInstr, false);
    }
}
- //
- // Generates an object test and then a symbol test with the static symbol type
- //
- void
- Lowerer::GenerateSymbolTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
- {
- Assert(srcReg);
- if (!srcReg->GetValueType().IsSymbol())
- {
- if (generateObjectCheck && !srcReg->IsNotTaggedValue())
- {
- this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
- }
- // CMP [regSrcStr + offset(type)] , static symbol type -- check base symbol type
- // BrEq/BrNeq labelHelper.
- IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
- IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueSymbolTypeStatic);
- if (continueLabel)
- {
- InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
- }
- else
- {
- InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
- }
- }
- }
void
Lowerer::LowerConvNum(IR::Instr *instrLoad, bool noMathFastPath)
{
    // Lowers ConvNum. Fast path: tagged ints convert to themselves, so a
    // simple move suffices; anything else falls to the ConvNumber helper.
    if (PHASE_OFF(Js::OtherFastPathPhase, this->m_func) || noMathFastPath || !instrLoad->GetSrc1()->IsRegOpnd())
    {
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
        return;
    }

    // MOV dst, src1
    // TEST src1, 1
    // JNE $done
    // call ToNumber
    //$done:
    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = NULL;
    IR::Instr *instr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // Possibly a tagged int: MOV dst, src1 covers that case.
        instr = Lowerer::InsertMove(instrLoad->GetDst(), src1, instrLoad);

        if (!isInt)
        {
            // Not proven int: branch past the helper when src1 is tagged.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            bool didTest = m_lowererMD.GenerateObjectTest(src1, instrLoad, labelDone);

            if (didTest)
            {
                // This label is needed only to mark the helper block
                IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                instrLoad->InsertBefore(labelHelper);
            }
        }
    }

    if (!isInt)
    {
        // Helper call handles the non-tagged-int cases.
        if (labelDone)
        {
            instrLoad->InsertAfter(labelDone);
        }
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
    }
    else
    {
        // Known tagged int: the move above is the whole conversion.
        instrLoad->Remove();
    }
}
IR::Opnd *
Lowerer::LoadSlotArrayWithCachedLocalType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
{
    // Returns an operand addressing the object's slot storage for an
    // inline-cache-backed property access: the aux slot array when the cache
    // says aux slots are used, otherwise the object itself (inline slots).
    IR::RegOpnd *opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    if (propertySymOpnd->UsesAuxSlot())
    {
        // If we use the auxiliary slot array, load it and return it
        IR::RegOpnd * opndSlotArray;
        if (propertySymOpnd->IsAuxSlotPtrSymAvailable() || propertySymOpnd->ProducesAuxSlotPtr())
        {
            // We want to reload and/or reuse the shared aux slot ptr sym
            StackSym * auxSlotPtrSym = propertySymOpnd->GetAuxSlotPtrSym();
            Assert(auxSlotPtrSym != nullptr);
            opndSlotArray = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, this->m_func);
            opndSlotArray->SetIsJITOptimizedReg(true);
            if (!propertySymOpnd->ProducesAuxSlotPtr())
            {
                // No need to reload; the sym already holds the pointer.
                return opndSlotArray;
            }
        }
        else
        {
            opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
        }

        // MOV opndSlotArray, [base + offset(auxSlots)]
        IR::Opnd *opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        Lowerer::InsertMove(opndSlotArray, opndIndir, instrInsert);

        return opndSlotArray;
    }
    else
    {
        // If we use inline slot return the address to the object header
        return opndBase;
    }
}
- IR::Opnd *
- Lowerer::LoadSlotArrayWithCachedProtoType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
- {
- // Get the prototype object from the cache
- intptr_t prototypeObject = propertySymOpnd->GetProtoObject();
- Assert(prototypeObject != 0);
- if (propertySymOpnd->UsesAuxSlot())
- {
- // If we use the auxiliary slot array, load it from the prototype object and return it
- IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::Opnd *opnd = IR::MemRefOpnd::New((char*)prototypeObject + Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func, IR::AddrOpndKindDynamicAuxSlotArrayRef);
- Lowerer::InsertMove(opndSlotArray, opnd, instrInsert);
- return opndSlotArray;
- }
- else
- {
- // If we use inline slot return the address of the prototype object
- return IR::MemRefOpnd::New(prototypeObject, TyMachReg, this->m_func);
- }
- }
- IR::Instr *
- Lowerer::LowerLdAsmJsEnv(IR::Instr * instr)
- {
- Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
- IR::Opnd * functionObjOpnd;
- IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
- Assert(!instr->GetSrc1());
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfModuleMemory(), TyMachPtr, m_func);
- instr->SetSrc1(indirOpnd);
- LowererMD::ChangeToAssign(instr);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerLdNativeCodeData(IR::Instr * instr)
- {
- Assert(!instr->GetSrc1());
- Assert(m_func->IsTopFunc());
- IR::Instr * instrPrev = instr->m_prev;
- instr->SetSrc1(IR::MemRefOpnd::New((void*)m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, m_func, IR::AddrOpndKindDynamicNativeCodeDataRef));
- LowererMD::ChangeToAssign(instr);
- return instrPrev;
- }
IR::Instr *
Lowerer::LowerLdEnv(IR::Instr * instr)
{
    // Lowers LdEnv: load the function's environment (closure scope chain)
    // out of the script function object.
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * functionObjOpnd;
    IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
    Assert(!instr->GetSrc1());
    if (src1 == nullptr || functionObjOpnd->IsRegOpnd())
    {
        // Function object in a register: [funcObj + offset(environment)]
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
            Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, m_func);
        instr->SetSrc1(indirOpnd);
    }
    else
    {
        // Function object known at JIT time: address the environment field directly.
        Assert(functionObjOpnd->IsAddrOpnd());
        IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
        IR::MemRefOpnd* functionEnvMemRefOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::ScriptFunction::GetOffsetOfEnvironment()),
            TyMachPtr, this->m_func, IR::AddrOpndKindDynamicFunctionEnvironmentRef);
        instr->SetSrc1(functionEnvMemRefOpnd);
    }
    LowererMD::ChangeToAssign(instr);
    return instrPrev;
}
- IR::Instr *
- Lowerer::LowerLdSuper(IR::Instr *instr, IR::JnHelperMethod helperOpCode)
- {
- IR::Opnd * functionObjOpnd;
- IR::Instr * instrPrev = m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
- LoadScriptContext(instr);
- m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
- m_lowererMD.ChangeToHelperCall(instr, helperOpCode);
- return instrPrev;
- }
IR::Instr *
Lowerer::LowerFrameDisplayCheck(IR::Instr * instr)
{
    // Lowers FrameDisplayCheck: emits debug-mode validation that the frame
    // display has enough scopes, and that each checked slot array in the
    // scope chain has enough slots; any failure calls the fatal-error helper.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::AddrOpnd *addrOpnd = instr->UnlinkSrc2()->AsAddrOpnd();
    FrameDisplayCheckRecord *record = (FrameDisplayCheckRecord*)addrOpnd->m_address;

    IR::LabelInstr *errorLabel = nullptr;
    IR::LabelInstr *continueLabel = nullptr;
    IR::RegOpnd *envOpnd = instr->GetDst()->AsRegOpnd();
    uint32 frameDisplayOffset = Js::FrameDisplay::GetOffsetOfScopes()/sizeof(Js::Var);

    if (record->slotId != (uint32)-1 && record->slotId > frameDisplayOffset)
    {
        // Check that the frame display has enough scopes in it to satisfy the code.
        errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // CMP [env + offset(length)], slotId - frameDisplayOffset
        // BrLe $error
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                       Js::FrameDisplay::GetOffsetOfLength(),
                                                       TyUint16, m_func, true);
        IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(record->slotId - frameDisplayOffset, TyUint16, m_func);
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
    }

    if (record->table)
    {
        // Check the size of each of the slot arrays in the scope chain.
        FOREACH_HASHTABLE_ENTRY(uint32, bucket, record->table)
        {
            uint32 slotId = bucket.element;
            if (slotId != (uint32)-1 && slotId > Js::ScopeSlots::FirstSlotIndex)
            {
                // Lazily create the shared error/continue labels on first use.
                if (errorLabel == nullptr)
                {
                    errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                    continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
                }

                // Load the slot array for this scope, then compare its
                // encoded slot count against the required slot id.
                IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                               bucket.value * sizeof(Js::Var),
                                                               TyVar, m_func, true);
                IR::RegOpnd * slotArrayOpnd = IR::RegOpnd::New(TyVar, m_func);
                InsertMove(slotArrayOpnd, indirOpnd, insertInstr);
                indirOpnd = IR::IndirOpnd::New(slotArrayOpnd,
                                               Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                               TyVar, m_func, true);
                IR::IntConstOpnd * slotIdOpnd = IR::IntConstOpnd::New(slotId - Js::ScopeSlots::FirstSlotIndex,
                                                                      TyUint32, m_func);
                InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
            }
        }
        NEXT_HASHTABLE_ENTRY;
    }

    if (errorLabel)
    {
        // Success paths jump over the shared error block.
        //     Br $continue
        // $error:
        //     Call HelperOp_FatalInternalError
        // $continue:
        InsertBranch(Js::OpCode::Br, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }

    // The instruction itself becomes a plain assignment of the env.
    m_lowererMD.ChangeToAssign(instr);

    return instrPrev;
}
IR::Instr *
Lowerer::LowerSlotArrayCheck(IR::Instr * instr)
{
    // Lowers SlotArrayCheck: validates that the slot array referenced by dst
    // has at least slotId slots, calling the fatal-error helper otherwise.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::RegOpnd *slotArrayOpnd = instr->GetDst()->AsRegOpnd();
    StackSym *stackSym = slotArrayOpnd->m_sym;

    IR::IntConstOpnd *slotIdOpnd = instr->UnlinkSrc2()->AsIntConstOpnd();
    uint32 slotId = (uint32)slotIdOpnd->GetValue();
    Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);

    if (slotId > Js::ScopeSlots::FirstSlotIndex)
    {
        if (m_func->DoStackFrameDisplay() && stackSym->m_id == m_func->GetLocalClosureSym()->m_id)
        {
            // The pointer we loaded points to the reserved/known address where the slot array can be boxed.
            // Deref to get the real value.
            IR::IndirOpnd * srcOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func), 0, TyVar, m_func);
            IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyVar, m_func);
            InsertMove(dstOpnd, srcOpnd, insertInstr);
            stackSym = dstOpnd->m_sym;
        }

        IR::LabelInstr *errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // CMP [slotArray + offset(encodedSlotCount)], slotId - FirstSlotIndex
        // BrGt $continue     (enough slots)
        // $error: Call HelperOp_FatalInternalError
        // $continue:
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func),
                                                       Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                                       TyVar, m_func, true);
        // Reuse the unlinked src2 constant, rebased to the encoded count.
        slotIdOpnd->SetValue(slotId - Js::ScopeSlots::FirstSlotIndex);
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrGt_A, true, continueLabel, insertInstr);

        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);

        insertInstr->InsertBefore(continueLabel);
    }

    // The check instruction itself becomes a plain assignment.
    m_lowererMD.ChangeToAssign(instr);

    return instrPrev;
}
// Converts a var that the profile says is likely a float into an int32 index:
// fast tagged-int path first, then float-to-int conversion with a helper
// fallback, branching to notIntLabel/negativeLabel when the value doesn't
// convert exactly (or is negative, unless skipNegativeCheck).
IR::RegOpnd *
Lowerer::LoadIndexFromLikelyFloat(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Func *func = insertBeforeInstr->m_func;
    IR::LabelInstr * fallThrough = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::RegOpnd *int32IndexOpnd = nullptr;

    // If we know for sure that it's not an int, do not check to see if it's a tagged int
    if (indexOpnd->IsNotInt())
    {
        int32IndexOpnd = IR::RegOpnd::New(TyInt32, func);
    }
    else
    {
        IR::LabelInstr * convertToUint = IR::LabelInstr::New(Js::OpCode::Label, func);
        // First generate test for tagged int even though profile data says likely float. Indices are usually int and we need a fast path before we try to convert float to int
        //      mov intIndex, index
        //      sar intIndex, 1
        //      jae convertToInt
        int32IndexOpnd = GenerateUntagVar(indexOpnd, convertToUint, insertBeforeInstr, !indexOpnd->IsTaggedInt());
        if (!skipNegativeCheck)
        {
            //      test index, index
            //      js $notTaggedIntOrNegative
            InsertTestBranch(int32IndexOpnd, int32IndexOpnd, LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), negativeLabel, insertBeforeInstr);
        }
        InsertBranch(Js::OpCode::Br, fallThrough, insertBeforeInstr);

        insertBeforeInstr->InsertBefore(convertToUint);
    }

    // try to convert float to int in a fast path
#if FLOATVAR
    // NaN-boxed vars: untag the float value directly.
    IR::RegOpnd* floatIndexOpnd = m_lowererMD.CheckFloatAndUntag(indexOpnd, insertBeforeInstr, notIntLabel);
#else
    // Boxed doubles: verify it's a JavascriptNumber, then read its value.
    m_lowererMD.GenerateFloatTest(indexOpnd, insertBeforeInstr, notIntLabel);
    IR::IndirOpnd * floatIndexOpnd = IR::IndirOpnd::New(indexOpnd, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif

    IR::LabelInstr * doneConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * helperConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func, true /*helper*/);

    m_lowererMD.ConvertFloatToInt32(int32IndexOpnd, floatIndexOpnd, helperConvUint32, doneConvUint32, insertBeforeInstr);

    // helper path
    insertBeforeInstr->InsertBefore(helperConvUint32);
    m_lowererMD.LoadDoubleHelperArgument(insertBeforeInstr, floatIndexOpnd);
    IR::Instr * helperCall = IR::Instr::New(Js::OpCode::Call, int32IndexOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(helperCall);
#if DBG
    // This call to Conv_ToUint32Core won't be reentrant as we would only call it for floats
    this->ClearAndSaveImplicitCallCheckOnHelperCallCheckState();
#endif
    m_lowererMD.ChangeToHelperCall(helperCall, IR::HelperConv_ToUInt32Core);
#if DBG
    this->RestoreImplicitCallCheckOnHelperCallCheckState();
#endif

    // main path
    insertBeforeInstr->InsertBefore(doneConvUint32);

    // Convert the uint32 back to float to verify the conversion was exact
    IR::RegOpnd *floatOpndFromUint32 = IR::RegOpnd::New(TyFloat64, func);
    m_lowererMD.EmitUIntToFloat(floatOpndFromUint32, int32IndexOpnd->UseWithNewType(TyUint32, this->m_func), insertBeforeInstr);
    // compare with float from the original indexOpnd, we need floatIndex == (float64)(uint32)floatIndex
    InsertCompareBranch(floatOpndFromUint32, floatIndexOpnd, Js::OpCode::BrNeq_A, notIntLabel, insertBeforeInstr, false);

    insertBeforeInstr->InsertBefore(fallThrough);
    return int32IndexOpnd;
}
- void
- Lowerer::AllocStackForInObjectEnumeratorArray()
- {
- Func * func = this->m_func;
- Assert(func->IsTopFunc());
- if (func->m_forInLoopMaxDepth)
- {
- func->m_forInEnumeratorArrayOffset = func->StackAllocate(sizeof(Js::ForInObjectEnumerator) * this->m_func->m_forInLoopMaxDepth);
- }
- }
IR::RegOpnd *
Lowerer::GenerateForInEnumeratorLoad(IR::Opnd * forInEnumeratorOpnd, IR::Instr * insertBeforeInstr)
{
    // Returns a register holding the address of the ForInObjectEnumerator for
    // this loop, materializing the sym's real stack offset on first use.
    Func * func = insertBeforeInstr->m_func;
    if (forInEnumeratorOpnd->IsSymOpnd())
    {
        StackSym * stackSym = forInEnumeratorOpnd->AsSymOpnd()->GetStackSym();
        Assert(!stackSym->m_allocated);
        // Until allocation, the sym's m_offset field carries the for-in loop
        // nesting level, not a real stack offset.
        uint forInLoopLevel = stackSym->m_offset;
        Assert(func->m_forInLoopBaseDepth + forInLoopLevel < this->m_func->m_forInLoopMaxDepth);
        // Rewrite it as the actual offset into the enumerator array.
        stackSym->m_offset = this->m_func->m_forInEnumeratorArrayOffset + ((func->m_forInLoopBaseDepth + forInLoopLevel) * sizeof(Js::ForInObjectEnumerator));
        stackSym->m_allocated = true;
        // Fall through to the LEA below.
    }
    else
    {
        Assert(forInEnumeratorOpnd->IsIndirOpnd());
        if (forInEnumeratorOpnd->AsIndirOpnd()->GetOffset() == 0)
        {
            // [base + 0]: the base register already is the address.
            return forInEnumeratorOpnd->AsIndirOpnd()->GetBaseOpnd();
        }
    }
    // LEA enumeratorReg, forInEnumeratorOpnd
    IR::RegOpnd * forInEnumeratorRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertLea(forInEnumeratorRegOpnd, forInEnumeratorOpnd, insertBeforeInstr);
    return forInEnumeratorRegOpnd;
}
void
Lowerer::GenerateHasObjectArrayCheck(IR::RegOpnd * objectOpnd, IR::RegOpnd * typeOpnd, IR::LabelInstr * hasObjectArrayLabel, IR::Instr * insertBeforeInstr)
{
    // Branches to hasObjectArrayLabel when the object has a real object array
    // (and isn't using object-header-inlined slots); falls through otherwise.
    //
    //      CMP [objectOpnd + offset(objectArray)], nullptr
    //      JEQ $noObjectArrayLabel
    //      TEST [objectOpnd + offset(objectArray)], ObjectArrayFlagsTag (used as flags)
    //      JNE $noObjectArrayLabel
    //      MOV typeHandlerOpnd, [typeOpnd + offset(typeHandler)]
    //      CMP typeHandler->OffsetOfInlineSlots, Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots()
    //      JNE $hasObjectArrayLabel
    // $noObjectArrayLabel: (fall thru)
    Func * func = this->m_func;
    IR::LabelInstr * noObjectArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::IndirOpnd * objectArrayOpnd = IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfObjectArray(), TyMachPtr, func);
    InsertCompareBranch(objectArrayOpnd, IR::AddrOpnd::NewNull(func), Js::OpCode::BrEq_A, noObjectArrayLabel, insertBeforeInstr);

    // A flags-tagged slot means the field holds flags, not an array pointer.
    InsertTestBranch(objectArrayOpnd, IR::IntConstOpnd::New((uint32)Js::DynamicObjectFlags::ObjectArrayFlagsTag, TyUint8, func),
        Js::OpCode::BrNeq_A, noObjectArrayLabel, insertBeforeInstr);

    IR::RegOpnd * typeHandlerOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeHandlerOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfTypeHandler(), TyMachPtr, func), insertBeforeInstr);
    InsertCompareBranch(IR::IndirOpnd::New(typeHandlerOpnd, Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyUint16, func),
        IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyUint16, func),
        Js::OpCode::BrNeq_A, hasObjectArrayLabel, insertBeforeInstr);

    insertBeforeInstr->InsertBefore(noObjectArrayLabel);
}
void
Lowerer::GenerateInitForInEnumeratorFastPath(IR::Instr * instr, Js::EnumeratorCache * forInCache)
{
    // Fast path for InitForInEnumerator: when the object's type matches the
    // profiled forInCache, the ForInObjectEnumerator is initialized inline
    // instead of via the runtime helper. Any failed guard jumps to $helper;
    // the caller lowers 'instr' itself into the helper call, and the fast path
    // skips it via the $done label inserted after 'instr'.
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::RegOpnd * objectOpnd = instr->GetSrc1()->AsRegOpnd();

    // Guard: bail to $helper if src1 is a tagged value or not a dynamic object.
    m_lowererMD.GenerateObjectTest(objectOpnd, instr, helperLabel);
    GenerateIsDynamicObject(objectOpnd, instr, helperLabel);

    // Type check against the cache:
    //      MOV typeOpnd, [objectOpnd + offset(type)]
    //      CMP [&forInCache->type], typeOpnd
    //      JNE $helper
    IR::RegOpnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(IR::MemRefOpnd::New(&forInCache->type, TyMachPtr, func, IR::AddrOpndKindForInCacheType), typeOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    // Guard: the cached data must not enumerate non-enumerable properties.
    //      MOV forInCacheDataOpnd, [&forInCache->data]
    //      CMP forInCacheDataOpnd->enumNonEnumerable, 0
    //      JNE $helper
    IR::RegOpnd * forInCacheDataOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(forInCacheDataOpnd, IR::MemRefOpnd::New(&forInCache->data, TyMachPtr, func, IR::AddrOpndKindForInCacheData), instr);
    InsertCompareBranch(IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataEnumNonEnumerable(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);

    // Guard: the object itself must not have an object array.
    GenerateHasObjectArrayCheck(objectOpnd, typeOpnd, helperLabel, instr);

    // Walk the prototype chain and bail to $helper if any prototype could
    // contribute enumerable properties (static type, enumerable properties, or
    // an object array). The walk stops at the Null-type sentinel.
    //
    //      MOV prototypeObjectOpnd, [type + offset(prototype)]
    //      MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)]
    //      CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    //      JEQ $noPrototypeWithEnumerablePropertiesLabel
    //
    //  $checkFirstPrototypeLoopTopLabel:
    //      CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_LastStaticType
    //      JLE $helper
    //      CMP [prototypeTypeOpnd + offset(hasNoEnumerableProperties)], 0
    //      JEQ $helper
    //      <hasObjectArrayCheck prototypeObjectOpnd, prototypeTypeOpnd>
    //
    //      MOV prototypeObjectOpnd, [prototypeTypeOpnd + offset(prototype)]   (load next prototype)
    //
    //      MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)]        (tail dup TypeIds_Null check)
    //      CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    //      JNE $checkFirstPrototypeLoopTopLabel
    //
    //  $noPrototypeWithEnumerablePropertiesLabel:
    IR::LabelInstr * noPrototypeWithEnumerablePropertiesLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::RegOpnd * prototypeObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd * prototypeTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::IndirOpnd * prototypeTypeIdOpnd = IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfTypeId(), TyUint32, func);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrEq_A, noPrototypeWithEnumerablePropertiesLabel, instr);

    IR::LabelInstr * checkFirstPrototypeLoopTopLabel = InsertLoopTopLabel(instr);
    Loop * loop = checkFirstPrototypeLoopTopLabel->GetLoop();
    // Both registers are redefined in the loop body and read again on the next
    // iteration, so keep them live across the back edge for the register allocator.
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeObjectOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeTypeOpnd->m_sym->m_id);

    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastStaticType, TyUint32, func), Js::OpCode::BrLe_A, helperLabel, instr);
    // No need to do EnsureObjectReady. Defer init type may not have this bit set, so we will go to helper and call EnsureObjectReady then
    InsertCompareBranch(IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfHasNoEnumerableProperties(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrEq_A, helperLabel, instr);
    GenerateHasObjectArrayCheck(prototypeObjectOpnd, prototypeTypeOpnd, helperLabel, instr);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    // Tail dup the TypeIds_Null check
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrNeq_A, checkFirstPrototypeLoopTopLabel, instr);
    instr->InsertBefore(noPrototypeWithEnumerablePropertiesLabel);

    // All guards passed: initialize the DynamicObjectPropertyEnumerator fields in place.
    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc2();
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorScriptContext(), TyMachPtr),
        LoadScriptContextOpnd(instr), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr),
        objectOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr),
        typeOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyInt32),
        IR::IntConstOpnd::New(Js::Constants::NoBigSlot, TyInt32, func), instr);
    IR::RegOpnd * initialPropertyCountOpnd = IR::RegOpnd::New(TyInt32, func);
    InsertMove(initialPropertyCountOpnd,
        IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataPropertyCount(), TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialPropertyCount(), TyInt32),
        initialPropertyCountOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyInt32),
        IR::IntConstOpnd::New(0, TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorFlags(), TyUint8),
        IR::IntConstOpnd::New((uint8)(Js::EnumeratorFlags::UseCache | Js::EnumeratorFlags::SnapShotSemantics), TyUint8, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr),
        forInCacheDataOpnd, instr);

    // Null out the remaining JavascriptStaticEnumerator fields.
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCurrentEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorPrefixEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorArrayEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);

    // Null out the remaining ForInObjectEnumerator fields.
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfShadowData(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    // Initialize canUseJitFastPath = true and enumeratingPrototype = false at
    // the same time with a single 16-bit store of 1 over the packed state fields.
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfStates(), TyUint16),
        IR::IntConstOpnd::New(1, TyUint16, func, true), instr);

    // Fast path done: jump over the helper call that the caller will emit at 'instr'.
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
- void
- Lowerer::LowerInitForInEnumerator(IR::Instr * instr)
- {
- Js::EnumeratorCache * forInCache = nullptr;
- Func * func = instr->m_func;
- if (instr->IsProfiledInstr())
- {
- uint profileId = instr->AsProfiledInstr()->u.profileId;
- forInCache = instr->m_func->GetJITFunctionBody()->GetForInCache(profileId);
- Assert(forInCache != nullptr);
- if (!func->IsSimpleJit()
- #if ENABLE_TTD
- && (func->IsOOPJIT() || !func->GetScriptContext()->GetThreadContext()->IsRuntimeInTTDMode())
- //TODO: We will need to enable OOPJIT info to exclude this if we have a TTD Runtime
- #endif
- )
- {
- GenerateInitForInEnumeratorFastPath(instr, forInCache);
- }
- }
- IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc2(), instr);
- instr->SetSrc2(forInEnumeratorRegOpnd);
- m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::New(forInCache, IR::AddrOpndKindForInCache, func));
- this->LowerBinaryHelperMem(instr, IR::HelperOp_OP_InitForInEnumerator);
- }
- IR::LabelInstr *
- Lowerer::InsertLoopTopLabel(IR::Instr * insertBeforeInstr)
- {
- Func * func = this->m_func;
- IR::LabelInstr * loopTopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- loopTopLabel->m_isLoopTop = true;
- Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, func);
- loopTopLabel->SetLoop(loop);
- loop->SetLoopTopInstr(loopTopLabel);
- loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
- insertBeforeInstr->InsertBefore(loopTopLabel);
- return loopTopLabel;
- }
- IR::Instr *
- Lowerer::AddBailoutToHelperCallInstr(IR::Instr * helperCallInstr, BailOutInfo * bailoutInfo, IR::BailOutKind bailoutKind, IR::Instr * primaryBailoutInstr)
- {
- helperCallInstr = helperCallInstr->ConvertToBailOutInstr(bailoutInfo, bailoutKind);
- if (bailoutInfo->bailOutInstr == primaryBailoutInstr)
- {
- IR::Instr * instrShare = primaryBailoutInstr->ShareBailOut();
- LowerBailTarget(instrShare);
- }
- return helperCallInstr;
- }
- void
- Lowerer::GenerateAuxSlotPtrLoad(IR::PropertySymOpnd *propertySymOpnd, IR::Instr * instrInsert)
- {
- StackSym * auxSlotPtrSym = propertySymOpnd->GetAuxSlotPtrSym();
- Assert(auxSlotPtrSym);
- Func * func = instrInsert->m_func;
- IR::Opnd *opndIndir = IR::IndirOpnd::New(propertySymOpnd->CreatePropertyOwnerOpnd(func), Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, func);
- IR::RegOpnd *regOpnd = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, func);
- regOpnd->SetIsJITOptimizedReg(true);
- InsertMove(regOpnd, opndIndir, instrInsert);
- }
void
Lowerer::InsertAndLegalize(IR::Instr * instr, IR::Instr* insertBeforeInstr)
{
    // Inserts instr immediately before insertBeforeInstr, then runs the
    // machine-dependent legalizer on it so the instruction satisfies the
    // target's operand/encoding constraints.
    insertBeforeInstr->InsertBefore(instr);
    LowererMD::Legalize(instr);
}
- IR::Instr*
- Lowerer::InsertObjectCheck(IR::RegOpnd *funcOpnd, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind, BailOutInfo *bailOutInfo)
- {
- IR::Instr *bailOutIfNotObject = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, bailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
- // Bailout when funcOpnd is not an object.
- bailOutIfNotObject->SetSrc1(funcOpnd);
- bailOutIfNotObject->SetByteCodeOffset(insertBeforeInstr);
- insertBeforeInstr->InsertBefore(bailOutIfNotObject);
- return bailOutIfNotObject;
- }
- IR::Instr*
- Lowerer::InsertFunctionTypeIdCheck(IR::RegOpnd * funcOpnd, IR::Instr* insertBeforeInstr, IR::BailOutKind bailOutKind, BailOutInfo *bailOutInfo)
- {
- IR::Instr *bailOutIfNotFunction = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, bailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
- // functionTypeRegOpnd = Ld functionRegOpnd->type
- IR::IndirOpnd *functionTypeIndirOpnd = IR::IndirOpnd::New(funcOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, insertBeforeInstr->m_func);
- IR::RegOpnd *functionTypeRegOpnd = IR::RegOpnd::New(TyVar, insertBeforeInstr->m_func->GetTopFunc());
- IR::Instr *instr = IR::Instr::New(Js::OpCode::Ld_A, functionTypeRegOpnd, functionTypeIndirOpnd, insertBeforeInstr->m_func);
- if (instr->m_func->HasByteCodeOffset())
- {
- instr->SetByteCodeOffset(insertBeforeInstr);
- }
- insertBeforeInstr->InsertBefore(instr);
- CompileAssert(sizeof(Js::TypeId) == sizeof(int32));
- // if (functionTypeRegOpnd->typeId != TypeIds_Function) goto $noInlineLabel
- // BrNeq_I4 $noInlineLabel, functionTypeRegOpnd->typeId, TypeIds_Function
- IR::IndirOpnd *functionTypeIdIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, insertBeforeInstr->m_func);
- IR::IntConstOpnd *typeIdFunctionConstOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, insertBeforeInstr->m_func);
- bailOutIfNotFunction->SetSrc1(functionTypeIdIndirOpnd);
- bailOutIfNotFunction->SetSrc2(typeIdFunctionConstOpnd);
- insertBeforeInstr->InsertBefore(bailOutIfNotFunction);
- return bailOutIfNotFunction;
- }
- IR::Instr*
- Lowerer::InsertFunctionInfoCheck(IR::RegOpnd * funcOpnd, IR::Instr *insertBeforeInstr, IR::AddrOpnd* inlinedFuncInfo, IR::BailOutKind bailOutKind, BailOutInfo *bailOutInfo)
- {
- IR::Instr *bailOutIfWrongFuncInfo = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, bailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
- // if (VarTo<JavascriptFunction>(r1)->functionInfo != funcInfo) goto noInlineLabel
- // BrNeq_A noInlineLabel, r1->functionInfo, funcInfo
- IR::IndirOpnd* opndFuncInfo = IR::IndirOpnd::New(funcOpnd, Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, insertBeforeInstr->m_func);
- bailOutIfWrongFuncInfo->SetSrc1(opndFuncInfo);
- bailOutIfWrongFuncInfo->SetSrc2(inlinedFuncInfo);
- insertBeforeInstr->InsertBefore(bailOutIfWrongFuncInfo);
- return bailOutIfWrongFuncInfo;
- }
- #if DBG
void
Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast)
{
    // DBG-only: runs LowererMD::Legalize<true> over every instruction in
    // [instrStart, instrLast]. The <true> template argument presumably selects
    // a verify-only mode that asserts the instructions are already legal
    // rather than rewriting them — confirm against LowererMD::Legalize.
    FOREACH_INSTR_IN_RANGE(verifyLegalizeInstr, instrStart, instrLast)
    {
        LowererMD::Legalize<true>(verifyLegalizeInstr);
    }
    NEXT_INSTR_IN_RANGE;
}
void
Lowerer::ReconcileWithLowererStateOnHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod)
{
    // Debug-only validation that emitting this helper call is consistent with
    // the lowerer's current check state:
    // - helper calls must be allowed at all in the current state;
    // - if the state demands implicit-call bailouts, a reentrant helper call
    //   must carry a BailOutOnImplicitCalls bailout;
    // - a reentrant helper should only be emitted for opcodes whose attributes
    //   permit implicit calls (FromVar is explicitly special-cased).
    AssertMsg((this->helperCallCheckState & HelperCallCheckState_NoHelperCalls) == 0, "Emitting an helper call when we didn't allow helper calls");
    if (HelperMethodAttributes::CanBeReentrant(helperMethod))
    {
        if (this->helperCallCheckState & HelperCallCheckState_ImplicitCallsBailout)
        {
            if (!callInstr->HasBailOutInfo() ||
                !BailOutInfo::IsBailOutOnImplicitCalls(callInstr->GetBailOutKind()))
            {
                // Print the offending helper before asserting so it is
                // identifiable in the output.
                Output::Print(_u("HelperMethod : %s\n"), IR::GetMethodName(helperMethod));
                AssertMsg(false, "Helper call doesn't have BailOutOnImplicitCalls when it should");
            }
        }
        if (!OpCodeAttr::HasImplicitCall(m_currentInstrOpCode) && !OpCodeAttr::OpndHasImplicitCall(m_currentInstrOpCode)
            // Special case where we allow support implicit calls, but FromVar says it doesn't have implicit calls
            && m_currentInstrOpCode != Js::OpCode::FromVar
            )
        {
            // Dump helper, opcode, and byte-code offset before asserting.
            Output::Print(_u("HelperMethod : %s, OpCode: %s"), IR::GetMethodName(helperMethod), Js::OpCodeUtil::GetOpCodeName(m_currentInstrOpCode));
            callInstr->DumpByteCodeOffset();
            Output::Print(_u("\n"));
            AssertMsg(false, "OpCode and Helper implicit call attribute mismatch");
        }
    }
}
- void
- Lowerer::ClearAndSaveImplicitCallCheckOnHelperCallCheckState()
- {
- this->oldHelperCallCheckState = this->helperCallCheckState;
- this->helperCallCheckState = HelperCallCheckState(this->helperCallCheckState & ~HelperCallCheckState_ImplicitCallsBailout);
- }
- void
- Lowerer::RestoreImplicitCallCheckOnHelperCallCheckState()
- {
- if (this->oldHelperCallCheckState & HelperCallCheckState_ImplicitCallsBailout)
- {
- this->helperCallCheckState = HelperCallCheckState(this->helperCallCheckState | HelperCallCheckState_ImplicitCallsBailout);
- this->oldHelperCallCheckState = HelperCallCheckState_None;
- }
- }
- IR::Instr*
- Lowerer::LowerCheckLowerIntBound(IR::Instr * instr)
- {
- IR::Instr * instrPrev = instr->m_prev;
- IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, false /*isOpHelper*/);
- Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32());
- InsertCompareBranch(instr->GetSrc1(), instr->GetSrc2(), Js::OpCode::BrGe_A, continueLabel, instr);
- IR::Instr* helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
- instr->InsertBefore(helperCallInstr);
- m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::HelperIntRangeCheckFailure);
- instr->InsertAfter(continueLabel);
- instr->Remove();
- return instrPrev;
- }
IR::Instr*
Lowerer::LowerCheckUpperIntBound(IR::Instr * instr)
{
    // DBG-only lowering of CheckUpperIntBound. If the immediately preceding
    // instruction is a CheckLowerIntBound, the two checks are fused into a
    // single helper call:
    //
    //      CMP src1, upperBound
    //      JGT $helper                 (above upper bound)
    //      CMP lowerSrc1, lowerBound
    //      JGE $continue               (in range)
    //  $helper:
    //      CALL IntRangeCheckFailure
    //  $continue:
    //
    // Without a preceding lower-bound check only the upper test is emitted
    // (the helper label is still inserted but has no branch targeting it):
    //
    //      CMP src1, upperBound
    //      JLE $continue
    //      CALL IntRangeCheckFailure
    //  $continue:
    //
    // Both CheckUpperIntBound and any fused CheckLowerIntBound are removed;
    // returns the instruction that preceded the lowered sequence.
    bool lowerBoundCheckPresent = instr->m_prev->m_opcode == Js::OpCode::CheckLowerIntBound;
    IR::Instr * instrPrev = lowerBoundCheckPresent ? instr->m_prev->m_prev : instr->m_prev;
    IR::Instr * lowerBoundCheckInstr = lowerBoundCheckPresent ? instr->m_prev : nullptr;
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, false /*isOpHelper*/);
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /*isOpHelper*/);
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32());
    if (lowerBoundCheckInstr)
    {
        // Sources are unlinked from the dying check instructions so the new
        // compare/branch instructions take ownership of the operands.
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrGt_A, helperLabel, instr);
        Assert(lowerBoundCheckInstr->GetSrc1()->IsInt32() || lowerBoundCheckInstr->GetSrc1()->IsUInt32());
        InsertCompareBranch(lowerBoundCheckInstr->UnlinkSrc1(), lowerBoundCheckInstr->UnlinkSrc2(), Js::OpCode::BrGe_A, continueLabel, instr);
    }
    else
    {
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrLe_A, continueLabel, instr);
    }
    instr->InsertBefore(helperLabel);
    IR::Instr* helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::HelperIntRangeCheckFailure);
    instr->InsertAfter(continueLabel);
    instr->Remove();
    if (lowerBoundCheckInstr)
    {
        lowerBoundCheckInstr->Remove();
    }
    return instrPrev;
}
- #endif
|