| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
27722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678267926802681268226832684268526862687268826892690269126922693269426952696269726982699270027012702270327042705270627072708270927102711271227132714271527162717271827192720272127222723272427252726272727282729273027312732273327342735273627372738273927402741274227432744274527462747274827492750275127522753275427552756275727582759276027612762276327642765276627672768276927702771277227732774277527762
77727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175317631773178317931803181318231833184318531863187318831893190319131923193319431953196319731983199320032013202320332043205320632073208320932103211321232133214321532163217321832193220322132223223322432253226322732283229323032313232323332343235323632373238323932403241324232433244324532463247324832493250325132523253325432553256325732583259326032613262326332643265326632673268326932703271327232733274327532763
27732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627362836293630363136323633363436353636363736383639364036413642364336443645364636473648364936503651365236533654365536563657365836593660366136623663366436653666366736683669367036713672367336743675367636773678367936803681368236833684368536863687368836893690369136923693369436953696369736983699370037013702370337043705370637073708370937103711371237133714371537163717371837193720372137223723372437253726372737283729373037313732373337343735373637373738373937403741374237433744374537463747374837493750375137523753375437553756375737583759376037613762376337643765376637673768376937703771377237733774377537763
77737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881388238833884388538863887388838893890389138923893389438953896389738983899390039013902390339043905390639073908390939103911391239133914391539163917391839193920392139223923392439253926392739283929393039313932393339343935393639373938393939403941394239433944394539463947394839493950395139523953395439553956395739583959396039613962396339643965396639673968396939703971397239733974397539763977397839793980398139823983398439853986398739883989399039913992399339943995399639973998399940004001400240034004400540064007400840094010401140124013401440154016401740184019402040214022402340244025402640274028402940304031403240334034403540364037403840394040404140424043404440454046404740484049405040514052405340544055405640574058405940604061406240634064406540664067406840694070407140724073407440754076407740784079408040814082408340844085408640874088408940904091409240934094409540964097409840994100410141024103410441054106410741084109411041114112411341144115411641174118411941204121412241234124412541264127412841294130413141324133413441354136413741384139414041414142414341444145414641474148414941504151415241534154415541564157415841594160416141624163416441654166416741684169417041714172417341744175417641774178417941804181418241834184418541864187418841894190419141924193419441954196419741984199420042014202420342044205420642074208420942104211421242134214421542164217421842194220422142224223422442254226422742284229423042314232423342344235423642374238423942404241424242434244424542464247424842494250425142524253425442554256425742584259426042614262426342644265426642674268426942704271427242734274427542764
27742784279428042814282428342844285428642874288428942904291429242934294429542964297429842994300430143024303430443054306430743084309431043114312431343144315431643174318431943204321432243234324432543264327432843294330433143324333433443354336433743384339434043414342434343444345434643474348434943504351435243534354435543564357435843594360436143624363436443654366436743684369437043714372437343744375437643774378437943804381438243834384438543864387438843894390439143924393439443954396439743984399440044014402440344044405440644074408440944104411441244134414441544164417441844194420442144224423442444254426442744284429443044314432443344344435443644374438443944404441444244434444444544464447444844494450445144524453445444554456445744584459446044614462446344644465446644674468446944704471447244734474447544764477447844794480448144824483448444854486448744884489449044914492449344944495449644974498449945004501450245034504450545064507450845094510451145124513451445154516451745184519452045214522452345244525452645274528452945304531453245334534453545364537453845394540454145424543454445454546454745484549455045514552455345544555455645574558455945604561456245634564456545664567456845694570457145724573457445754576457745784579458045814582458345844585458645874588458945904591459245934594459545964597459845994600460146024603460446054606460746084609461046114612461346144615461646174618461946204621462246234624462546264627462846294630463146324633463446354636463746384639464046414642464346444645464646474648464946504651465246534654465546564657465846594660466146624663466446654666466746684669467046714672467346744675467646774678467946804681468246834684468546864687468846894690469146924693469446954696469746984699470047014702470347044705470647074708470947104711471247134714471547164717471847194720472147224723472447254726472747284729473047314732473347344735473647374738473947404741474247434744474547464747474847494750475147524753475447554756475747584759476047614762476347644765476647674768476947704771477247734774477547764
77747784779478047814782478347844785478647874788478947904791479247934794479547964797479847994800480148024803480448054806480748084809481048114812481348144815481648174818481948204821482248234824482548264827482848294830483148324833483448354836483748384839484048414842484348444845484648474848484948504851485248534854485548564857485848594860486148624863486448654866486748684869487048714872487348744875487648774878487948804881488248834884488548864887488848894890489148924893489448954896489748984899490049014902490349044905490649074908490949104911491249134914491549164917491849194920492149224923492449254926492749284929493049314932493349344935493649374938493949404941494249434944494549464947494849494950495149524953495449554956495749584959496049614962496349644965496649674968496949704971497249734974497549764977497849794980498149824983498449854986498749884989499049914992499349944995499649974998499950005001500250035004500550065007500850095010501150125013501450155016501750185019502050215022502350245025502650275028502950305031503250335034503550365037503850395040504150425043504450455046504750485049505050515052505350545055505650575058505950605061506250635064506550665067506850695070507150725073507450755076507750785079508050815082508350845085508650875088508950905091509250935094509550965097509850995100510151025103510451055106510751085109511051115112511351145115511651175118511951205121512251235124512551265127512851295130513151325133513451355136513751385139514051415142514351445145514651475148514951505151515251535154515551565157515851595160516151625163516451655166516751685169517051715172517351745175517651775178517951805181518251835184518551865187518851895190519151925193519451955196519751985199520052015202520352045205520652075208520952105211521252135214521552165217521852195220522152225223522452255226522752285229523052315232523352345235523652375238523952405241524252435244524552465247524852495250525152525253525452555256525752585259526052615262526352645265526652675268526952705271527252735274527552765
27752785279528052815282528352845285528652875288528952905291529252935294529552965297529852995300530153025303530453055306530753085309531053115312531353145315531653175318531953205321532253235324532553265327532853295330533153325333533453355336533753385339534053415342534353445345534653475348534953505351535253535354535553565357535853595360536153625363536453655366536753685369537053715372537353745375537653775378537953805381538253835384538553865387538853895390539153925393539453955396539753985399540054015402540354045405540654075408540954105411541254135414541554165417541854195420542154225423542454255426542754285429543054315432543354345435543654375438543954405441544254435444544554465447544854495450545154525453545454555456545754585459546054615462546354645465546654675468546954705471547254735474547554765477547854795480548154825483548454855486548754885489549054915492549354945495549654975498549955005501550255035504550555065507550855095510551155125513551455155516551755185519552055215522552355245525552655275528552955305531553255335534553555365537553855395540554155425543554455455546554755485549555055515552555355545555555655575558555955605561556255635564556555665567556855695570557155725573557455755576557755785579558055815582558355845585558655875588558955905591559255935594559555965597559855995600560156025603560456055606560756085609561056115612561356145615561656175618561956205621562256235624562556265627562856295630563156325633563456355636563756385639564056415642564356445645564656475648564956505651565256535654565556565657565856595660566156625663566456655666566756685669567056715672567356745675567656775678567956805681568256835684568556865687568856895690569156925693569456955696569756985699570057015702570357045705570657075708570957105711571257135714571557165717571857195720572157225723572457255726572757285729573057315732573357345735573657375738573957405741574257435744574557465747574857495750575157525753575457555756575757585759576057615762576357645765576657675768576957705771577257735774577557765
77757785779578057815782578357845785578657875788578957905791579257935794579557965797579857995800580158025803580458055806580758085809581058115812581358145815581658175818581958205821582258235824582558265827582858295830583158325833583458355836583758385839584058415842584358445845584658475848584958505851585258535854585558565857585858595860586158625863586458655866586758685869587058715872587358745875587658775878587958805881588258835884588558865887588858895890589158925893589458955896589758985899590059015902590359045905590659075908590959105911591259135914591559165917591859195920592159225923592459255926592759285929593059315932593359345935593659375938593959405941594259435944594559465947594859495950595159525953595459555956595759585959596059615962596359645965596659675968596959705971597259735974597559765977597859795980598159825983598459855986598759885989599059915992599359945995599659975998599960006001600260036004600560066007600860096010601160126013601460156016601760186019602060216022602360246025602660276028602960306031603260336034603560366037603860396040604160426043604460456046604760486049605060516052605360546055605660576058605960606061606260636064606560666067606860696070607160726073607460756076607760786079608060816082608360846085608660876088608960906091609260936094609560966097609860996100610161026103610461056106610761086109611061116112611361146115611661176118611961206121612261236124612561266127612861296130613161326133613461356136613761386139614061416142614361446145614661476148614961506151615261536154615561566157615861596160616161626163616461656166616761686169617061716172617361746175617661776178617961806181618261836184618561866187618861896190619161926193619461956196619761986199620062016202620362046205620662076208620962106211621262136214621562166217621862196220622162226223622462256226622762286229623062316232623362346235623662376238623962406241624262436244624562466247624862496250625162526253625462556256625762586259626062616262626362646265626662676268626962706271627262736274627562766
27762786279628062816282628362846285628662876288628962906291629262936294629562966297629862996300630163026303630463056306630763086309631063116312631363146315631663176318631963206321632263236324632563266327632863296330633163326333633463356336633763386339634063416342634363446345634663476348634963506351635263536354635563566357635863596360636163626363636463656366636763686369637063716372637363746375637663776378637963806381638263836384638563866387638863896390639163926393639463956396639763986399640064016402640364046405640664076408640964106411641264136414641564166417641864196420642164226423642464256426642764286429643064316432643364346435643664376438643964406441644264436444644564466447644864496450645164526453645464556456645764586459646064616462646364646465646664676468646964706471647264736474647564766477647864796480648164826483648464856486648764886489649064916492649364946495649664976498649965006501650265036504650565066507650865096510651165126513651465156516651765186519652065216522652365246525652665276528652965306531653265336534653565366537653865396540654165426543654465456546654765486549655065516552655365546555655665576558655965606561656265636564656565666567656865696570657165726573657465756576657765786579658065816582658365846585658665876588658965906591659265936594659565966597659865996600660166026603660466056606660766086609661066116612661366146615661666176618661966206621662266236624662566266627662866296630663166326633663466356636663766386639664066416642664366446645664666476648664966506651665266536654665566566657665866596660666166626663666466656666666766686669667066716672667366746675667666776678667966806681668266836684668566866687668866896690669166926693669466956696669766986699670067016702670367046705670667076708670967106711671267136714671567166717671867196720672167226723672467256726672767286729673067316732673367346735673667376738673967406741674267436744674567466747674867496750675167526753675467556756675767586759676067616762676367646765676667676768676967706771677267736774677567766
77767786779678067816782678367846785678667876788678967906791679267936794679567966797679867996800680168026803680468056806680768086809681068116812681368146815681668176818681968206821682268236824682568266827682868296830683168326833683468356836683768386839684068416842684368446845684668476848684968506851685268536854685568566857685868596860686168626863686468656866686768686869687068716872687368746875687668776878687968806881688268836884688568866887688868896890689168926893689468956896689768986899690069016902690369046905690669076908690969106911691269136914691569166917691869196920692169226923692469256926692769286929693069316932693369346935693669376938693969406941694269436944694569466947694869496950695169526953695469556956695769586959696069616962696369646965696669676968696969706971697269736974697569766977697869796980698169826983698469856986698769886989699069916992699369946995699669976998699970007001700270037004700570067007700870097010701170127013701470157016701770187019702070217022702370247025702670277028702970307031703270337034703570367037703870397040704170427043704470457046704770487049705070517052705370547055705670577058705970607061706270637064706570667067706870697070707170727073707470757076707770787079708070817082708370847085708670877088708970907091709270937094709570967097709870997100710171027103710471057106710771087109711071117112711371147115711671177118711971207121712271237124712571267127712871297130713171327133713471357136713771387139714071417142714371447145714671477148714971507151715271537154715571567157715871597160716171627163716471657166716771687169717071717172717371747175717671777178717971807181718271837184718571867187718871897190719171927193719471957196719771987199720072017202720372047205720672077208720972107211721272137214721572167217721872197220722172227223722472257226722772287229723072317232723372347235723672377238723972407241724272437244724572467247724872497250725172527253725472557256725772587259726072617262726372647265726672677268726972707271727272737274727572767
27772787279728072817282728372847285728672877288728972907291729272937294729572967297729872997300730173027303730473057306730773087309731073117312731373147315731673177318731973207321732273237324732573267327732873297330733173327333733473357336733773387339734073417342734373447345734673477348734973507351735273537354735573567357735873597360736173627363736473657366736773687369737073717372737373747375737673777378737973807381738273837384738573867387738873897390739173927393739473957396739773987399740074017402740374047405740674077408740974107411741274137414741574167417741874197420742174227423742474257426742774287429743074317432743374347435743674377438743974407441744274437444744574467447744874497450745174527453745474557456745774587459746074617462746374647465746674677468746974707471747274737474747574767477747874797480748174827483748474857486748774887489749074917492749374947495749674977498749975007501750275037504750575067507750875097510751175127513751475157516751775187519752075217522752375247525752675277528752975307531753275337534753575367537753875397540754175427543754475457546754775487549755075517552755375547555755675577558755975607561756275637564756575667567756875697570757175727573757475757576757775787579758075817582758375847585758675877588758975907591759275937594759575967597759875997600760176027603760476057606760776087609761076117612761376147615761676177618761976207621762276237624762576267627762876297630763176327633763476357636763776387639764076417642764376447645764676477648764976507651765276537654765576567657765876597660766176627663766476657666766776687669767076717672767376747675767676777678767976807681768276837684768576867687768876897690769176927693769476957696769776987699770077017702770377047705770677077708770977107711771277137714771577167717771877197720772177227723772477257726772777287729773077317732773377347735773677377738773977407741774277437744774577467747774877497750775177527753775477557756775777587759776077617762776377647765776677677768776977707771777277737774777577767
77777787779778077817782778377847785778677877788778977907791779277937794779577967797779877997800780178027803780478057806780778087809781078117812781378147815781678177818781978207821782278237824782578267827782878297830783178327833783478357836783778387839784078417842784378447845784678477848784978507851785278537854785578567857785878597860786178627863786478657866786778687869787078717872787378747875787678777878787978807881788278837884788578867887788878897890789178927893789478957896789778987899790079017902790379047905790679077908790979107911791279137914791579167917791879197920792179227923792479257926792779287929793079317932793379347935793679377938793979407941794279437944794579467947794879497950795179527953795479557956795779587959796079617962796379647965796679677968796979707971797279737974797579767977797879797980798179827983798479857986798779887989799079917992799379947995799679977998799980008001800280038004800580068007800880098010801180128013801480158016801780188019802080218022802380248025802680278028802980308031803280338034803580368037803880398040804180428043804480458046804780488049805080518052805380548055805680578058805980608061806280638064806580668067806880698070807180728073807480758076807780788079808080818082808380848085808680878088808980908091809280938094809580968097809880998100810181028103810481058106810781088109811081118112811381148115811681178118811981208121812281238124812581268127812881298130813181328133813481358136813781388139814081418142814381448145814681478148814981508151815281538154815581568157815881598160816181628163816481658166816781688169817081718172817381748175817681778178817981808181818281838184818581868187818881898190819181928193819481958196819781988199820082018202820382048205820682078208820982108211821282138214821582168217821882198220822182228223822482258226822782288229823082318232823382348235823682378238823982408241824282438244824582468247824882498250825182528253825482558256825782588259826082618262826382648265826682678268826982708271827282738274827582768
27782788279828082818282828382848285828682878288828982908291829282938294829582968297829882998300830183028303830483058306830783088309831083118312831383148315831683178318831983208321832283238324832583268327832883298330833183328333833483358336833783388339834083418342834383448345834683478348834983508351835283538354835583568357835883598360836183628363836483658366836783688369837083718372837383748375837683778378837983808381838283838384838583868387838883898390839183928393839483958396839783988399840084018402840384048405840684078408840984108411841284138414841584168417841884198420842184228423842484258426842784288429843084318432843384348435843684378438843984408441844284438444844584468447844884498450845184528453845484558456845784588459846084618462846384648465846684678468846984708471847284738474847584768477847884798480848184828483848484858486848784888489849084918492849384948495849684978498849985008501850285038504850585068507850885098510851185128513851485158516851785188519852085218522852385248525852685278528852985308531853285338534853585368537853885398540854185428543854485458546854785488549855085518552855385548555855685578558855985608561856285638564856585668567856885698570857185728573857485758576857785788579858085818582858385848585858685878588858985908591859285938594859585968597859885998600860186028603860486058606860786088609861086118612861386148615861686178618861986208621862286238624862586268627862886298630863186328633863486358636863786388639864086418642864386448645864686478648864986508651865286538654865586568657865886598660866186628663866486658666866786688669867086718672867386748675867686778678867986808681868286838684868586868687868886898690869186928693869486958696869786988699870087018702870387048705870687078708870987108711871287138714871587168717871887198720872187228723872487258726872787288729873087318732873387348735873687378738873987408741874287438744874587468747874887498750875187528753875487558756875787588759876087618762876387648765876687678768876987708771877287738774877587768
77787788779878087818782878387848785878687878788878987908791879287938794879587968797879887998800880188028803880488058806880788088809881088118812881388148815881688178818881988208821882288238824882588268827882888298830883188328833883488358836883788388839884088418842884388448845884688478848884988508851885288538854885588568857885888598860886188628863886488658866886788688869887088718872887388748875887688778878887988808881888288838884888588868887888888898890889188928893889488958896889788988899890089018902890389048905890689078908890989108911891289138914891589168917891889198920892189228923892489258926892789288929893089318932893389348935893689378938893989408941894289438944894589468947894889498950895189528953895489558956895789588959896089618962896389648965896689678968896989708971897289738974897589768977897889798980898189828983898489858986898789888989899089918992899389948995899689978998899990009001900290039004900590069007900890099010901190129013901490159016901790189019902090219022902390249025902690279028902990309031903290339034903590369037903890399040904190429043904490459046904790489049905090519052905390549055905690579058905990609061906290639064906590669067906890699070907190729073907490759076907790789079908090819082908390849085908690879088908990909091909290939094909590969097909890999100910191029103910491059106910791089109911091119112911391149115911691179118911991209121912291239124912591269127912891299130913191329133913491359136913791389139914091419142914391449145914691479148914991509151915291539154915591569157915891599160916191629163916491659166916791689169917091719172917391749175917691779178917991809181918291839184918591869187918891899190919191929193919491959196919791989199920092019202920392049205920692079208920992109211921292139214921592169217921892199220922192229223922492259226922792289229923092319232923392349235923692379238923992409241924292439244924592469247924892499250925192529253925492559256925792589259926092619262926392649265926692679268926992709271927292739274927592769
27792789279928092819282928392849285928692879288928992909291929292939294929592969297929892999300930193029303930493059306930793089309931093119312931393149315931693179318931993209321932293239324932593269327932893299330933193329333933493359336933793389339934093419342934393449345934693479348934993509351935293539354935593569357935893599360936193629363936493659366936793689369937093719372937393749375937693779378937993809381938293839384938593869387938893899390939193929393939493959396939793989399940094019402940394049405940694079408940994109411941294139414941594169417941894199420942194229423942494259426942794289429943094319432943394349435943694379438943994409441944294439444944594469447944894499450945194529453945494559456945794589459946094619462946394649465946694679468946994709471947294739474947594769477947894799480948194829483948494859486948794889489949094919492949394949495949694979498949995009501950295039504950595069507950895099510951195129513951495159516951795189519952095219522952395249525952695279528952995309531953295339534953595369537953895399540954195429543954495459546954795489549955095519552955395549555955695579558955995609561956295639564956595669567956895699570957195729573957495759576957795789579958095819582958395849585958695879588958995909591959295939594959595969597959895999600960196029603960496059606960796089609961096119612961396149615961696179618961996209621962296239624962596269627962896299630963196329633963496359636963796389639964096419642964396449645964696479648964996509651965296539654965596569657965896599660966196629663966496659666966796689669967096719672967396749675967696779678967996809681968296839684968596869687968896899690969196929693969496959696969796989699970097019702970397049705970697079708970997109711971297139714971597169717971897199720972197229723972497259726972797289729973097319732973397349735973697379738973997409741974297439744974597469747974897499750975197529753975497559756975797589759976097619762976397649765976697679768976997709771977297739774977597769
77797789779978097819782978397849785978697879788978997909791979297939794979597969797979897999800980198029803980498059806980798089809981098119812981398149815981698179818981998209821982298239824982598269827982898299830983198329833983498359836983798389839984098419842984398449845984698479848984998509851985298539854985598569857985898599860986198629863986498659866986798689869987098719872987398749875987698779878987998809881988298839884988598869887988898899890989198929893989498959896989798989899990099019902990399049905990699079908990999109911991299139914991599169917991899199920992199229923992499259926992799289929993099319932993399349935993699379938993999409941994299439944994599469947994899499950995199529953995499559956995799589959996099619962996399649965996699679968996999709971997299739974997599769977997899799980998199829983998499859986998799889989999099919992999399949995999699979998999910000100011000210003100041000510006100071000810009100101001110012100131001410015100161001710018100191002010021100221002310024100251002610027100281002910030100311003210033100341003510036100371003810039100401004110042100431004410045100461004710048100491005010051100521005310054100551005610057100581005910060100611006210063100641006510066100671006810069100701007110072100731007410075100761007710078100791008010081100821008310084100851008610087100881008910090100911009210093100941009510096100971009810099101001010110102101031010410105101061010710108101091011010111101121011310114101151011610117101181011910120101211012210123101241012510126101271012810129101301013110132101331013410135101361013710138101391014010141101421014310144101451014610147101481014910150101511015210153101541015510156101571015810159101601016110162101631016410165101661016710168101691017010171101721017310174101751017610177101781017910180101811018210183101841018510186101871018810189101901019110192101931019410195101961019710198101991020010201102021020310204102051020610207102081020910210102111021210213102141021510216102171021810219102201022
11022210223102241022510226102271022810229102301023110232102331023410235102361023710238102391024010241102421024310244102451024610247102481024910250102511025210253102541025510256102571025810259102601026110262102631026410265102661026710268102691027010271102721027310274102751027610277102781027910280102811028210283102841028510286102871028810289102901029110292102931029410295102961029710298102991030010301103021030310304103051030610307103081030910310103111031210313103141031510316103171031810319103201032110322103231032410325103261032710328103291033010331103321033310334103351033610337103381033910340103411034210343103441034510346103471034810349103501035110352103531035410355103561035710358103591036010361103621036310364103651036610367103681036910370103711037210373103741037510376103771037810379103801038110382103831038410385103861038710388103891039010391103921039310394103951039610397103981039910400104011040210403104041040510406104071040810409104101041110412104131041410415104161041710418104191042010421104221042310424104251042610427104281042910430104311043210433104341043510436104371043810439104401044110442104431044410445104461044710448104491045010451104521045310454104551045610457104581045910460104611046210463104641046510466104671046810469104701047110472104731047410475104761047710478104791048010481104821048310484104851048610487104881048910490104911049210493104941049510496104971049810499105001050110502105031050410505105061050710508105091051010511105121051310514105151051610517105181051910520105211052210523105241052510526105271052810529105301053110532105331053410535105361053710538105391054010541105421054310544105451054610547105481054910550105511055210553105541055510556105571055810559105601056110562105631056410565105661056710568105691057010571105721057310574105751057610577105781057910580105811058210583105841058510586105871058810589105901059110592105931059410595105961059710598105991060010601106021060310604106051060610607106081060910610106111061210613106141061510616106171061810619106201062
11062210623106241062510626106271062810629106301063110632106331063410635106361063710638106391064010641106421064310644106451064610647106481064910650106511065210653106541065510656106571065810659106601066110662106631066410665106661066710668106691067010671106721067310674106751067610677106781067910680106811068210683106841068510686106871068810689106901069110692106931069410695106961069710698106991070010701107021070310704107051070610707107081070910710107111071210713107141071510716107171071810719107201072110722107231072410725107261072710728107291073010731107321073310734107351073610737107381073910740107411074210743107441074510746107471074810749107501075110752107531075410755107561075710758107591076010761107621076310764107651076610767107681076910770107711077210773107741077510776107771077810779107801078110782107831078410785107861078710788107891079010791107921079310794107951079610797107981079910800108011080210803108041080510806108071080810809108101081110812108131081410815108161081710818108191082010821108221082310824108251082610827108281082910830108311083210833108341083510836108371083810839108401084110842108431084410845108461084710848108491085010851108521085310854108551085610857108581085910860108611086210863108641086510866108671086810869108701087110872108731087410875108761087710878108791088010881108821088310884108851088610887108881088910890108911089210893108941089510896108971089810899109001090110902109031090410905109061090710908109091091010911109121091310914109151091610917109181091910920109211092210923109241092510926109271092810929109301093110932109331093410935109361093710938109391094010941109421094310944109451094610947109481094910950109511095210953109541095510956109571095810959109601096110962109631096410965109661096710968109691097010971109721097310974109751097610977109781097910980109811098210983109841098510986109871098810989109901099110992109931099410995109961099710998109991100011001110021100311004110051100611007110081100911010110111101211013110141101511016110171101811019110201102
11102211023110241102511026110271102811029110301103111032110331103411035110361103711038110391104011041110421104311044110451104611047110481104911050110511105211053110541105511056110571105811059110601106111062110631106411065110661106711068110691107011071110721107311074110751107611077110781107911080110811108211083110841108511086110871108811089110901109111092110931109411095110961109711098110991110011101111021110311104111051110611107111081110911110111111111211113111141111511116111171111811119111201112111122111231112411125111261112711128111291113011131111321113311134111351113611137111381113911140111411114211143111441114511146111471114811149111501115111152111531115411155111561115711158111591116011161111621116311164111651116611167111681116911170111711117211173111741117511176111771117811179111801118111182111831118411185111861118711188111891119011191111921119311194111951119611197111981119911200112011120211203112041120511206112071120811209112101121111212112131121411215112161121711218112191122011221112221122311224112251122611227112281122911230112311123211233112341123511236112371123811239112401124111242112431124411245112461124711248112491125011251112521125311254112551125611257112581125911260112611126211263112641126511266112671126811269112701127111272112731127411275112761127711278112791128011281112821128311284112851128611287112881128911290112911129211293112941129511296112971129811299113001130111302113031130411305113061130711308113091131011311113121131311314113151131611317113181131911320113211132211323113241132511326113271132811329113301133111332113331133411335113361133711338113391134011341113421134311344113451134611347113481134911350113511135211353113541135511356113571135811359113601136111362113631136411365113661136711368113691137011371113721137311374113751137611377113781137911380113811138211383113841138511386113871138811389113901139111392113931139411395113961139711398113991140011401114021140311404114051140611407114081140911410114111141211413114141141511416114171141811419114201142
11142211423114241142511426114271142811429114301143111432114331143411435114361143711438114391144011441114421144311444114451144611447114481144911450114511145211453114541145511456114571145811459114601146111462114631146411465114661146711468114691147011471114721147311474114751147611477114781147911480114811148211483114841148511486114871148811489114901149111492114931149411495114961149711498114991150011501115021150311504115051150611507115081150911510115111151211513115141151511516115171151811519115201152111522115231152411525115261152711528115291153011531115321153311534115351153611537115381153911540115411154211543115441154511546115471154811549115501155111552115531155411555115561155711558115591156011561115621156311564115651156611567115681156911570115711157211573115741157511576115771157811579115801158111582115831158411585115861158711588115891159011591115921159311594115951159611597115981159911600116011160211603116041160511606116071160811609116101161111612116131161411615116161161711618116191162011621116221162311624116251162611627116281162911630116311163211633116341163511636116371163811639116401164111642116431164411645116461164711648116491165011651116521165311654116551165611657116581165911660116611166211663116641166511666116671166811669116701167111672116731167411675116761167711678116791168011681116821168311684116851168611687116881168911690116911169211693116941169511696116971169811699117001170111702117031170411705117061170711708117091171011711117121171311714117151171611717117181171911720117211172211723117241172511726117271172811729117301173111732117331173411735117361173711738117391174011741117421174311744117451174611747117481174911750117511175211753117541175511756117571175811759117601176111762117631176411765117661176711768117691177011771117721177311774117751177611777117781177911780117811178211783117841178511786117871178811789117901179111792117931179411795117961179711798117991180011801118021180311804118051180611807118081180911810118111181211813118141181511816118171181811819118201182
11182211823118241182511826118271182811829118301183111832118331183411835118361183711838118391184011841118421184311844118451184611847118481184911850118511185211853118541185511856118571185811859118601186111862118631186411865118661186711868118691187011871118721187311874118751187611877118781187911880118811188211883118841188511886118871188811889118901189111892118931189411895118961189711898118991190011901119021190311904119051190611907119081190911910119111191211913119141191511916119171191811919119201192111922119231192411925119261192711928119291193011931119321193311934119351193611937119381193911940119411194211943119441194511946119471194811949119501195111952119531195411955119561195711958119591196011961119621196311964119651196611967119681196911970119711197211973119741197511976119771197811979119801198111982119831198411985119861198711988119891199011991119921199311994119951199611997119981199912000120011200212003120041200512006120071200812009120101201112012120131201412015120161201712018120191202012021120221202312024120251202612027120281202912030120311203212033120341203512036120371203812039120401204112042120431204412045120461204712048120491205012051120521205312054120551205612057120581205912060120611206212063120641206512066120671206812069120701207112072120731207412075120761207712078120791208012081120821208312084120851208612087120881208912090120911209212093120941209512096120971209812099121001210112102121031210412105121061210712108121091211012111121121211312114121151211612117121181211912120121211212212123121241212512126121271212812129121301213112132121331213412135121361213712138121391214012141121421214312144121451214612147121481214912150121511215212153121541215512156121571215812159121601216112162121631216412165121661216712168121691217012171121721217312174121751217612177121781217912180121811218212183121841218512186121871218812189121901219112192121931219412195121961219712198121991220012201122021220312204122051220612207122081220912210122111221212213122141221512216122171221812219122201222
11222212223122241222512226122271222812229122301223112232122331223412235122361223712238122391224012241122421224312244122451224612247122481224912250122511225212253122541225512256122571225812259122601226112262122631226412265122661226712268122691227012271122721227312274122751227612277122781227912280122811228212283122841228512286122871228812289122901229112292122931229412295122961229712298122991230012301123021230312304123051230612307123081230912310123111231212313123141231512316123171231812319123201232112322123231232412325123261232712328123291233012331123321233312334123351233612337123381233912340123411234212343123441234512346123471234812349123501235112352123531235412355123561235712358123591236012361123621236312364123651236612367123681236912370123711237212373123741237512376123771237812379123801238112382123831238412385123861238712388123891239012391123921239312394123951239612397123981239912400124011240212403124041240512406124071240812409124101241112412124131241412415124161241712418124191242012421124221242312424124251242612427124281242912430124311243212433124341243512436124371243812439124401244112442124431244412445124461244712448124491245012451124521245312454124551245612457124581245912460124611246212463124641246512466124671246812469124701247112472124731247412475124761247712478124791248012481124821248312484124851248612487124881248912490124911249212493124941249512496124971249812499125001250112502125031250412505125061250712508125091251012511125121251312514125151251612517125181251912520125211252212523125241252512526125271252812529125301253112532125331253412535125361253712538125391254012541125421254312544125451254612547125481254912550125511255212553125541255512556125571255812559125601256112562125631256412565125661256712568125691257012571125721257312574125751257612577125781257912580125811258212583125841258512586125871258812589125901259112592125931259412595125961259712598125991260012601126021260312604126051260612607126081260912610126111261212613126141261512616126171261812619126201262
11262212623126241262512626126271262812629126301263112632126331263412635126361263712638126391264012641126421264312644126451264612647126481264912650126511265212653126541265512656126571265812659126601266112662126631266412665126661266712668126691267012671126721267312674126751267612677126781267912680126811268212683126841268512686126871268812689126901269112692126931269412695126961269712698126991270012701127021270312704127051270612707127081270912710127111271212713127141271512716127171271812719127201272112722127231272412725127261272712728127291273012731127321273312734127351273612737127381273912740127411274212743127441274512746127471274812749127501275112752127531275412755127561275712758127591276012761127621276312764127651276612767127681276912770127711277212773127741277512776127771277812779127801278112782127831278412785127861278712788127891279012791127921279312794127951279612797127981279912800128011280212803128041280512806128071280812809128101281112812128131281412815128161281712818128191282012821128221282312824128251282612827128281282912830128311283212833128341283512836128371283812839128401284112842128431284412845128461284712848128491285012851128521285312854128551285612857128581285912860128611286212863128641286512866128671286812869128701287112872128731287412875128761287712878128791288012881128821288312884128851288612887128881288912890128911289212893128941289512896128971289812899129001290112902129031290412905129061290712908129091291012911129121291312914129151291612917129181291912920129211292212923129241292512926129271292812929129301293112932129331293412935129361293712938129391294012941129421294312944129451294612947129481294912950129511295212953129541295512956129571295812959129601296112962129631296412965129661296712968129691297012971129721297312974129751297612977129781297912980129811298212983129841298512986129871298812989129901299112992129931299412995129961299712998129991300013001130021300313004130051300613007130081300913010130111301213013130141301513016130171301813019130201302
11302213023130241302513026130271302813029130301303113032130331303413035130361303713038130391304013041130421304313044130451304613047130481304913050130511305213053130541305513056130571305813059130601306113062130631306413065130661306713068130691307013071130721307313074130751307613077130781307913080130811308213083130841308513086130871308813089130901309113092130931309413095130961309713098130991310013101131021310313104131051310613107131081310913110131111311213113131141311513116131171311813119131201312113122131231312413125131261312713128131291313013131131321313313134131351313613137131381313913140131411314213143131441314513146131471314813149131501315113152131531315413155131561315713158131591316013161131621316313164131651316613167131681316913170131711317213173131741317513176131771317813179131801318113182131831318413185131861318713188131891319013191131921319313194131951319613197131981319913200132011320213203132041320513206132071320813209132101321113212132131321413215132161321713218132191322013221132221322313224132251322613227132281322913230132311323213233132341323513236132371323813239132401324113242132431324413245132461324713248132491325013251132521325313254132551325613257132581325913260132611326213263132641326513266132671326813269132701327113272132731327413275132761327713278132791328013281132821328313284132851328613287132881328913290132911329213293132941329513296132971329813299133001330113302133031330413305133061330713308133091331013311133121331313314133151331613317133181331913320133211332213323133241332513326133271332813329133301333113332133331333413335133361333713338133391334013341133421334313344133451334613347133481334913350133511335213353133541335513356133571335813359133601336113362133631336413365133661336713368133691337013371133721337313374133751337613377133781337913380133811338213383133841338513386133871338813389133901339113392133931339413395133961339713398133991340013401134021340313404134051340613407134081340913410134111341213413134141341513416134171341813419134201342
11342213423134241342513426134271342813429134301343113432134331343413435134361343713438134391344013441134421344313444134451344613447134481344913450134511345213453134541345513456134571345813459134601346113462134631346413465134661346713468134691347013471134721347313474134751347613477134781347913480134811348213483134841348513486134871348813489134901349113492134931349413495134961349713498134991350013501135021350313504135051350613507135081350913510135111351213513135141351513516135171351813519135201352113522135231352413525135261352713528135291353013531135321353313534135351353613537135381353913540135411354213543135441354513546135471354813549135501355113552135531355413555135561355713558135591356013561135621356313564135651356613567135681356913570135711357213573135741357513576135771357813579135801358113582135831358413585135861358713588135891359013591135921359313594135951359613597135981359913600136011360213603136041360513606136071360813609136101361113612136131361413615136161361713618136191362013621136221362313624136251362613627136281362913630136311363213633136341363513636136371363813639136401364113642136431364413645136461364713648136491365013651136521365313654136551365613657136581365913660136611366213663136641366513666136671366813669136701367113672136731367413675136761367713678136791368013681136821368313684136851368613687136881368913690136911369213693136941369513696136971369813699137001370113702137031370413705137061370713708137091371013711137121371313714137151371613717137181371913720137211372213723137241372513726137271372813729137301373113732137331373413735137361373713738137391374013741137421374313744137451374613747137481374913750137511375213753137541375513756137571375813759137601376113762137631376413765137661376713768137691377013771137721377313774137751377613777137781377913780137811378213783137841378513786137871378813789137901379113792137931379413795137961379713798137991380013801138021380313804138051380613807138081380913810138111381213813138141381513816138171381813819138201382
11382213823138241382513826138271382813829138301383113832138331383413835138361383713838138391384013841138421384313844138451384613847138481384913850138511385213853138541385513856138571385813859138601386113862138631386413865138661386713868138691387013871138721387313874138751387613877138781387913880138811388213883138841388513886138871388813889138901389113892138931389413895138961389713898138991390013901139021390313904139051390613907139081390913910139111391213913139141391513916139171391813919139201392113922139231392413925139261392713928139291393013931139321393313934139351393613937139381393913940139411394213943139441394513946139471394813949139501395113952139531395413955139561395713958139591396013961139621396313964139651396613967139681396913970139711397213973139741397513976139771397813979139801398113982139831398413985139861398713988139891399013991139921399313994139951399613997139981399914000140011400214003140041400514006140071400814009140101401114012140131401414015140161401714018140191402014021140221402314024140251402614027140281402914030140311403214033140341403514036140371403814039140401404114042140431404414045140461404714048140491405014051140521405314054140551405614057140581405914060140611406214063140641406514066140671406814069140701407114072140731407414075140761407714078140791408014081140821408314084140851408614087140881408914090140911409214093140941409514096140971409814099141001410114102141031410414105141061410714108141091411014111141121411314114141151411614117141181411914120141211412214123141241412514126141271412814129141301413114132141331413414135141361413714138141391414014141141421414314144141451414614147141481414914150141511415214153141541415514156141571415814159141601416114162141631416414165141661416714168141691417014171141721417314174141751417614177141781417914180141811418214183141841418514186141871418814189141901419114192141931419414195141961419714198141991420014201142021420314204142051420614207142081420914210142111421214213142141421514216142171421814219142201422
11422214223142241422514226142271422814229142301423114232142331423414235142361423714238142391424014241142421424314244142451424614247142481424914250142511425214253142541425514256142571425814259142601426114262142631426414265142661426714268142691427014271142721427314274142751427614277142781427914280142811428214283142841428514286142871428814289142901429114292142931429414295142961429714298142991430014301143021430314304143051430614307143081430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462
11462214623146241462514626146271462814629146301463114632146331463414635146361463714638146391464014641146421464314644146451464614647146481464914650146511465214653146541465514656146571465814659146601466114662146631466414665146661466714668146691467014671146721467314674146751467614677146781467914680146811468214683146841468514686146871468814689146901469114692146931469414695146961469714698146991470014701147021470314704147051470614707147081470914710147111471214713147141471514716147171471814719147201472114722147231472414725147261472714728147291473014731147321473314734147351473614737147381473914740147411474214743147441474514746147471474814749147501475114752147531475414755147561475714758147591476014761147621476314764147651476614767147681476914770147711477214773147741477514776147771477814779147801478114782147831478414785147861478714788147891479014791147921479314794147951479614797147981479914800148011480214803148041480514806148071480814809148101481114812148131481414815148161481714818148191482014821148221482314824148251482614827148281482914830148311483214833148341483514836148371483814839148401484114842148431484414845148461484714848148491485014851148521485314854148551485614857148581485914860148611486214863148641486514866148671486814869148701487114872148731487414875148761487714878148791488014881148821488314884148851488614887148881488914890148911489214893148941489514896148971489814899149001490114902149031490414905149061490714908149091491014911149121491314914149151491614917149181491914920149211492214923149241492514926149271492814929149301493114932149331493414935149361493714938149391494014941149421494314944149451494614947149481494914950149511495214953149541495514956149571495814959149601496114962149631496414965149661496714968149691497014971149721497314974149751497614977149781497914980149811498214983149841498514986149871498814989149901499114992149931499414995149961499714998149991500015001150021500315004150051500615007150081500915010150111501215013150141501515016150171501815019150201502
11502215023150241502515026150271502815029150301503115032150331503415035150361503715038150391504015041150421504315044150451504615047150481504915050150511505215053150541505515056150571505815059150601506115062150631506415065150661506715068150691507015071150721507315074150751507615077150781507915080150811508215083150841508515086150871508815089150901509115092150931509415095150961509715098150991510015101151021510315104151051510615107151081510915110151111511215113151141511515116151171511815119151201512115122151231512415125151261512715128151291513015131151321513315134151351513615137151381513915140151411514215143151441514515146151471514815149151501515115152151531515415155151561515715158151591516015161151621516315164151651516615167151681516915170151711517215173151741517515176151771517815179151801518115182151831518415185151861518715188151891519015191151921519315194151951519615197151981519915200152011520215203152041520515206152071520815209152101521115212152131521415215152161521715218152191522015221152221522315224152251522615227152281522915230152311523215233152341523515236152371523815239152401524115242152431524415245152461524715248152491525015251152521525315254152551525615257152581525915260152611526215263152641526515266152671526815269152701527115272152731527415275152761527715278152791528015281152821528315284152851528615287152881528915290152911529215293152941529515296152971529815299153001530115302153031530415305153061530715308153091531015311153121531315314153151531615317153181531915320153211532215323153241532515326153271532815329153301533115332153331533415335153361533715338153391534015341153421534315344153451534615347153481534915350153511535215353153541535515356153571535815359153601536115362153631536415365153661536715368153691537015371153721537315374153751537615377153781537915380153811538215383153841538515386153871538815389153901539115392153931539415395153961539715398153991540015401154021540315404154051540615407154081540915410154111541215413154141541515416154171541815419154201542
11542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582
11582215823158241582515826158271582815829158301583115832158331583415835158361583715838158391584015841158421584315844158451584615847158481584915850158511585215853158541585515856158571585815859158601586115862158631586415865158661586715868158691587015871158721587315874158751587615877158781587915880158811588215883158841588515886158871588815889158901589115892158931589415895158961589715898158991590015901159021590315904159051590615907159081590915910159111591215913159141591515916159171591815919159201592115922159231592415925159261592715928159291593015931159321593315934159351593615937159381593915940159411594215943159441594515946159471594815949159501595115952159531595415955159561595715958159591596015961159621596315964159651596615967159681596915970159711597215973159741597515976159771597815979159801598115982159831598415985159861598715988159891599015991159921599315994159951599615997159981599916000160011600216003160041600516006160071600816009160101601116012160131601416015160161601716018160191602016021160221602316024160251602616027160281602916030160311603216033160341603516036160371603816039160401604116042160431604416045160461604716048160491605016051160521605316054160551605616057160581605916060160611606216063160641606516066160671606816069160701607116072160731607416075160761607716078160791608016081160821608316084160851608616087160881608916090160911609216093160941609516096160971609816099161001610116102161031610416105161061610716108161091611016111161121611316114161151611616117161181611916120161211612216123161241612516126161271612816129161301613116132161331613416135161361613716138161391614016141161421614316144161451614616147161481614916150161511615216153161541615516156161571615816159161601616116162161631616416165161661616716168161691617016171161721617316174161751617616177161781617916180161811618216183161841618516186161871618816189161901619116192161931619416195161961619716198161991620016201162021620316204162051620616207162081620916210162111621216213162141621516216162171621816219162201622
11622216223162241622516226162271622816229162301623116232162331623416235162361623716238162391624016241162421624316244162451624616247162481624916250162511625216253162541625516256162571625816259162601626116262162631626416265162661626716268162691627016271162721627316274162751627616277162781627916280162811628216283162841628516286162871628816289162901629116292162931629416295162961629716298162991630016301163021630316304163051630616307163081630916310163111631216313163141631516316163171631816319163201632116322163231632416325163261632716328163291633016331163321633316334163351633616337163381633916340163411634216343163441634516346163471634816349163501635116352163531635416355163561635716358163591636016361163621636316364163651636616367163681636916370163711637216373163741637516376163771637816379163801638116382163831638416385163861638716388163891639016391163921639316394163951639616397163981639916400164011640216403164041640516406164071640816409164101641116412164131641416415164161641716418164191642016421164221642316424164251642616427164281642916430164311643216433164341643516436164371643816439164401644116442164431644416445164461644716448164491645016451164521645316454164551645616457164581645916460164611646216463164641646516466164671646816469164701647116472164731647416475164761647716478164791648016481164821648316484164851648616487164881648916490164911649216493164941649516496164971649816499165001650116502165031650416505165061650716508165091651016511165121651316514165151651616517165181651916520165211652216523165241652516526165271652816529165301653116532165331653416535165361653716538165391654016541165421654316544165451654616547165481654916550165511655216553165541655516556165571655816559165601656116562165631656416565165661656716568165691657016571165721657316574165751657616577165781657916580165811658216583165841658516586165871658816589165901659116592165931659416595165961659716598165991660016601166021660316604166051660616607166081660916610166111661216613166141661516616166171661816619166201662
11662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702
11702217023170241702517026170271702817029170301703117032170331703417035170361703717038170391704017041170421704317044170451704617047170481704917050170511705217053170541705517056170571705817059170601706117062170631706417065170661706717068170691707017071170721707317074170751707617077170781707917080170811708217083170841708517086170871708817089170901709117092170931709417095170961709717098170991710017101171021710317104171051710617107171081710917110171111711217113171141711517116171171711817119171201712117122171231712417125171261712717128171291713017131171321713317134171351713617137171381713917140171411714217143171441714517146171471714817149171501715117152171531715417155171561715717158171591716017161171621716317164171651716617167171681716917170171711717217173171741717517176171771717817179171801718117182171831718417185171861718717188171891719017191171921719317194171951719617197171981719917200172011720217203172041720517206172071720817209172101721117212172131721417215172161721717218172191722017221172221722317224172251722617227172281722917230172311723217233172341723517236172371723817239172401724117242172431724417245172461724717248172491725017251172521725317254172551725617257172581725917260172611726217263172641726517266172671726817269172701727117272172731727417275172761727717278172791728017281172821728317284172851728617287172881728917290172911729217293172941729517296172971729817299173001730117302173031730417305173061730717308173091731017311173121731317314173151731617317173181731917320173211732217323173241732517326173271732817329173301733117332173331733417335173361733717338173391734017341173421734317344173451734617347173481734917350173511735217353173541735517356173571735817359173601736117362173631736417365173661736717368173691737017371173721737317374173751737617377173781737917380173811738217383173841738517386173871738817389173901739117392173931739417395173961739717398173991740017401174021740317404174051740617407174081740917410174111741217413174141741517416174171741817419174201742
11742217423174241742517426174271742817429174301743117432174331743417435174361743717438174391744017441174421744317444174451744617447174481744917450174511745217453174541745517456174571745817459174601746117462174631746417465174661746717468174691747017471174721747317474174751747617477174781747917480174811748217483174841748517486174871748817489174901749117492174931749417495174961749717498174991750017501175021750317504175051750617507175081750917510175111751217513175141751517516175171751817519175201752117522175231752417525175261752717528175291753017531175321753317534175351753617537175381753917540175411754217543175441754517546175471754817549175501755117552175531755417555175561755717558175591756017561175621756317564175651756617567175681756917570175711757217573175741757517576175771757817579175801758117582175831758417585175861758717588175891759017591175921759317594175951759617597175981759917600176011760217603176041760517606176071760817609176101761117612176131761417615176161761717618176191762017621176221762317624176251762617627176281762917630176311763217633176341763517636176371763817639176401764117642176431764417645176461764717648176491765017651176521765317654176551765617657176581765917660176611766217663176641766517666176671766817669176701767117672176731767417675176761767717678176791768017681176821768317684176851768617687176881768917690176911769217693176941769517696176971769817699177001770117702177031770417705177061770717708177091771017711177121771317714177151771617717177181771917720177211772217723177241772517726177271772817729177301773117732177331773417735177361773717738177391774017741177421774317744177451774617747177481774917750177511775217753177541775517756177571775817759177601776117762177631776417765177661776717768177691777017771177721777317774177751777617777177781777917780177811778217783177841778517786177871778817789177901779117792177931779417795177961779717798177991780017801178021780317804178051780617807178081780917810178111781217813178141781517816178171781817819178201782
11782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822
11822218223182241822518226182271822818229182301823118232182331823418235182361823718238182391824018241182421824318244182451824618247182481824918250182511825218253182541825518256182571825818259182601826118262182631826418265182661826718268182691827018271182721827318274182751827618277182781827918280182811828218283182841828518286182871828818289182901829118292182931829418295182961829718298182991830018301183021830318304183051830618307183081830918310183111831218313183141831518316183171831818319183201832118322183231832418325183261832718328183291833018331183321833318334183351833618337183381833918340183411834218343183441834518346183471834818349183501835118352183531835418355183561835718358183591836018361183621836318364183651836618367183681836918370183711837218373183741837518376183771837818379183801838118382183831838418385183861838718388183891839018391183921839318394183951839618397183981839918400184011840218403184041840518406184071840818409184101841118412184131841418415184161841718418184191842018421184221842318424184251842618427184281842918430184311843218433184341843518436184371843818439184401844118442184431844418445184461844718448184491845018451184521845318454184551845618457184581845918460184611846218463184641846518466184671846818469184701847118472184731847418475184761847718478184791848018481184821848318484184851848618487184881848918490184911849218493184941849518496184971849818499185001850118502185031850418505185061850718508185091851018511185121851318514185151851618517185181851918520185211852218523185241852518526185271852818529185301853118532185331853418535185361853718538185391854018541185421854318544185451854618547185481854918550185511855218553185541855518556185571855818559185601856118562185631856418565185661856718568185691857018571185721857318574185751857618577185781857918580185811858218583185841858518586185871858818589185901859118592185931859418595185961859718598185991860018601186021860318604186051860618607186081860918610186111861218613186141861518616186171861818619186201862
11862218623186241862518626186271862818629186301863118632186331863418635186361863718638186391864018641186421864318644186451864618647186481864918650186511865218653186541865518656186571865818659186601866118662186631866418665186661866718668186691867018671186721867318674186751867618677186781867918680186811868218683186841868518686186871868818689186901869118692186931869418695186961869718698186991870018701187021870318704187051870618707187081870918710187111871218713187141871518716187171871818719187201872118722187231872418725187261872718728187291873018731187321873318734187351873618737187381873918740187411874218743187441874518746187471874818749187501875118752187531875418755187561875718758187591876018761187621876318764187651876618767187681876918770187711877218773187741877518776187771877818779187801878118782187831878418785187861878718788187891879018791187921879318794187951879618797187981879918800188011880218803188041880518806188071880818809188101881118812188131881418815188161881718818188191882018821188221882318824188251882618827188281882918830188311883218833188341883518836188371883818839188401884118842188431884418845188461884718848188491885018851188521885318854188551885618857188581885918860188611886218863188641886518866188671886818869188701887118872188731887418875188761887718878188791888018881188821888318884188851888618887188881888918890188911889218893188941889518896188971889818899189001890118902189031890418905189061890718908189091891018911189121891318914189151891618917189181891918920189211892218923189241892518926189271892818929189301893118932189331893418935189361893718938189391894018941189421894318944189451894618947189481894918950189511895218953189541895518956189571895818959189601896118962189631896418965189661896718968189691897018971189721897318974189751897618977189781897918980189811898218983189841898518986189871898818989189901899118992189931899418995189961899718998189991900019001190021900319004190051900619007190081900919010190111901219013190141901519016190171901819019190201902
11902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942
11942219423194241942519426194271942819429194301943119432194331943419435194361943719438194391944019441194421944319444194451944619447194481944919450194511945219453194541945519456194571945819459194601946119462194631946419465194661946719468194691947019471194721947319474194751947619477194781947919480194811948219483194841948519486194871948819489194901949119492194931949419495194961949719498194991950019501195021950319504195051950619507195081950919510195111951219513195141951519516195171951819519195201952119522195231952419525195261952719528195291953019531195321953319534195351953619537195381953919540195411954219543195441954519546195471954819549195501955119552195531955419555195561955719558195591956019561195621956319564195651956619567195681956919570195711957219573195741957519576195771957819579195801958119582195831958419585195861958719588195891959019591195921959319594195951959619597195981959919600196011960219603196041960519606196071960819609196101961119612196131961419615196161961719618196191962019621196221962319624196251962619627196281962919630196311963219633196341963519636196371963819639196401964119642196431964419645196461964719648196491965019651196521965319654196551965619657196581965919660196611966219663196641966519666196671966819669196701967119672196731967419675196761967719678196791968019681196821968319684196851968619687196881968919690196911969219693196941969519696196971969819699197001970119702197031970419705197061970719708197091971019711197121971319714197151971619717197181971919720197211972219723197241972519726197271972819729197301973119732197331973419735197361973719738197391974019741197421974319744197451974619747197481974919750197511975219753197541975519756197571975819759197601976119762197631976419765197661976719768197691977019771197721977319774197751977619777197781977919780197811978219783197841978519786197871978819789197901979119792197931979419795197961979719798197991980019801198021980319804198051980619807198081980919810198111981219813198141981519816198171981819819198201982
11982219823198241982519826198271982819829198301983119832198331983419835198361983719838198391984019841198421984319844198451984619847198481984919850198511985219853198541985519856198571985819859198601986119862198631986419865198661986719868198691987019871198721987319874198751987619877198781987919880198811988219883198841988519886198871988819889198901989119892198931989419895198961989719898198991990019901199021990319904199051990619907199081990919910199111991219913199141991519916199171991819919199201992119922199231992419925199261992719928199291993019931199321993319934199351993619937199381993919940199411994219943199441994519946199471994819949199501995119952199531995419955199561995719958199591996019961199621996319964199651996619967199681996919970199711997219973199741997519976199771997819979199801998119982199831998419985199861998719988199891999019991199921999319994199951999619997199981999920000200012000220003200042000520006200072000820009200102001120012200132001420015200162001720018200192002020021200222002320024200252002620027200282002920030200312003220033200342003520036200372003820039200402004120042200432004420045200462004720048200492005020051200522005320054200552005620057200582005920060200612006220063200642006520066200672006820069200702007120072200732007420075200762007720078200792008020081200822008320084200852008620087200882008920090200912009220093200942009520096200972009820099201002010120102201032010420105201062010720108201092011020111201122011320114201152011620117201182011920120201212012220123201242012520126201272012820129201302013120132201332013420135201362013720138201392014020141201422014320144201452014620147201482014920150201512015220153201542015520156201572015820159201602016120162201632016420165201662016720168201692017020171201722017320174201752017620177201782017920180201812018220183201842018520186201872018820189201902019120192201932019420195201962019720198201992020020201202022020320204202052020620207202082020920210202112021220213202142021520216202172021820219202202022
12022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062
12062220623206242062520626206272062820629206302063120632206332063420635206362063720638206392064020641206422064320644206452064620647206482064920650206512065220653206542065520656206572065820659206602066120662206632066420665206662066720668206692067020671206722067320674206752067620677206782067920680206812068220683206842068520686206872068820689206902069120692206932069420695206962069720698206992070020701207022070320704207052070620707207082070920710207112071220713207142071520716207172071820719207202072120722207232072420725207262072720728207292073020731207322073320734207352073620737207382073920740207412074220743207442074520746207472074820749207502075120752207532075420755207562075720758207592076020761207622076320764207652076620767207682076920770207712077220773207742077520776207772077820779207802078120782207832078420785207862078720788207892079020791207922079320794207952079620797207982079920800208012080220803208042080520806208072080820809208102081120812208132081420815208162081720818208192082020821208222082320824208252082620827208282082920830208312083220833208342083520836208372083820839208402084120842208432084420845208462084720848208492085020851208522085320854208552085620857208582085920860208612086220863208642086520866208672086820869208702087120872208732087420875208762087720878208792088020881208822088320884208852088620887208882088920890208912089220893208942089520896208972089820899209002090120902209032090420905209062090720908209092091020911209122091320914209152091620917209182091920920209212092220923209242092520926209272092820929209302093120932209332093420935209362093720938209392094020941209422094320944209452094620947209482094920950209512095220953209542095520956209572095820959209602096120962209632096420965209662096720968209692097020971209722097320974209752097620977209782097920980209812098220983209842098520986209872098820989209902099120992209932099420995209962099720998209992100021001210022100321004210052100621007210082100921010210112101221013210142101521016210172101821019210202102
12102221023210242102521026210272102821029210302103121032210332103421035210362103721038210392104021041210422104321044210452104621047210482104921050210512105221053210542105521056210572105821059210602106121062210632106421065210662106721068210692107021071210722107321074210752107621077210782107921080210812108221083210842108521086210872108821089210902109121092210932109421095210962109721098210992110021101211022110321104211052110621107211082110921110211112111221113211142111521116211172111821119211202112121122211232112421125211262112721128211292113021131211322113321134211352113621137211382113921140211412114221143211442114521146211472114821149211502115121152211532115421155211562115721158211592116021161211622116321164211652116621167211682116921170211712117221173211742117521176211772117821179211802118121182211832118421185211862118721188211892119021191211922119321194211952119621197211982119921200212012120221203212042120521206212072120821209212102121121212212132121421215212162121721218212192122021221212222122321224212252122621227212282122921230212312123221233212342123521236212372123821239212402124121242212432124421245212462124721248212492125021251212522125321254212552125621257212582125921260212612126221263212642126521266212672126821269212702127121272212732127421275212762127721278212792128021281212822128321284212852128621287212882128921290212912129221293212942129521296212972129821299213002130121302213032130421305213062130721308213092131021311213122131321314213152131621317213182131921320213212132221323213242132521326213272132821329213302133121332213332133421335213362133721338213392134021341213422134321344213452134621347213482134921350213512135221353213542135521356213572135821359213602136121362213632136421365213662136721368213692137021371213722137321374213752137621377213782137921380213812138221383213842138521386213872138821389213902139121392213932139421395213962139721398213992140021401214022140321404214052140621407214082140921410214112141221413214142141521416214172141821419214202142
12142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182
121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074 |
- //-------------------------------------------------------------------------------------------------------
- // Copyright (C) Microsoft. All rights reserved.
- // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
- //-------------------------------------------------------------------------------------------------------
- #include "BackEnd.h"
- #include "Debug\DebuggingFlags.h"
- #include "Debug\DiagProbe.h"
- #include "Debug\DebugManager.h"
- // Parser includes
- #include "RegexCommon.h"
- #include "RegexPattern.h"
- #include "ExternalLowerer.h"
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::Lower
- ///
- /// Lowerer's main entry point. Lowers this function.
- ///
- ///----------------------------------------------------------------------------
- void
- Lowerer::Lower()
- {
- this->m_func->StopMaintainByteCodeOffset();
- NoRecoverMemoryJitArenaAllocator localAlloc(L"BE-Lower", this->m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
- this->m_alloc = &localAlloc;
- BVSparse<JitArenaAllocator> localInitializedTempSym(&localAlloc);
- this->initializedTempSym = &localInitializedTempSym;
- BVSparse<JitArenaAllocator> localAddToLiveOnBackEdgeSyms(&localAlloc);
- this->addToLiveOnBackEdgeSyms = &localAddToLiveOnBackEdgeSyms;
- Assert(this->m_func->GetCloneMap() == nullptr);
- m_lowererMD.Init(this);
- bool defaultDoFastPath = this->m_func->DoFastPaths();
- bool loopFastPath = this->m_func->DoLoopFastPaths();
- if (!loopFastPath || !defaultDoFastPath
- #ifdef INLINE_CACHE_STATS
- || PHASE_STATS1(Js::PolymorphicInlineCachePhase)
- #endif
- )
- {
- //arguments[] access is similar to array fast path hence disable when array fastpath is disabled.
- //loopFastPath is always true except explicitly disabled
- //defaultDoFastPath can be false when we the source code size is huge
- m_func->SetHasStackArgs(false);
- }
- if (m_func->HasAnyStackNestedFunc())
- {
- EnsureStackFunctionListStackSym();
- }
- if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
- {
- AllocStackClosure();
- }
- if (m_func->IsJitInDebugMode())
- {
- // Initialize metadata of local var slots.
- // Too late to wait until Register Allocator, as we need the offset when lowerering bailout for debugger.
- int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
- if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
- {
- // MOV [EBP + m_func->GetHasLocalVarChangedOffset()], 0
- StackSym* sym = StackSym::New(TyInt8, m_func);
- sym->m_offset = hasLocalVarChangedOffset;
- sym->m_allocated = true;
- IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
- LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
- #ifdef DBG
- // Pre-fill all local slots with a pattern. This will help identify non-initialized/garbage var values.
- // Note that in the beginning of the function in bytecode we should initialize all locals to undefined.
- uint32 localSlotCount = m_func->GetJnFunction()->GetEndNonTempLocalIndex() - m_func->GetJnFunction()->GetFirstNonTempLocalIndex();
- for (uint i = 0; i < localSlotCount; ++i)
- {
- int offset = m_func->GetLocalVarSlotOffset(i);
- IRType opnd1Type;
- opnd2;
- uint32 slotSize = Func::GetDiagLocalSlotSize();
- switch (slotSize)
- {
- case 4:
- opnd1Type = TyInt32;
- opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern4, opnd1Type, m_func);
- break;
- case 8:
- opnd1Type = TyInt64;
- opnd2 = IR::AddrOpnd::New((Js::Var)Func::c_debugFillPattern8, IR::AddrOpndKindConstant, m_func);
- break;
- default:
- AssertMsg(FALSE, "Unsupported slot size!");
- opnd1Type = TyIllegal;
- opnd2 = nullptr;
- }
- sym = StackSym::New(opnd1Type, m_func);
- sym->m_offset = offset;
- sym->m_allocated = true;
- opnd1 = IR::SymOpnd::New(sym, TyInt32, m_func);
- LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
- }
- #endif
- }
- Assert(!m_func->HasAnyStackNestedFunc());
- }
- this->LowerRange(m_func->m_headInstr, m_func->m_tailInstr, defaultDoFastPath, loopFastPath);
- this->m_func->ClearCloneMap();
- if (m_func->HasAnyStackNestedFunc())
- {
- EnsureZeroLastStackFunctionNext();
- }
- if (!m_func->IsSimpleJit())
- {
- Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
- Assert(entryPointInfo->GetJitTransferData() != nullptr && !entryPointInfo->GetJitTransferData()->GetIsReady());
- }
- this->initializedTempSym = nullptr;
- this->m_alloc = nullptr;
- this->m_func->DisableConstandAddressLoadHoist();
- }
- void
- Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFastPath, bool defaultDoLoopFastPath)
- {
- bool noMathFastPath;
- bool noFieldFastPath;
- bool fNoLower = false;
- noFieldFastPath = !defaultDoFastPath;
- noMathFastPath = !defaultDoFastPath;
- #if DBG_DUMP
- wchar_t * globOptInstrString = nullptr;
- #endif
- FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrEnd, instrStart)
- {
- // Try to peep this`
- instr = this->PreLowerPeepInstr(instr, &instrPrev);
- #if DBG
- IR::Instr * verifyLegalizeInstrNext = instr->m_next;
- #endif
- // If we have debugger bailout as part of real instr (not separate BailForDebugger instr),
- // extract/split out BailOutForDebugger into separate instr, if needed.
- // The instr can have just debugger bailout, or debugger bailout + other shared bailout.
- // Note that by the time we get here, we should not have aux-only bailout (in globopt we promote it to normal bailout).
- if (m_func->IsJitInDebugMode() && instr->HasBailOutInfo() &&
- ((instr->GetBailOutKind() & IR::BailOutForDebuggerBits) && instr->m_opcode != Js::OpCode::BailForDebugger ||
- instr->HasAuxBailOut()))
- {
- instr = this->SplitBailForDebugger(instr); // Change instr, as returned is the one we need to lower next.
- instrPrev = instr->m_prev; // Change just in case if instr got changed.
- }
- #if DBG_DUMP
- if (!instr->IsLowered() && !instr->IsLabelInstr()
- && (CONFIG_FLAG(ForcePostLowerGlobOptInstrString) ||
- PHASE_DUMP(Js::LowererPhase, m_func) ||
- PHASE_DUMP(Js::LinearScanPhase, m_func) ||
- PHASE_DUMP(Js::RegAllocPhase, m_func) ||
- PHASE_DUMP(Js::PeepsPhase, m_func) ||
- PHASE_DUMP(Js::LayoutPhase, m_func) ||
- PHASE_DUMP(Js::EmitterPhase, m_func) ||
- PHASE_DUMP(Js::EncoderPhase, m_func) ||
- PHASE_DUMP(Js::BackEndPhase, m_func)))
- {
- if(instr->m_next && instr->m_next->m_opcode != Js::OpCode::StatementBoundary && !instr->m_next->IsLabelInstr())
- {
- instr->m_next->globOptInstrString = globOptInstrString;
- }
- globOptInstrString = instr->DumpString();
- }
- #endif
- IR::Opnd *src1;
- IR::RegOpnd *srcReg1;
- IR::RegOpnd *srcReg2;
- if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsMultiBranch() && instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
- {
- Loop * loop = instr->AsBranchInstr()->GetTarget()->GetLoop();
- if (this->outerMostLoopLabel == nullptr && !loop->isProcessed)
- {
- while (loop && loop->GetLoopTopInstr()) // some loops are optimized away so that they are not loops anymore.
- // They do, however, stay in the loop graph but don't have loop top labels assigned to them
- {
- this->outerMostLoopLabel = loop->GetLoopTopInstr();
- Assert(this->outerMostLoopLabel->m_isLoopTop);
- // landing pad must fall through to the loop
- Assert(this->outerMostLoopLabel->m_prev->HasFallThrough());
- loop = loop->parent;
- }
- this->initializedTempSym->ClearAll();
- }
- noFieldFastPath = !defaultDoLoopFastPath;
- noMathFastPath = !defaultDoLoopFastPath;
- }
- #ifdef INLINE_CACHE_STATS
- if(PHASE_STATS1(Js::PolymorphicInlineCachePhase))
- {
- // Always use the slow path, so we can track property accesses
- noFieldFastPath = true;
- }
- #endif
- switch(instr->m_opcode)
- {
- case Js::OpCode::LdHandlerScope:
- this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdHandlerScope);
- break;
- case Js::OpCode::InitSetFld:
- instrPrev = this->LowerStFld(instr, IR::HelperOP_InitSetter, IR::HelperOP_InitSetter, false);
- break;
- case Js::OpCode::InitGetFld:
- instrPrev = this->LowerStFld(instr, IR::HelperOP_InitGetter, IR::HelperOP_InitGetter, false);
- break;
- case Js::OpCode::InitProto:
- instrPrev = this->LowerStFld(instr, IR::HelperOP_InitProto, IR::HelperOP_InitProto, false);
- break;
- case Js::OpCode::LdArgCnt:
- this->LoadArgumentCount(instr);
- break;
- case Js::OpCode::LdStackArgPtr:
- this->LoadStackArgPtr(instr);
- break;
- case Js::OpCode::LdHeapArguments:
- case Js::OpCode::LdLetHeapArguments:
- instrPrev = m_lowererMD.LoadHeapArguments(instr);
- break;
- case Js::OpCode::LdArgumentsFromStack:
- instrPrev = this->LoadArgumentsFromStack(instr);
- break;
- case Js::OpCode::LdHeapArgsCached:
- case Js::OpCode::LdLetHeapArgsCached:
- m_lowererMD.LoadHeapArgsCached(instr);
- break;
- case Js::OpCode::InvalCachedScope:
- this->LowerBinaryHelper(instr, IR::HelperOP_InvalidateCachedScope);
- break;
- case Js::OpCode::NewScopeObject:
- m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObject);
- break;
- case Js::OpCode::NewStackScopeSlots:
- this->LowerNewScopeSlots(instr, m_func->DoStackScopeSlots());
- break;
- case Js::OpCode::NewScopeSlots:
- this->LowerNewScopeSlots(instr, false);
- break;
- case Js::OpCode::InitLocalClosure:
- // Real initialization of the stack pointers happens on entry to the function, so this instruction
- // (which exists to provide a def in the IR) can go away.
- instr->Remove();
- break;
- case Js::OpCode::NewScopeSlotsWithoutPropIds:
- this->LowerBinaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlotsWithoutPropIds);
- break;
- case Js::OpCode::NewBlockScope:
- m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewBlockScope);
- break;
- case Js::OpCode::NewPseudoScope:
- m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewPseudoScope);
- break;
- case Js::OpCode::CloneInnerScopeSlots:
- this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneInnerScopeSlots);
- break;
- case Js::OpCode::CloneBlockScope:
- this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneBlockScope);
- break;
- case Js::OpCode::GetCachedFunc:
- m_lowererMD.LowerGetCachedFunc(instr);
- break;
- case Js::OpCode::BrFncCachedScopeEq:
- case Js::OpCode::BrFncCachedScopeNeq:
- this->LowerBrFncCachedScopeEq(instr);
- break;
- case Js::OpCode::CommitScope:
- m_lowererMD.LowerCommitScope(instr);
- break;
- case Js::OpCode::LdFldForTypeOf:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf,
- IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf);
- break;
- case Js::OpCode::LdFld:
- case Js::OpCode::LdFldForCallApplyTarget:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic,
- IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic);
- break;
- case Js::OpCode::LdSuperFld:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr,
- IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr);
- break;
- case Js::OpCode::LdRootFld:
- instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic,
- IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic);
- break;
- case Js::OpCode::LdRootFldForTypeOf:
- instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf,
- IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf);
- break;
- case Js::OpCode::LdMethodFldPolyInlineMiss:
- instrPrev = LowerLdFld(instr, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic, true, nullptr, true);
- break;
- case Js::OpCode::LdMethodFld:
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic,
- IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic);
- break;
- case Js::OpCode::LdRootMethodFld:
- instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic,
- IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic);
- break;
- case Js::OpCode::ScopedLdMethodFld:
- // "Scoped" in ScopedLdMethodFld is a bit of a misnomer because it doesn't look through a scope chain.
- // Instead the op is to allow for either a LdRootMethodFld or LdMethodFld depending on whether the
- // object is the root object or not.
- instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic,
- IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic);
- break;
- case Js::OpCode::LdMethodFromFlags:
- {
- Assert(instr->HasBailOutInfo());
- bool success = m_lowererMD.GenerateFastLdMethodFromFlags(instr);
- AssertMsg(success, "Not expected to generate helper block here");
- break;
- }
- case Js::OpCode::CheckFixedFld:
- AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func->GetJnFunction()) || !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func->GetJnFunction()), "CheckFixedFld with fixed prop(Data|Method) phase disabled?");
- this->GenerateCheckFixedFld(instr);
- break;
- case Js::OpCode::CheckPropertyGuardAndLoadType:
- instrPrev = this->GeneratePropertyGuardCheckBailoutAndLoadType(instr);
- break;
- case Js::OpCode::CheckObjType:
- this->GenerateCheckObjType(instr);
- break;
- case Js::OpCode::AdjustObjType:
- this->LowerAdjustObjType(instr);
- break;
- case Js::OpCode::DeleteFld:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, false);
- break;
- case Js::OpCode::DeleteRootFld:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, false);
- break;
- case Js::OpCode::DeleteFldStrict:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, true);
- break;
- case Js::OpCode::DeleteRootFldStrict:
- instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, true);
- break;
- case Js::OpCode::ScopedLdFldForTypeOf:
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedLdFld(instr);
- }
- instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyForTypeOfScoped, true);
- break;
- case Js::OpCode::ScopedLdFld:
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedLdFld(instr);
- }
- instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyScoped, true);
- break;
- case Js::OpCode::ScopedLdInst:
- instrPrev = this->LowerScopedLdInst(instr, IR::HelperOp_GetInstanceScoped);
- break;
- case Js::OpCode::ScopedDeleteFld:
- instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, false);
- break;
- case Js::OpCode::ScopedDeleteFldStrict:
- instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, true);
- break;
- case Js::OpCode::NewScFunc:
- instrPrev = this->LowerNewScFunc(instr);
- break;
- case Js::OpCode::NewScGenFunc:
- instrPrev = this->LowerNewScGenFunc(instr);
- break;
- case Js::OpCode::StFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_None);
- break;
- case Js::OpCode::StSuperFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutValueWithThisPtr, IR::HelperOp_PatchPutValueWithThisPtrPolymorphic, true, Js::PropertyOperation_None);
- break;
- case Js::OpCode::StRootFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_Root);
- break;
- case Js::OpCode::StFldStrict:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_StrictMode);
- break;
- case Js::OpCode::StRootFldStrict:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
- IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_StrictModeRoot);
- break;
- case Js::OpCode::InitFld:
- case Js::OpCode::InitRootFld:
- instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic,
- IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic, false, Js::PropertyOperation_None);
- break;
- case Js::OpCode::ScopedInitFunc:
- instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_InitFuncScoped, false);
- break;
- case Js::OpCode::ScopedStFld:
- case Js::OpCode::ScopedStFldStrict:
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedStFld(instr);
- }
- instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_PatchSetPropertyScoped, true, true,
- instr->m_opcode == Js::OpCode::ScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode);
- break;
- case Js::OpCode::ConsoleScopedStFld:
- {
- if (!noFieldFastPath)
- {
- m_lowererMD.GenerateFastScopedStFld(instr);
- }
- Js::PropertyOperationFlags flags = static_cast<Js::PropertyOperationFlags>(Js::PropertyOperation_None | Js::PropertyOperation_AllowUndeclInConsoleScope);
- instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_ConsolePatchSetPropertyScoped, true, true, flags);
- break;
- }
- case Js::OpCode::LdStr:
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::CloneStr:
- {
- GenerateGetImmutableOrScriptUnreferencedString(instr->GetSrc1()->AsRegOpnd(), instr, IR::HelperOp_CompoundStringCloneForAppending, false);
- instr->Remove();
- break;
- }
- case Js::OpCode::NewScObjArray:
- instrPrev = this->LowerNewScObjArray(instr);
- break;
- case Js::OpCode::NewScObject:
- case Js::OpCode::NewScObjectSpread:
- case Js::OpCode::NewScObjArraySpread:
- instrPrev = this->LowerNewScObject(instr, true, true);
- break;
- case Js::OpCode::NewScObjectNoCtor:
- instrPrev = this->LowerNewScObject(instr, false, true);
- break;
- case Js::OpCode::NewScObjectNoCtorFull:
- instrPrev = this->LowerNewScObject(instr, false, true, true);
- break;
- case Js::OpCode::GetNewScObject:
- instrPrev = this->LowerGetNewScObject(instr);
- break;
- case Js::OpCode::UpdateNewScObjectCache:
- instrPrev = instr->m_prev;
- this->LowerUpdateNewScObjectCache(instr, instr->GetSrc2(), instr->GetSrc1(), true /* isCtorFunction */);
- instr->Remove();
- break;
- case Js::OpCode::NewScObjectSimple:
- this->LowerNewScObjectSimple(instr);
- break;
- case Js::OpCode::NewScObjectLiteral:
- this->LowerNewScObjectLiteral(instr);
- break;
- case Js::OpCode::LdPropIds:
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::StArrSegItem_A:
- instrPrev = this->LowerArraySegmentVars(instr);
- break;
- case Js::OpCode::InlineMathAcos:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Acos);
- break;
- case Js::OpCode::InlineMathAsin:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Asin);
- break;
- case Js::OpCode::InlineMathAtan:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan);
- break;
- case Js::OpCode::InlineMathAtan2:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan2);
- break;
- case Js::OpCode::InlineMathCos:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Cos);
- break;
- case Js::OpCode::InlineMathExp:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Exp);
- break;
- case Js::OpCode::InlineMathLog:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Log);
- break;
- case Js::OpCode::InlineMathPow:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
- break;
- case Js::OpCode::InlineMathSin:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Sin);
- break;
- case Js::OpCode::InlineMathSqrt:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathTan:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Tan);
- break;
- case Js::OpCode::InlineMathFloor:
- #if _M_X64
- if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJnFunction()->GetIsAsmjsMode())
- {
- m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_FloorFlt, IR::HelperDirectMath_FloorDb);
- break;
- }
- #endif
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathCeil:
- #if _M_X64
- if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJnFunction()->GetIsAsmjsMode())
- {
- m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_CeilFlt, IR::HelperDirectMath_CeilDb);
- break;
- }
- #endif
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathRound:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathAbs:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathImul:
- GenerateFastInlineMathImul(instr);
- break;
- case Js::OpCode::InlineMathClz32:
- GenerateFastInlineMathClz32(instr);
- break;
- case Js::OpCode::InlineMathFround:
- GenerateFastInlineMathFround(instr);
- break;
- case Js::OpCode::InlineMathMin:
- case Js::OpCode::InlineMathMax:
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
- break;
- case Js::OpCode::InlineMathRandom:
- this->GenerateFastInlineBuiltInMathRandom(instr);
- break;
- #ifdef ENABLE_DOM_FAST_PATH
- case Js::OpCode::DOMFastPathGetter:
- this->LowerFastInlineDOMFastPathGetter(instr);
- break;
- #endif
- case Js::OpCode::InlineArrayPush:
- this->GenerateFastInlineArrayPush(instr);
- break;
- case Js::OpCode::InlineArrayPop:
- this->GenerateFastInlineArrayPop(instr);
- break;
- //Now retrieve the function object from the ArgOut_A_InlineSpecialized instruction opcode to push it on the stack after all the other arguments have been pushed.
- //The lowering of the direct call to helper is handled by GenerateDirectCall (architecture specific).
- case Js::OpCode::CallDirect:
- {
- IR::Opnd * src1 = instr->GetSrc1();
- Assert(src1->IsHelperCallOpnd());
- switch (src1->AsHelperCallOpnd()->m_fnHelper)
- {
- case IR::JnHelperMethod::HelperString_Split:
- case IR::JnHelperMethod::HelperString_Match:
- GenerateFastInlineStringSplitMatch(instr);
- break;
- case IR::JnHelperMethod::HelperRegExp_Exec:
- GenerateFastInlineRegExpExec(instr);
- break;
- case IR::JnHelperMethod::HelperGlobalObject_ParseInt:
- GenerateFastInlineGlobalObjectParseInt(instr);
- break;
- case IR::JnHelperMethod::HelperString_FromCharCode:
- GenerateFastInlineStringFromCharCode(instr);
- break;
- case IR::JnHelperMethod::HelperString_FromCodePoint:
- GenerateFastInlineStringFromCodePoint(instr);
- break;
- case IR::JnHelperMethod::HelperString_CharAt:
- GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::String_CharAt);
- break;
- case IR::JnHelperMethod::HelperString_CharCodeAt:
- GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::String_CharCodeAt);
- break;
- case IR::JnHelperMethod::HelperString_Replace:
- GenerateFastInlineStringReplace(instr);
- break;
- }
- instrPrev = LowerCallDirect(instr);
- break;
- }
- case Js::OpCode::CallIDynamic:
- {
- Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
- instrPrev = this->LowerCallIDynamic(instr, (ushort)flags);
- break;
- }
- case Js::OpCode::CallIDynamicSpread:
- {
- Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
- instrPrev = this->LowerCallIDynamicSpread(instr, (ushort)flags);
- break;
- }
- case Js::OpCode::CallI:
- case Js::OpCode::CallINew:
- case Js::OpCode::CallIFixed:
- case Js::OpCode::CallINewTargetNew:
- {
- Js::CallFlags flags = Js::CallFlags_None;
- if (instr->isCtorCall)
- {
- flags = Js::CallFlags_New;
- }
- else
- {
- if (instr->m_opcode == Js::OpCode::CallINew)
- {
- flags = Js::CallFlags_New;
- }
- else if (instr->m_opcode == Js::OpCode::CallINewTargetNew)
- {
- flags = (Js::CallFlags) (Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget);
- }
- if (instr->GetDst())
- {
- flags = (Js::CallFlags) (flags | Js::CallFlags_Value);
- }
- else
- {
- flags = (Js::CallFlags) (flags | Js::CallFlags_NotUsed);
- }
- }
- if (!PHASE_OFF(Js::CallFastPathPhase, this->m_func) && !noMathFastPath)
- {
- // We shouldn't have turned this instruction into a fixed method call if we're calling one of the
- // built-ins we still inline in the lowerer.
- Assert(instr->m_opcode != Js::OpCode::CallIFixed || !Func::IsBuiltInInlinedInLowerer(instr->GetSrc1()));
- // Disable InlineBuiltInLibraryCall as it does not work well with 2nd chance reg alloc
- // and may invalidate live on back edge data by introducing refs across loops. See Winblue Bug: 577641
- //// Callee may still be a library built-in; if so, generate it inline.
- //if (this->InlineBuiltInLibraryCall(instr))
- //{
- // m_lowererMD.LowerCallI(instr, (ushort)flags, true /*isHelper*/);
- //}
- //else
- //{
- m_lowererMD.LowerCallI(instr, (ushort)flags);
- //}
- }
- else
- {
- m_lowererMD.LowerCallI(instr, (ushort)flags);
- }
- break;
- }
- case Js::OpCode::AsmJsCallI:
- m_lowererMD.LowerAsmJsCallI(instr);
- break;
- case Js::OpCode::AsmJsCallE:
- m_lowererMD.LowerAsmJsCallE(instr);
- break;
- case Js::OpCode::CallIEval:
- {
- Js::CallFlags flags = (Js::CallFlags)(Js::CallFlags_ExtraArg | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
- if (IsSpreadCall(instr))
- {
- instrPrev = LowerSpreadCall(instr, flags);
- }
- else
- {
- m_lowererMD.LowerCallI(instr, (ushort)flags);
- }
- #ifdef PERF_HINT
- if (PHASE_TRACE1(Js::PerfHintPhase))
- {
- WritePerfHint(PerfHints::CallsEval, this->m_func->GetJnFunction(), instr->GetByteCodeOffset());
- }
- #endif
- break;
- }
- case Js::OpCode::CallIPut:
- m_lowererMD.LowerCallPut(instr);
- break;
- case Js::OpCode::CallHelper:
- instrPrev = m_lowererMD.LowerCallHelper(instr);
- break;
- case Js::OpCode::Ret:
- if (instr->m_next->m_opcode != Js::OpCode::FunctionExit)
- {
- // If this RET isn't at the end of the function, insert a branch to
- // the epilog.
- IR::Instr *exitPrev = m_func->m_exitInstr->m_prev;
- if (!exitPrev->IsLabelInstr())
- {
- exitPrev = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- m_func->m_exitInstr->InsertBefore(exitPrev);
- }
- IR::BranchInstr *exitBr = IR::BranchInstr::New(Js::OpCode::Br,
- exitPrev->AsLabelInstr(), m_func);
- instr->InsertAfter(exitBr);
- m_lowererMD.LowerUncondBranch(exitBr);
- }
- m_lowererMD.LowerRet(instr);
- break;
- case Js::OpCode::LdArgumentsFromFrame:
- this->LoadArgumentsFromFrame(instr);
- break;
- case Js::OpCode::LdC_A_I4:
- src1 = instr->UnlinkSrc1();
- AssertMsg(src1->IsIntConstOpnd(), "Source of LdC_A_I4 should be an IntConst...");
- instrPrev = this->LowerLoadVar(instr,
- IR::AddrOpnd::NewFromNumber(static_cast<int32>(src1->AsIntConstOpnd()->GetValue()), this->m_func));
- src1->Free(this->m_func);
- break;
- case Js::OpCode::LdC_A_R8:
- src1 = instr->UnlinkSrc1();
- AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_A_R8 should be a FloatConst...");
- instrPrev = this->LowerLoadVar(instr, src1->AsFloatConstOpnd()->GetAddrOpnd(this->m_func));
- src1->Free(this->m_func);
- break;
- case Js::OpCode::LdC_F8_R8:
- src1 = instr->UnlinkSrc1();
- AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_F8_R8 should be a FloatConst...");
- instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloatConstOpnd()->m_value, instr);
- src1->Free(this->m_func);
- instr->Remove();
- break;
- case Js::OpCode::NewRegEx:
- instrPrev = this->LowerNewRegEx(instr);
- break;
- case Js::OpCode::Conv_Obj:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_ConvObject);
- break;
- case Js::OpCode::NewWithObject:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_NewWithObject);
- break;
- case Js::OpCode::LdCustomSpreadIteratorList:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_ToSpreadedFunctionArgument);
- break;
- case Js::OpCode::Conv_Num:
- this->LowerConvNum(instr, noMathFastPath);
- break;
- case Js::OpCode::Incr_A:
- if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Increment);
- }
- else
- {
- instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
- m_lowererMD.GenerateFastAdd(instr);
- instr->FreeSrc2();
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Increment));
- }
- break;
- case Js::OpCode::Decr_A:
- if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Decrement);
- }
- else
- {
- instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
- m_lowererMD.GenerateFastSub(instr);
- instr->FreeSrc2();
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Decrement));
- }
- break;
- case Js::OpCode::Neg_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Negate);
- }
- else if (m_lowererMD.GenerateFastNeg(instr))
- {
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Negate));
- }
- break;
- case Js::OpCode::Not_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Not);
- }
- else if (m_lowererMD.GenerateFastNot(instr))
- {
- this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Not));
- }
- break;
- case Js::OpCode::BrEq_I4:
- case Js::OpCode::BrNeq_I4:
- case Js::OpCode::BrGt_I4:
- case Js::OpCode::BrGe_I4:
- case Js::OpCode::BrLt_I4:
- case Js::OpCode::BrLe_I4:
- case Js::OpCode::BrUnGt_I4:
- case Js::OpCode::BrUnGe_I4:
- case Js::OpCode::BrUnLt_I4:
- case Js::OpCode::BrUnLe_I4:
- {
- // See calls to MarkOneFltTmpSym under BrSrEq. This is to handle the case
- // where a branch is type-specialized and uses the result of a float pref op,
- // which must then be saved to var at the def.
- StackSym *sym = instr->GetSrc1()->GetStackSym();
- if (sym)
- {
- sym = sym->GetVarEquivSym(nullptr);
- }
- sym = instr->GetSrc2()->GetStackSym();
- if (sym)
- {
- sym = sym->GetVarEquivSym(nullptr);
- }
- }
- // FALLTHROUGH
- case Js::OpCode::Neg_I4:
- case Js::OpCode::Not_I4:
- case Js::OpCode::Add_I4:
- case Js::OpCode::Sub_I4:
- case Js::OpCode::Mul_I4:
- case Js::OpCode::Rem_I4:
- case Js::OpCode::Or_I4:
- case Js::OpCode::Xor_I4:
- case Js::OpCode::And_I4:
- case Js::OpCode::Shl_I4:
- case Js::OpCode::Shr_I4:
- case Js::OpCode::ShrU_I4:
- case Js::OpCode::BrTrue_I4:
- case Js::OpCode::BrFalse_I4:
- if(instr->HasBailOutInfo())
- {
- const auto bailOutKind = instr->GetBailOutKind();
- if(bailOutKind & IR::BailOutOnResultConditions ||
- bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
- {
- const auto nonBailOutInstr = SplitBailOnResultCondition(instr);
- IR::LabelInstr *bailOutLabel, *skipBailOutLabel;
- LowerBailOnResultCondition(instr, &bailOutLabel, &skipBailOutLabel);
- LowerInstrWithBailOnResultCondition(nonBailOutInstr, bailOutKind, bailOutLabel, skipBailOutLabel);
- }
- else if(bailOutKind == IR::BailOnModByPowerOf2)
- {
- Assert(instr->m_opcode == Js::OpCode::Rem_I4);
- bool fastPath = GenerateSimplifiedInt4Rem(instr);
- Assert(fastPath);
- instr->FreeSrc1();
- instr->FreeSrc2();
- this->GenerateBailOut(instr);
- }
- }
- else
- {
- if (instr->m_opcode == Js::OpCode::Rem_I4)
- {
- // fast path
- this->GenerateSimplifiedInt4Rem(instr);
- // slow path
- this->LowerRemI4(instr);
- }
- #if defined(_M_IX86) || defined(_M_X64)
- else if (instr->m_opcode == Js::OpCode::Mul_I4)
- {
- if (!LowererMD::GenerateSimplifiedInt4Mul(instr))
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- }
- #endif
- else
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- }
- break;
- case Js::OpCode::Div_I4:
- this->LowerDivI4(instr);
- break;
- case Js::OpCode::Add_Ptr:
- m_lowererMD.EmitPtrInstr(instr);
- break;
- case Js::OpCode::Typeof:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Typeof);
- break;
- case Js::OpCode::TypeofElem:
- this->LowerLdElemI(instr, IR::HelperOp_TypeofElem, false);
- break;
- case Js::OpCode::LdLen_A:
- {
- bool fastPath = !noMathFastPath;
- if(!fastPath && instr->HasBailOutInfo())
- {
- // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
- // bailouts require the fast path.
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if(bailOutKind & IR::BailOutKindBits)
- {
- fastPath = true;
- }
- else
- {
- const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
- fastPath =
- bailOutKindMinusBits &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
- }
- }
- bool instrIsInHelperBlock;
- if(!fastPath)
- {
- LowerLdLen(instr, false);
- }
- else if(GenerateFastLdLen(instr, &instrIsInHelperBlock))
- {
- Assert(
- !instr->HasBailOutInfo() ||
- (instr->GetBailOutKind() & ~IR::BailOutKindBits) != IR::BailOutOnIrregularLength);
- LowerLdLen(instr, instrIsInHelperBlock);
- }
- break;
- }
- case Js::OpCode::LdThis:
- {
- if (noFieldFastPath || !m_lowererMD.GenerateLdThisCheck(instr))
- {
- IR::JnHelperMethod meth;
- if (instr->IsJitProfilingInstr())
- {
- Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
- m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
- meth = IR::HelperSimpleProfiledLdThis;
- this->LowerBinaryHelper(instr, meth);
- }
- else
- {
- meth = IR::HelperLdThisNoFastPath;
- this->LowerBinaryHelperMem(instr, meth);
- }
- }
- else
- {
- this->LowerBinaryHelperMem(instr, IR::HelperLdThis);
- }
- break;
- }
- case Js::OpCode::StrictLdThis:
- if (noFieldFastPath)
- {
- IR::JnHelperMethod meth;
- if (instr->IsJitProfilingInstr())
- {
- Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
- m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
- meth = IR::HelperSimpleProfiledStrictLdThis;
- this->LowerUnaryHelper(instr, meth);
- }
- else
- {
- meth = IR::HelperStrictLdThis;
- this->LowerUnaryHelperMem(instr, meth);
- }
- }
- else
- {
- m_lowererMD.GenerateLdThisStrict(instr);
- instr->Remove();
- }
- break;
- case Js::OpCode::CheckThis:
- m_lowererMD.GenerateLdThisCheck(instr);
- instr->FreeSrc1();
- this->GenerateBailOut(instr);
- break;
- case Js::OpCode::StrictCheckThis:
- m_lowererMD.GenerateLdThisStrict(instr);
- instr->FreeSrc1();
- this->GenerateBailOut(instr);
- break;
- case Js::OpCode::NewScArray:
- instrPrev = this->LowerNewScArray(instr);
- break;
- case Js::OpCode::NewScArrayWithMissingValues:
- this->LowerUnaryHelperMem(instr, IR::HelperScrArr_OP_NewScArrayWithMissingValues);
- break;
- case Js::OpCode::NewScIntArray:
- instrPrev = this->LowerNewScIntArray(instr);
- break;
- case Js::OpCode::NewScFltArray:
- instrPrev = this->LowerNewScFltArray(instr);
- break;
- case Js::OpCode::GetForInEnumerator:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_OP_GetForInEnumerator);
- break;
- case Js::OpCode::ReleaseForInEnumerator:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_OP_ReleaseForInEnumerator);
- break;
- case Js::OpCode::Add_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- // we don't want to mix float32 and float64
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Add);
- }
- else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
- {
- }
- else
- {
- m_lowererMD.GenerateFastAdd(instr);
- this->LowerBinaryHelperMemWithTemp3(instr, IR_HELPER_OP_FULL_OR_INPLACE(Add), IR::HelperOp_AddLeftDead);
- }
- break;
- case Js::OpCode::Div_A:
- {
- if (instr->IsJitProfilingInstr()) {
- LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledDivide);
- }
- else if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else
- {
- if (!PHASE_OFF(Js::MathFastPathPhase, this->m_func) && !noMathFastPath)
- {
- IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
- if (src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address))
- {
- int32 value = Js::TaggedInt::ToInt32(src2->m_address);
- if (Math::IsPow2(value))
- {
- m_lowererMD.GenerateFastDivByPow2(instr);
- }
- }
- }
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Divide));
- }
- break;
- }
- case Js::OpCode::Expo_A:
- {
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
- }
- else
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Exponentiation));
- }
- break;
- }
- case Js::OpCode::Mul_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Multiply);
- }
- else if (m_lowererMD.GenerateFastMul(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Multiply));
- }
- break;
- case Js::OpCode::Rem_A:
- if (instr->GetDst()->IsFloat64())
- {
- this->LowerRemR8(instr);
- }
- else if (instr->IsJitProfilingInstr())
- {
- this->LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledRemainder);
- }
- else
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Modulus));
- }
- break;
- case Js::OpCode::Sub_A:
- if (instr->GetDst()->IsFloat())
- {
- Assert(instr->GetSrc1()->IsFloat());
- Assert(instr->GetSrc2()->IsFloat());
- Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
- Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Subtract);
- }
- else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
- {
- }
- else
- {
- m_lowererMD.GenerateFastSub(instr);
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Subtract));
- }
- break;
- case Js::OpCode::And_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_And);
- }
- else if (m_lowererMD.GenerateFastAnd(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(And));
- }
- break;
- case Js::OpCode::Or_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Or);
- }
- else if (m_lowererMD.GenerateFastOr(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Or));
- }
- break;
- case Js::OpCode::Xor_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastXor(instr))
- {
- this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Xor));
- }
- break;
- case Js::OpCode::Shl_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftLeft(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftLeft);
- }
- break;
- case Js::OpCode::Shr_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRight);
- }
- break;
- case Js::OpCode::ShrU_A:
- if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRightU);
- }
- break;
- case Js::OpCode::CmEq_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmEq_A);
- }
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmEq_A);
- }
- break;
- case Js::OpCode::CmNeq_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmNeq_A);
- }
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmNeq_A);
- }
- break;
- case Js::OpCode::CmSrEq_A:
- if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_A);
- }
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastCmSrEq(instr))
- {
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_A);
- }
- break;
- case Js::OpCode::CmSrNeq_A:
- if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrNeq_A);
- }
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrNeq_A);
- }
- break;
- case Js::OpCode::CmGt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGt_A);
- }
- break;
- case Js::OpCode::CmGe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGe_A);
- }
- break;
- case Js::OpCode::CmLt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLt_A);
- }
- break;
- case Js::OpCode::CmLe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- this->m_lowererMD.GenerateFastCmXxR8(instr);
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLe_A);
- }
- break;
- case Js::OpCode::CmEq_I4:
- case Js::OpCode::CmNeq_I4:
- case Js::OpCode::CmGe_I4:
- case Js::OpCode::CmGt_I4:
- case Js::OpCode::CmLe_I4:
- case Js::OpCode::CmLt_I4:
- case Js::OpCode::CmUnGe_I4:
- case Js::OpCode::CmUnGt_I4:
- case Js::OpCode::CmUnLe_I4:
- case Js::OpCode::CmUnLt_I4:
- this->m_lowererMD.GenerateFastCmXxI4(instr);
- break;
- case Js::OpCode::Conv_Bool:
- instrPrev = this->m_lowererMD.GenerateConvBool(instr);
- break;
- case Js::OpCode::IsInst:
- m_lowererMD.GenerateFastIsInst(instr);
- instrPrev = this->LowerIsInst(instr, IR::HelperScrObj_OP_IsInst);
- break;
- case Js::OpCode::IsIn:
- this->LowerBinaryHelperMem(instr, IR::HelperOp_IsIn);
- break;
- case Js::OpCode::LdInt8ArrViewElem:
- case Js::OpCode::LdUInt8ArrViewElem:
- case Js::OpCode::LdInt16ArrViewElem:
- case Js::OpCode::LdUInt16ArrViewElem:
- case Js::OpCode::LdInt32ArrViewElem:
- case Js::OpCode::LdUInt32ArrViewElem:
- case Js::OpCode::LdFloat32ArrViewElem:
- case Js::OpCode::LdFloat64ArrViewElem:
- instrPrev = LowerLdArrViewElem(instr);
- break;
- case Js::OpCode::StInt8ArrViewElem:
- case Js::OpCode::StUInt8ArrViewElem:
- case Js::OpCode::StInt16ArrViewElem:
- case Js::OpCode::StUInt16ArrViewElem:
- case Js::OpCode::StInt32ArrViewElem:
- case Js::OpCode::StUInt32ArrViewElem:
- case Js::OpCode::StFloat32ArrViewElem:
- case Js::OpCode::StFloat64ArrViewElem:
- instrPrev = LowerStArrViewElem(instr);
- break;
- case Js::OpCode::Memset:
- case Js::OpCode::Memcopy:
- {
- LowerMemOp(instr);
- break;
- }
- case Js::OpCode::ArrayDetachedCheck:
- instrPrev = LowerArrayDetachedCheck(instr);
- break;
- case Js::OpCode::StElemI_A:
- case Js::OpCode::StElemI_A_Strict:
- {
- // Note: under debugger (Fast F12) don't let GenerateFastStElemI which calls into ToNumber_Helper
- // which takes double, and currently our helper wrapper doesn't support double.
- bool fastPath = !noMathFastPath && !m_func->IsJitInDebugMode();
- if(!fastPath && instr->HasBailOutInfo())
- {
- // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
- // bailouts require the fast path.
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- const IR::BailOutKind bailOutKindBits = bailOutKind & IR::BailOutKindBits;
- if(bailOutKindBits & ~(IR::BailOutOnMissingValue | IR::BailOutConvertedNativeArray))
- {
- fastPath = true;
- }
- else
- {
- const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
- fastPath =
- bailOutKindMinusBits &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
- }
- }
- IR::Opnd * opnd = instr->GetDst();
- IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
- ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
- if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
- {
- baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
- }
- bool instrIsInHelperBlock;
- if (!fastPath)
- {
- this->LowerStElemI(
- instr,
- instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
- false);
- }
- else if (GenerateFastStElemI(instr, &instrIsInHelperBlock))
- {
- #if DBG
- if(instr->HasBailOutInfo())
- {
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- Assert(
- (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
- !(
- bailOutKind &
- (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
- ));
- }
- #endif
- this->LowerStElemI(
- instr,
- instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
- instrIsInHelperBlock);
- }
- break;
- }
- case Js::OpCode::LdElemI_A:
- case Js::OpCode::LdMethodElem:
- {
- bool fastPath =
- !noMathFastPath &&
- (
- instr->m_opcode != Js::OpCode::LdMethodElem ||
- instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyObject()
- );
- if(!fastPath && instr->HasBailOutInfo())
- {
- // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
- // bailouts require the fast path.
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if(bailOutKind & IR::BailOutKindBits)
- {
- fastPath = true;
- }
- else
- {
- const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
- fastPath =
- bailOutKindMinusBits &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
- bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
- }
- }
- IR::Opnd * opnd = instr->GetSrc1();
- IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
- ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
- if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
- {
- baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
- }
- bool instrIsInHelperBlock;
- if (!fastPath)
- {
- this->LowerLdElemI(
- instr,
- instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
- false);
- }
- else if (GenerateFastLdElemI(instr, &instrIsInHelperBlock))
- {
- #if DBG
- if(instr->HasBailOutInfo())
- {
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- Assert(
- (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
- !(
- bailOutKind &
- (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
- ));
- }
- #endif
- this->LowerLdElemI(
- instr,
- instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
- instrIsInHelperBlock);
- }
- break;
- }
- case Js::OpCode::InitSetElemI:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemSetter);
- break;
- case Js::OpCode::InitGetElemI:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemGetter);
- break;
- case Js::OpCode::InitComputedProperty:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitComputedProperty);
- break;
- case Js::OpCode::Delete_A:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Delete);
- break;
- case Js::OpCode::DeleteElemI_A:
- this->LowerDeleteElemI(instr, false);
- break;
- case Js::OpCode::DeleteElemIStrict_A:
- this->LowerDeleteElemI(instr, true);
- break;
- case Js::OpCode::BytecodeArgOutCapture:
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::UnwrapWithObj:
- this->LowerUnaryHelper(instr, IR::HelperOp_UnwrapWithObj);
- break;
- case Js::OpCode::Ld_A:
- case Js::OpCode::Ld_I4:
- case Js::OpCode::InitConst:
- if (instr->IsJitProfilingInstr() && instr->AsJitProfilingInstr()->isBeginSwitch) {
- LowerProfiledBeginSwitch(instr->AsJitProfilingInstr());
- break;
- }
- m_lowererMD.ChangeToAssign(instr);
- if(instr->HasBailOutInfo())
- {
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if(bailOutKind == IR::BailOutExpectingString)
- {
- this->LowerBailOnNotString(instr);
- }
- else
- {
- // Should not reach here as there are only 1 BailOutKind (BailOutExpectingString) currently associated with the Load Instr
- Assert(false);
- }
- }
- break;
- case Js::OpCode::LdIndir:
- Assert(instr->GetDst());
- Assert(instr->GetDst()->IsRegOpnd());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->IsIndirOpnd());
- Assert(!instr->GetSrc2());
- m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::FromVar:
- Assert(instr->GetSrc1()->GetType() == TyVar);
- if (instr->GetDst()->GetType() == TyInt32)
- {
- if(m_lowererMD.EmitLoadInt32(instr))
- {
- // Bail out instead of calling a helper
- Assert(instr->GetBailOutKind() == IR::BailOutIntOnly || instr->GetBailOutKind() == IR::BailOutExpectingInteger);
- Assert(!instr->GetSrc1()->GetValueType().IsInt()); // when we know it's an int, it should not have bailout info, to avoid generating a bailout path that will never be taken
- instr->UnlinkSrc1();
- instr->UnlinkDst();
- GenerateBailOut(instr);
- }
- }
- else if (instr->GetDst()->IsFloat())
- {
- if (m_func->GetJnFunction()->GetIsAsmJsFunction())
- {
- m_lowererMD.EmitLoadFloat(instr->GetDst(), instr->GetSrc1(), instr);
- instr->Remove();
- }
- else
- {
- m_lowererMD.EmitLoadFloatFromNumber(instr->GetDst(), instr->GetSrc1(), instr);
- }
- }
- // Support on IA only
- #if defined(_M_IX86) || defined(_M_X64)
- else if (instr->GetDst()->IsSimd128())
- {
- // SIMD_JS
- m_lowererMD.GenerateCheckedSimdLoad(instr);
- }
- #endif
- else
- {
- Assert(UNREACHED);
- }
- break;
- case Js::OpCode::ArgOut_A:
- // I don't know if this can happen in asm.js mode, but if it can, we might want to handle differently
- Assert(!m_func->GetJnFunction()->GetIsAsmjsMode());
- // fall-through
- case Js::OpCode::ArgOut_A_Inline:
- case Js::OpCode::ArgOut_A_Dynamic:
- {
- // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
- // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
- // Change the ArgOut into a store to the stack for bailouts
- instr->FreeSrc2();
- StackSym *argSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
- argSym->m_offset = this->m_func->StackAllocate(sizeof(Js::Var));
- argSym->m_allocated = true;
- argSym->m_isOrphanedArg = true;
- this->m_lowererMD.ChangeToAssign(instr);
- }
- break;
- case Js::OpCode::LoweredStartCall:
- case Js::OpCode::StartCall:
- // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
- // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
- // We'll just delete these StartCalls during peeps.
- break;
- case Js::OpCode::ToVar:
- Assert(instr->GetDst()->GetType() == TyVar);
- if (instr->GetSrc1()->GetType() == TyInt32)
- {
- m_lowererMD.EmitLoadVar(instr);
- }
- else if (instr->GetSrc1()->GetType() == TyFloat64)
- {
- Assert(instr->GetSrc1()->IsRegOpnd());
- m_lowererMD.SaveDoubleToVar(
- instr->GetDst()->AsRegOpnd(),
- instr->GetSrc1()->AsRegOpnd(), instr, instr);
- instr->Remove();
- }
- #if defined(_M_IX86) || defined(_M_X64)
- else if (IRType_IsSimd128(instr->GetSrc1()->GetType()))
- {
- m_lowererMD.GenerateSimdStore(instr);
- }
- #endif
- else
- {
- Assert(UNREACHED);
- }
- break;
- case Js::OpCode::Conv_Prim:
- if (instr->GetDst()->IsFloat())
- {
- if (instr->GetSrc1()->IsIntConstOpnd())
- {
- LoadFloatFromNonReg(instr->UnlinkSrc1(), instr->UnlinkDst(), instr);
- }
- else if (instr->GetSrc1()->IsInt32())
- {
- m_lowererMD.EmitIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else if (instr->GetSrc1()->IsUInt32())
- {
- Assert(instr->GetDst()->IsFloat64());
- m_lowererMD.EmitUIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
- }
- else
- {
- Assert(instr->GetDst()->IsFloat64());
- Assert(instr->GetSrc1()->IsFloat32());
- m_lowererMD.EmitFloat32ToFloat64(instr->GetDst(), instr->GetSrc1(), instr);
- }
- }
- else
- {
- Assert(instr->GetDst()->IsInt32());
- Assert(instr->GetSrc1()->IsFloat());
- m_lowererMD.EmitFloatToInt(instr->GetDst(), instr->GetSrc1(), instr);
- }
- instr->Remove();
- break;
- case Js::OpCode::FunctionExit:
- LowerFunctionExit(instr);
- // The rest of Epilog generation happens after reg allocation
- break;
- case Js::OpCode::FunctionEntry:
- LowerFunctionEntry(instr);
- // The rest of Prolog generation happens after reg allocation
- break;
- case Js::OpCode::ArgIn_Rest:
- case Js::OpCode::ArgIn_A:
- if (m_func->GetJnFunction()->GetIsAsmjsMode() && !m_func->IsLoopBody())
- {
- instrPrev = LowerArgInAsmJs(instr);
- }
- else
- {
- instrPrev = LowerArgIn(instr);
- }
- break;
- case Js::OpCode::Label:
- if (instr->AsLabelInstr()->m_isLoopTop)
- {
- if (this->outerMostLoopLabel == instr)
- {
- noFieldFastPath = !defaultDoFastPath;
- noMathFastPath = !defaultDoFastPath;
- this->outerMostLoopLabel = nullptr;
- instr->AsLabelInstr()->GetLoop()->isProcessed = true;
- }
- this->m_func->MarkConstantAddressSyms(instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms);
- instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms->Or(this->addToLiveOnBackEdgeSyms);
- }
- break;
- case Js::OpCode::Br:
- m_lowererMD.LowerUncondBranch(instr);
- break;
- case Js::OpCode::BrFncEqApply:
- LowerBrFncApply(instr,IR::HelperOp_OP_BrFncEqApply);
- break;
- case Js::OpCode::BrFncNeqApply:
- LowerBrFncApply(instr,IR::HelperOp_OP_BrFncNeqApply);
- break;
- case Js::OpCode::BrHasSideEffects:
- case Js::OpCode::BrNotHasSideEffects:
- m_lowererMD.GenerateFastBrS(instr->AsBranchInstr());
- break;
- case Js::OpCode::BrFalse_A:
- case Js::OpCode::BrTrue_A:
- if (instr->GetSrc1()->IsFloat())
- {
- GenerateFastBrBool(instr->AsBranchInstr());
- }
- else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) ||
- noMathFastPath ||
- GenerateFastBrBool(instr->AsBranchInstr()))
- {
- this->LowerBrBMem(instr, IR::HelperConv_ToBoolean);
- }
- break;
- case Js::OpCode::BrOnObject_A:
- if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath)
- {
- this->LowerBrOnObject(instr, IR::HelperOp_IsObject);
- }
- else
- {
- GenerateFastBrOnObject(instr);
- }
- break;
- case Js::OpCode::BrOnClassConstructor:
- this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsClassConstructor);
- break;
- case Js::OpCode::BrAddr_A:
- case Js::OpCode::BrNotAddr_A:
- case Js::OpCode::BrNotNull_A:
- m_lowererMD.LowerCondBranch(instr);
- break;
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrNotNeq_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- bool needHelper = true;
- if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBrCMem(instr, IR::HelperOp_Equal, false, false /*isHelper*/);
- }
- }
- else if (this->TryGenerateFastBrEq(instr))
- {
- }
- else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
- {
- if (needHelper)
- {
- this->LowerBrCMem(instr, IR::HelperOp_Equal, false);
- }
- }
- else
- {
- if (needHelper)
- {
- this->LowerBrCMem(instr, IR::HelperOp_Equal, false, false /*isHelper*/);
- }
- }
- if (!needHelper)
- {
- instr->Remove();
- }
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_Equal, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrGe_A:
- case Js::OpCode::BrNotGe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrGt_A:
- case Js::OpCode::BrNotGt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_Greater, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_Greater, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrLt_A:
- case Js::OpCode::BrNotLt_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_Less, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_Less, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrLe_A:
- case Js::OpCode::BrNotLe_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- this->LowerBrCMem(instr, IR::HelperOp_LessEqual, false, false /*isHelper*/);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_LessEqual, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrNeq_A:
- case Js::OpCode::BrNotEq_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- bool needHelper = true;
- if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false, false /*isHelper*/);
- }
- }
- else if (this->TryGenerateFastBrNeq(instr))
- {
- }
- else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false);
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false, false /*isHelper*/);
- }
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotEqual, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::MultiBr:
- {
- IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();
- switch (multiBranchInstr->m_kind)
- {
- case IR::MultiBranchInstr::StrDictionary:
- this->GenerateSwitchStringLookup(instr);
- break;
- case IR::MultiBranchInstr::SingleCharStrJumpTable:
- this->GenerateSingleCharStrJumpTableLookup(instr);
- m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
- break;
- case IR::MultiBranchInstr::IntJumpTable:
- this->LowerMultiBr(instr);
- m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
- break;
- default:
- Assert(false);
- }
- break;
- }
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- {
- srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false, false /*isHelper*/);
- }
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->GenerateFastBrSrEq(instr, srcReg1, srcReg2, &instrPrev, noMathFastPath))
- {
- }
- else
- {
- bool needHelper = true;
- if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
- {
- if (needHelper)
- {
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false);
- }
- }
- else
- {
- if (needHelper)
- {
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false, false /*isHelper*/);
- }
- }
- if (!needHelper)
- {
- instr->Remove();
- }
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, true, false /*isHelper*/);
- }
- }
- break;
- }
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- if (instr->GetSrc1()->IsFloat())
- {
- Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
- m_lowererMD.LowerToFloat(instr);
- }
- else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
- {
- bool needHelper = true;
- if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
- {
- if (!fNoLower)
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false, false /*isHelper*/);
- }
- }
- else if (this->GenerateFastBrSrNeq(instr, &instrPrev))
- {
- }
- else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
- {
- if (needHelper)
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false);
- }
- }
- else
- {
- if (needHelper)
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false, false /*isHelper*/);
- }
- }
- if (!needHelper)
- {
- instr->Remove();
- }
- }
- else
- {
- this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, true, false /*isHelper*/);
- }
- break;
- case Js::OpCode::BrOnEmpty:
- case Js::OpCode::BrOnNotEmpty:
- if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func))
- {
- m_lowererMD.GenerateFastBrBReturn(instr);
- this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, true);
- }
- else
- {
- this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, false);
- }
- break;
- case Js::OpCode::BrOnHasProperty:
- case Js::OpCode::BrOnNoProperty:
- this->LowerBrProperty(instr, IR::HelperOp_HasProperty);
- break;
- case Js::OpCode::BrOnException:
- Assert(!this->m_func->DoGlobOpt());
- instr->Remove();
- break;
- case Js::OpCode::BrOnNoException:
- instr->m_opcode = LowererMD::MDUncondBranchOpcode;
- break;
- case Js::OpCode::StSlot:
- this->LowerStSlot(instr);
- break;
- case Js::OpCode::StSlotChkUndecl:
- this->LowerStSlotChkUndecl(instr);
- break;
- case Js::OpCode::ProfiledLoopStart:
- {
- Assert(m_func->DoSimpleJitDynamicProfile());
- Assert(instr->IsJitProfilingInstr());
- // Check for the helper instr from IRBuilding (it won't be there if there are no LoopEnds due to an infinite loop)
- auto prev = instr->m_prev;
- if (prev->IsJitProfilingInstr() && prev->AsJitProfilingInstr()->isLoopHelper)
- {
- auto saveOpnd = prev->UnlinkDst();
- instrPrev = prev->m_prev;
- prev->Remove();
- const auto starFlag = GetImplicitCallFlagsOpnd();
- IR::AutoReuseOpnd a(starFlag, m_func);
- this->InsertMove(saveOpnd, starFlag, instr);
- this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), instr);
- }
- else
- {
- #if DBG
- // Double check that we indeed do not have a LoopEnd that is part of the same loop for the rest of the function
- auto cur = instr;
- auto loopNumber = instr->AsJitProfilingInstr()->loopNumber;
- while (cur)
- {
- Assert(cur->m_opcode != Js::OpCode::ProfiledLoopEnd || cur->IsJitProfilingInstr() && cur->AsJitProfilingInstr()->loopNumber != loopNumber);
- cur = cur->m_next;
- }
- #endif
- }
- // If we turned off fulljit, there's no reason to do this.
- if (!m_func->GetJnFunction()->DoFullJit())
- {
- instr->Remove();
- }
- else
- {
- Assert(instr->GetDst());
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleGetScheduledEntryPoint, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
- this->m_lowererMD.LowerCall(instr, 0);
- }
- break;
- }
- case Js::OpCode::ProfiledLoopBodyStart:
- {
- Assert(m_func->DoSimpleJitDynamicProfile());
- const auto loopNum = instr->AsJitProfilingInstr()->loopNumber;
- Assert(loopNum < m_func->GetJnFunction()->GetLoopCount());
- auto entryPointOpnd = instr->UnlinkSrc1();
- auto dobailout = instr->UnlinkDst();
- const auto dobailoutType = TyUint8;
- Assert(dobailout->GetType() == TyUint8 && sizeof(decltype(Js::SimpleJitHelpers::IsLoopCodeGenDone(nullptr))) == 1);
- m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(0, TyUint32, m_func)); // zero indicates that we do not want to add flags back in
- m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(loopNum, TyUint32, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
- m_lowererMD.LowerCall(instr, 0);
- // Outline of JITed code:
- //
- // LoopStart:
- // entryPoint = GetScheduledEntryPoint(framePtr, loopNum)
- // LoopBodyStart:
- // uint8 dobailout;
- // if (entryPoint) {
- // dobailout = IsLoopCodeGenDone(entryPoint)
- // } else {
- // dobailout = ++interpretCount >= threshold
- // }
- // // already exists from IRBuilding:
- // if (dobailout) {
- // Bailout
- // }
- if (!m_func->GetJnFunction()->DoFullJit() || !m_func->GetJnFunction()->DoJITLoopBody())
- {
- // If we're not doing fulljit, we've turned off JitLoopBodies, or if we don't have loop headers allocated (the function has a Try, etc)
- // just move false to dobailout
- this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), instr->m_next);
- }
- else if (m_func->GetJnFunction()->ForceJITLoopBody())
- {
- // If we're forcing jit loop bodies, move true to dobailout
- this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), instr->m_next);
- }
- else
- {
- // Put in the labels
- auto entryPointIsNull = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- auto checkDoBailout = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- instr->InsertAfter(checkDoBailout);
- instr->InsertAfter(entryPointIsNull);
- this->InsertCompareBranch(entryPointOpnd, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindDynamicMisc, m_func), Js::OpCode::BrEq_A, false, entryPointIsNull, instr->m_next);
- // If the entry point is not null
- auto isCodeGenDone = IR::Instr::New(Js::OpCode::Call, dobailout, IR::HelperCallOpnd::New(IR::HelperSimpleIsLoopCodeGenDone, m_func), m_func);
- entryPointIsNull->InsertBefore(isCodeGenDone);
- m_lowererMD.LoadHelperArgument(isCodeGenDone, entryPointOpnd);
- m_lowererMD.LowerCall(isCodeGenDone, 0);
- this->InsertBranch(LowererMD::MDUncondBranchOpcode, true, checkDoBailout, entryPointIsNull);
- // If the entry point is null
- auto head = m_func->GetJnFunction()->GetLoopHeader(loopNum);
- Assert(head);
- static_assert(sizeof(head->interpretCount) == 4, "Change the type in the following line");
- const auto type = TyUint32;
- auto countReg = IR::RegOpnd::New(type, m_func);
- auto countAddr = IR::MemRefOpnd::New(&head->interpretCount, type, m_func);
- IR::AutoReuseOpnd a(countReg, m_func), b(countAddr, m_func);
- this->InsertAdd(false, countReg, countAddr, IR::IntConstOpnd::New(1, type, m_func, true), checkDoBailout);
- this->InsertMove(countAddr, countReg, checkDoBailout);
- this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), checkDoBailout);
- // GetLoopInterpretCount() is a dynamic quantity. It's computed at simple-JIT time here, but that's okay
- // because there would have been sufficient iterations in interpreted mode to get a reasonable value.
- const auto threshold = instr->m_func->GetJnFunction()->GetLoopInterpretCount(head);
- this->InsertCompareBranch(countReg, IR::IntConstOpnd::New(threshold, type, m_func), Js::OpCode::BrLt_A, checkDoBailout, checkDoBailout);
- this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), checkDoBailout);
- // fallthrough
- // Label checkDoBailout (inserted above)
- }
- }
- break;
- case Js::OpCode::ProfiledLoopEnd:
- {
- Assert(m_func->DoSimpleJitDynamicProfile());
- // This is set up in IRBuilding
- Assert(instr->GetSrc1());
- IR::Opnd* savedFlags = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, savedFlags);
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
- m_lowererMD.LowerCall(instr, 0);
- }
- break;
- case Js::OpCode::InitLoopBodyCount:
- Assert(this->m_func->IsLoopBody());
- instr->SetSrc1(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
- this->m_lowererMD.ChangeToAssign(instr);
- break;
- case Js::OpCode::StLoopBodyCount:
- Assert(this->m_func->IsLoopBody());
- this->LowerStLoopBodyCount(instr);
- break;
- case Js::OpCode::IncrLoopBodyCount:
- Assert(this->m_func->IsLoopBody());
- instr->m_opcode = Js::OpCode::Add_I4;
- instr->SetSrc2(IR::IntConstOpnd::New(1, TyUint32, this->m_func));
- this->m_lowererMD.EmitInt4Instr(instr);
- break;
- #if !FLOATVAR
- case Js::OpCode::StSlotBoxTemp:
- this->LowerStSlotBoxTemp(instr);
- break;
- #endif
- case Js::OpCode::LdSlot:
- case Js::OpCode::LdSlotArr:
- {
- Js::ProfileId profileId;
- IR::Instr *profileBeforeInstr;
- if(instr->IsJitProfilingInstr())
- {
- profileId = instr->AsJitProfilingInstr()->profileId;
- Assert(profileId != Js::Constants::NoProfileId);
- profileBeforeInstr = instr->m_next;
- }
- else
- {
- profileId = Js::Constants::NoProfileId;
- profileBeforeInstr = nullptr;
- }
- this->LowerLdSlot(instr);
- if(profileId != Js::Constants::NoProfileId)
- {
- LowerProfileLdSlot(instr->GetDst(), instr->m_func, profileId, profileBeforeInstr);
- }
- break;
- }
- case Js::OpCode::LdAsmJsSlot:
- this->LowerLdSlot(instr);
- break;
- case Js::OpCode::StAsmJsSlot:
- this->LowerStSlot(instr);
- break;
- case Js::OpCode::ChkUndecl:
- instrPrev = this->LowerChkUndecl(instr);
- break;
- case Js::OpCode::LdArrHead:
- this->LowerLdArrHead(instr);
- break;
- case Js::OpCode::StElemC:
- case Js::OpCode::StArrSegElemC:
- this->LowerStElemC(instr);
- break;
- case Js::OpCode::LdEnv:
- instrPrev = this->LowerLdEnv(instr);
- break;
- case Js::OpCode::LdAsmJsEnv:
- instrPrev = this->LowerLdAsmJsEnv(instr);
- break;
- case Js::OpCode::LdElemUndef:
- this->LowerLdElemUndef(instr);
- break;
- case Js::OpCode::LdElemUndefScoped:
- this->LowerElementUndefinedScopedMem(instr, IR::HelperOp_LdElemUndefScoped);
- break;
- case Js::OpCode::EnsureNoRootFld:
- this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootProperty);
- break;
- case Js::OpCode::EnsureNoRootRedeclFld:
- this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootRedeclProperty);
- break;
- case Js::OpCode::ScopedEnsureNoRedeclFld:
- this->LowerElementUndefinedScoped(instr, IR::HelperOp_EnsureNoRedeclPropertyScoped);
- break;
- case Js::OpCode::LdFuncExpr:
- // src = function Expression
- m_lowererMD.LoadFuncExpression(instr);
- this->GenerateGetCurrentFunctionObject(instr);
- break;
- case Js::OpCode::LdNewTarget:
- this->GenerateLoadNewTarget(instr);
- break;
- case Js::OpCode::ChkNewCallFlag:
- this->GenerateCheckForCallFlagNew(instr);
- break;
- case Js::OpCode::StFuncExpr:
- // object.propid = src
- LowerStFld(instr, IR::HelperOp_StFunctionExpression, IR::HelperOp_StFunctionExpression, false);
- break;
- case Js::OpCode::InitLetFld:
- case Js::OpCode::InitRootLetFld:
- LowerStFld(instr, IR::HelperOp_InitLetFld, IR::HelperOp_InitLetFld, false);
- break;
- case Js::OpCode::InitConstFld:
- case Js::OpCode::InitRootConstFld:
- LowerStFld(instr, IR::HelperOp_InitConstFld, IR::HelperOp_InitConstFld, false);
- break;
- case Js::OpCode::InitUndeclRootLetFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootLetFld);
- break;
- case Js::OpCode::InitUndeclRootConstFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootConstFld);
- break;
- case Js::OpCode::InitUndeclConsoleLetFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleLetFld);
- break;
- case Js::OpCode::InitUndeclConsoleConstFld:
- LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleConstFld);
- break;
- case Js::OpCode::InitClassMember:
- LowerStFld(instr, IR::HelperOp_InitClassMember, IR::HelperOp_InitClassMember, false);
- break;
- case Js::OpCode::InitClassMemberComputedName:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberComputedName);
- break;
- case Js::OpCode::InitClassMemberGetComputedName:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberGetComputedName);
- break;
- case Js::OpCode::InitClassMemberSetComputedName:
- instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberSetComputedName);
- break;
- case Js::OpCode::InitClassMemberGet:
- instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberGet, IR::HelperOp_InitClassMemberGet, false);
- break;
- case Js::OpCode::InitClassMemberSet:
- instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberSet, IR::HelperOp_InitClassMemberSet, false);
- break;
- case Js::OpCode::NewStackFrameDisplay:
- this->LowerLdFrameDisplay(instr, m_func->DoStackFrameDisplay());
- break;
- case Js::OpCode::LdFrameDisplay:
- this->LowerLdFrameDisplay(instr, false);
- break;
- case Js::OpCode::LdInnerFrameDisplay:
- this->LowerLdInnerFrameDisplay(instr);
- break;
- case Js::OpCode::Throw:
- case Js::OpCode::InlineThrow:
- case Js::OpCode::EHThrow:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_Throw);
- break;
- case Js::OpCode::TryCatch:
- instrPrev = this->LowerTry(instr, true /*try-catch*/);
- break;
- case Js::OpCode::TryFinally:
- instrPrev = this->LowerTry(instr, false /*try-finally*/);
- break;
- case Js::OpCode::Catch:
- instrPrev = m_lowererMD.LowerCatch(instr);
- break;
- case Js::OpCode::LeaveNull:
- instrPrev = m_lowererMD.LowerLeaveNull(instr);
- break;
- case Js::OpCode::Leave:
- if (this->m_func->HasTry() && this->m_func->DoOptimizeTryCatch())
- {
- // Required in Register Allocator to mark region boundaries
- break;
- }
- instrPrev = m_lowererMD.LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), false /*fromFinalLower*/, instr->AsBranchInstr()->m_isOrphanedLeave);
- break;
- case Js::OpCode::BailOnException:
- instrPrev = this->LowerBailOnException(instr);
- break;
- case Js::OpCode::RuntimeTypeError:
- case Js::OpCode::InlineRuntimeTypeError:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeTypeError);
- break;
- case Js::OpCode::RuntimeReferenceError:
- case Js::OpCode::InlineRuntimeReferenceError:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
- break;
- case Js::OpCode::Break:
- // Inline breakpoint: for now do nothing.
- break;
- case Js::OpCode::Nop:
- // This may need support for debugging the JIT, but for now just remove the instruction.
- instr->Remove();
- break;
- case Js::OpCode::Unused:
- // Currently Unused is used with ScopedLdInst to keep the second dst alive, but we don't need to lower it.
- instr->Remove();
- break;
- case Js::OpCode::StatementBoundary:
- // This instruction is merely to help convey source info through the IR
- // and eventually generate the nativeOffset maps.
- break;
- case Js::OpCode::BailOnNotPolymorphicInlinee:
- instrPrev = LowerBailOnNotPolymorphicInlinee(instr);
- break;
- case Js::OpCode::BailOnNoSimdTypeSpec:
- case Js::OpCode::BailOnNoProfile:
- this->GenerateBailOut(instr, nullptr, nullptr);
- break;
- case Js::OpCode::BailOnNotSpreadable:
- instrPrev = this->LowerBailOnNotSpreadable(instr);
- break;
- case Js::OpCode::BailOnNotStackArgs:
- instrPrev = this->LowerBailOnNotStackArgs(instr);
- break;
- case Js::OpCode::BailOnEqual:
- case Js::OpCode::BailOnNotEqual:
- instrPrev = this->LowerBailOnEqualOrNotEqual(instr);
- break;
- case Js::OpCode::BailOnNegative:
- LowerBailOnNegative(instr);
- break;
- case Js::OpCode::BailForDebugger:
- instrPrev = this->LowerBailForDebugger(instr);
- break;
- case Js::OpCode::BailOnNotObject:
- instrPrev = this->LowerBailOnNotObject(instr);
- break;
- case Js::OpCode::BailOnNotBuiltIn:
- instrPrev = this->LowerBailOnNotBuiltIn(instr);
- break;
- case Js::OpCode::BailOnNotArray:
- {
- IR::Instr *bailOnNotArray, *bailOnMissingValue;
- SplitBailOnNotArray(instr, &bailOnNotArray, &bailOnMissingValue);
- IR::RegOpnd *const arrayOpnd = LowerBailOnNotArray(bailOnNotArray);
- if(bailOnMissingValue)
- {
- LowerBailOnMissingValue(bailOnMissingValue, arrayOpnd);
- }
- break;
- }
- case Js::OpCode::BoundCheck:
- case Js::OpCode::UnsignedBoundCheck:
- LowerBoundCheck(instr);
- break;
- case Js::OpCode::BailTarget:
- instrPrev = this->LowerBailTarget(instr);
- break;
- case Js::OpCode::InlineeStart:
- this->LowerInlineeStart(instr);
- break;
- case Js::OpCode::EndCallForPolymorphicInlinee:
- instr->Remove();
- break;
- case Js::OpCode::InlineeEnd:
- this->LowerInlineeEnd(instr);
- break;
- case Js::OpCode::InlineBuiltInEnd:
- case Js::OpCode::InlineNonTrackingBuiltInEnd:
- this->LowerInlineBuiltIn(instr);
- break;
- case Js::OpCode::ExtendArg_A:
- if (instr->GetSrc1()->IsRegOpnd())
- {
- IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
- this->addToLiveOnBackEdgeSyms->Clear(src1->m_sym->m_id);
- }
- instr->Remove();
- break;
- case Js::OpCode::InlineBuiltInStart:
- case Js::OpCode::BytecodeArgOutUse:
- case Js::OpCode::ArgOut_A_InlineBuiltIn:
- instr->Remove();
- break;
- case Js::OpCode::DeadBrEqual:
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Equal);
- break;
- case Js::OpCode::DeadBrSrEqual:
- this->LowerBinaryHelperMem(instr, IR::HelperOp_StrictEqual);
- break;
- case Js::OpCode::DeadBrRelational:
- this->LowerBinaryHelperMem(instr, IR::HelperOp_Greater);
- break;
- case Js::OpCode::DeadBrOnHasProperty:
- this->LowerUnaryHelperMem(instr, IR::HelperOp_HasProperty);
- break;
- case Js::OpCode::DeletedNonHelperBranch:
- break;
- case Js::OpCode::InitClass:
- instrPrev = this->LowerInitClass(instr);
- break;
- case Js::OpCode::NewConcatStrMulti:
- this->LowerNewConcatStrMulti(instr);
- break;
- case Js::OpCode::NewConcatStrMultiBE:
- this->LowerNewConcatStrMultiBE(instr);
- break;
- case Js::OpCode::SetConcatStrMultiItem:
- this->LowerSetConcatStrMultiItem(instr);
- break;
- case Js::OpCode::SetConcatStrMultiItemBE:
- Assert(instr->GetSrc1()->IsRegOpnd());
- this->addToLiveOnBackEdgeSyms->Clear(instr->GetSrc1()->GetStackSym()->m_id);
- // code corresponding to it should already have been generated while lowering NewConcatStrMultiBE
- instr->Remove();
- break;
- case Js::OpCode::Conv_Str:
- this->LowerConvStr(instr);
- break;
- case Js::OpCode::Coerse_Str:
- this->LowerCoerseStr(instr);
- break;
- case Js::OpCode::Coerse_StrOrRegex:
- this->LowerCoerseStrOrRegex(instr);
- break;
- case Js::OpCode::Coerse_Regex:
- this->LowerCoerseRegex(instr);
- break;
- case Js::OpCode::Conv_PrimStr:
- this->LowerConvPrimStr(instr);
- break;
- case Js::OpCode::ObjectFreeze:
- this->LowerUnaryHelper(instr, IR::HelperOP_Freeze);
- break;
- case Js::OpCode::ClearAttributes:
- this->LowerBinaryHelper(instr, IR::HelperOP_ClearAttributes);
- break;
- case Js::OpCode::SpreadArrayLiteral:
- this->LowerSpreadArrayLiteral(instr);
- break;
- case Js::OpCode::CallIExtended:
- {
- // Currently, the only use for CallIExtended is a call that uses spread.
- Assert(IsSpreadCall(instr));
- instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_None);
- break;
- }
- case Js::OpCode::CallIExtendedNew:
- {
- // Currently, the only use for CallIExtended is a call that uses spread.
- Assert(IsSpreadCall(instr));
- instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_New);
- break;
- }
- case Js::OpCode::CallIExtendedNewTargetNew:
- {
- // Currently, the only use for CallIExtended is a call that uses spread.
- Assert(IsSpreadCall(instr));
- instrPrev = this->LowerSpreadCall(instr, (Js::CallFlags)(Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget));
- break;
- }
- case Js::OpCode::LdSpreadIndices:
- instr->Remove();
- break;
- case Js::OpCode::LdSuper:
- instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperLdSuper);
- break;
- case Js::OpCode::LdSuperCtor:
- instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperLdSuperCtor);
- break;
- case Js::OpCode::ScopedLdSuper:
- instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdSuper);
- break;
- case Js::OpCode::ScopedLdSuperCtor:
- instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdSuperCtor);
- break;
- case Js::OpCode::SetHomeObj:
- {
- IR::Opnd *src2Opnd = instr->UnlinkSrc2();
- IR::Opnd *src1Opnd = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src2Opnd);
- m_lowererMD.LoadHelperArgument(instr, src1Opnd);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetHomeObj);
- break;
- }
- case Js::OpCode::SetComputedNameVar:
- {
- IR::Opnd *src2Opnd = instr->UnlinkSrc2();
- IR::Opnd *src1Opnd = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src2Opnd);
- m_lowererMD.LoadHelperArgument(instr, src1Opnd);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetComputedNameVar);
- break;
- }
- case Js::OpCode::InlineeMetaArg:
- {
- m_lowererMD.ChangeToAssign(instr);
- break;
- }
- case Js::OpCode::Yield:
- {
- instr->FreeSrc1(); // Source is not actually used by the backend other than to calculate lifetime
- IR::Opnd* dstOpnd = instr->UnlinkDst();
- // prm2 is the ResumeYieldData pointer per calling convention established in JavascriptGenerator::CallGenerator
- // This is the value the bytecode expects to be in the dst register of the Yield opcode after resumption.
- // Load it here after the bail-in.
- StackSym *resumeYieldDataSym = StackSym::NewParamSlotSym(2, m_func);
- m_func->SetArgOffset(resumeYieldDataSym, (LowererMD::GetFormalParamOffset() + 1) * MachPtr);
- IR::SymOpnd * resumeYieldDataOpnd = IR::SymOpnd::New(resumeYieldDataSym, TyMachPtr, m_func);
- AssertMsg(instr->m_next->IsLabelInstr(), "Expect the resume label to immediately follow Yield instruction");
- m_lowererMD.CreateAssign(dstOpnd, resumeYieldDataOpnd, instr->m_next->m_next);
- GenerateBailOut(instr);
- break;
- }
- case Js::OpCode::ResumeYield:
- case Js::OpCode::ResumeYieldStar:
- {
- IR::Opnd *srcOpnd1 = instr->UnlinkSrc1();
- IR::Opnd *srcOpnd2 = instr->m_opcode == Js::OpCode::ResumeYieldStar ? instr->UnlinkSrc2() : IR::AddrOpnd::NewNull(m_func);
- m_lowererMD.LoadHelperArgument(instr, srcOpnd2);
- m_lowererMD.LoadHelperArgument(instr, srcOpnd1);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperResumeYield);
- break;
- }
- case Js::OpCode::GeneratorResumeJumpTable:
- {
- // Lowered in LowerPrologEpilog so that the jumps introduced are not considered to be part of the flow for the RegAlloc phase.
- // Introduce a BailOutNoSave label if there were yield points that were elided due to optimizations. They could still be hit
- // if an active generator object had been paused at such a yield point when the function body was JITed. So safe guard such a
- // case by having the native code simply jump back to the interpreter for such yield points.
- IR::LabelInstr *bailOutNoSaveLabel = nullptr;
- m_func->MapUntilYieldOffsetResumeLabels([this, &bailOutNoSaveLabel](int, const YieldOffsetResumeLabel& yorl)
- {
- if (yorl.Second() == nullptr)
- {
- if (bailOutNoSaveLabel == nullptr)
- {
- bailOutNoSaveLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- }
- return true;
- }
- return false;
- });
- // Insert the bailoutnosave label somewhere along with a call to BailOutNoSave helper
- if (bailOutNoSaveLabel != nullptr)
- {
- IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
- IR::LabelInstr * exitTargetInstr;
- if (exitPrevInstr->IsLabelInstr())
- {
- exitTargetInstr = exitPrevInstr->AsLabelInstr();
- exitPrevInstr = exitPrevInstr->m_prev;
- }
- else
- {
- exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- exitPrevInstr->InsertAfter(exitTargetInstr);
- }
- bailOutNoSaveLabel->m_hasNonBranchRef = true;
- bailOutNoSaveLabel->isOpHelper = true;
- IR::Instr* bailOutCall = IR::Instr::New(Js::OpCode::Call, m_func);
- exitPrevInstr->InsertAfter(bailOutCall);
- exitPrevInstr->InsertAfter(bailOutNoSaveLabel);
- exitPrevInstr->InsertAfter(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, m_func));
- IR::RegOpnd * frameRegOpnd = IR::RegOpnd::New(nullptr, LowererMD::GetRegFramePointer(), TyMachPtr, m_func);
- m_lowererMD.LoadHelperArgument(bailOutCall, frameRegOpnd);
- m_lowererMD.ChangeToHelperCall(bailOutCall, IR::HelperNoSaveRegistersBailOutForElidedYield);
- m_func->m_bailOutNoSaveLabel = bailOutNoSaveLabel;
- }
- break;
- }
- case Js::OpCode::AsyncSpawn:
- this->LowerBinaryHelperMem(instr, IR::HelperAsyncSpawn);
- break;
- case Js::OpCode::FrameDisplayCheck:
- instrPrev = this->LowerFrameDisplayCheck(instr);
- break;
- case Js::OpCode::SlotArrayCheck:
- instrPrev = this->LowerSlotArrayCheck(instr);
- break;
- default:
- #if defined(_M_IX86) || defined(_M_X64)
- if (IsSimd128Opcode(instr->m_opcode))
- {
- instrPrev = m_lowererMD.Simd128Instruction(instr);
- break;
- }
- #endif
- AssertMsg(instr->IsLowered(), "Unknown opcode");
- if(!instr->IsLowered())
- {
- Fatal();
- }
- break;
- }
- #if DBG
- LegalizeVerifyRange(instrPrev ? instrPrev->m_next : instrStart,
- verifyLegalizeInstrNext ? verifyLegalizeInstrNext->m_prev : nullptr);
- #endif
- } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
- Assert(this->outerMostLoopLabel == nullptr);
- }
- IR::Instr *
- Lowerer::LoadFunctionBody(IR::Instr * instr)
- {
- return m_lowererMD.LoadHelperArgument(instr, LoadFunctionBodyOpnd(instr));
- }
- IR::Instr *
- Lowerer::LoadScriptContext(IR::Instr * instr)
- {
- return m_lowererMD.LoadHelperArgument(instr, LoadScriptContextOpnd(instr));
- }
- IR::Opnd *
- Lowerer::LoadFunctionBodyOpnd(IR::Instr * instr)
- {
- return IR::AddrOpnd::New(instr->m_func->GetJnFunction(), IR::AddrOpndKindDynamicFunctionBody, instr->m_func);
- }
- IR::Opnd *
- Lowerer::LoadScriptContextOpnd(IR::Instr * instr)
- {
- return IR::AddrOpnd::New(this->m_func->GetScriptContext(), IR::AddrOpndKindDynamicScriptContext, this->m_func);
- }
- IR::Opnd *
- Lowerer::LoadScriptContextValueOpnd(IR::Instr * instr, ScriptContextValue valueType)
- {
- Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
- switch (valueType)
- {
- case ScriptContextValue::ScriptContextNumberAllocator:
- return IR::AddrOpnd::New(scriptContext->GetNumberAllocator(), IR::AddrOpndKindDynamicMisc, instr->m_func);
- case ScriptContextValue::ScriptContextRecycler:
- return IR::AddrOpnd::New(scriptContext->GetRecycler(), IR::AddrOpndKindDynamicMisc, instr->m_func);
- default:
- Assert(false);
- return nullptr;
- }
- }
- IR::Opnd *
- Lowerer::LoadLibraryValueOpnd(IR::Instr * instr, LibraryValue valueType, RegNum regNum)
- {
- Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
- switch (valueType)
- {
- case LibraryValue::ValueEmptyString:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetEmptyString(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
- case LibraryValue::ValueUndeclBlockVar:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetUndeclBlockVar(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
- case LibraryValue::ValueUndefined:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetUndefined(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
- case LibraryValue::ValueNull:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNull(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
- case LibraryValue::ValueTrue:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetTrue(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
- case LibraryValue::ValueFalse:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetFalse(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
- case LibraryValue::ValueNegativeZero:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNegativeZero(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
- case LibraryValue::ValueNumberTypeStatic:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNumberTypeStatic(), IR::AddrOpndKindDynamicType, instr->m_func, true);
- case LibraryValue::ValueStringTypeStatic:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetStringTypeStatic(), IR::AddrOpndKindDynamicType, instr->m_func, true);
- case LibraryValue::ValueObjectType:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetObjectType(), IR::AddrOpndKindDynamicType, instr->m_func);
- case LibraryValue::ValueObjectHeaderInlinedType:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetObjectHeaderInlinedType(), IR::AddrOpndKindDynamicType, instr->m_func);
- case LibraryValue::ValueRegexType:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetRegexType(), IR::AddrOpndKindDynamicType, instr->m_func);
- case LibraryValue::ValueArrayConstructor:
- return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetArrayConstructor(), IR::AddrOpndKindDynamicVar, instr->m_func);
- case LibraryValue::ValueJavascriptArrayType:
- return IR::AddrOpnd::New(Js::JavascriptArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
- case LibraryValue::ValueNativeIntArrayType:
- return IR::AddrOpnd::New(Js::JavascriptNativeIntArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
- case LibraryValue::ValueNativeFloatArrayType:
- return IR::AddrOpnd::New(Js::JavascriptNativeFloatArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
- case LibraryValue::ValueConstructorCacheDefaultInstance:
- return IR::AddrOpnd::New(&Js::ConstructorCache::DefaultInstance, IR::AddrOpndKindDynamicMisc, instr->m_func);
- case LibraryValue::ValueAbsDoubleCst:
- return IR::MemRefOpnd::New((void*)&Js::JavascriptNumber::AbsDoubleCst, TyMachDouble, instr->m_func, IR::AddrOpndKindDynamicDoubleRef);
- case LibraryValue::ValueCharStringCache:
- return IR::AddrOpnd::New((Js::Var)&scriptContext->GetLibrary()->GetCharStringCache(), IR::AddrOpndKindDynamicCharStringCache, instr->m_func);
- default:
- Assert(false);
- return nullptr;
- }
- }
- IR::Opnd *
- Lowerer::LoadVTableValueOpnd(IR::Instr * instr, VTableValue vtableType)
- {
- return IR::AddrOpnd::New((Js::Var)instr->m_func->GetScriptContext()->GetLibrary()->GetVTableAddresses()[vtableType], IR::AddrOpndKindDynamicVtable, this->m_func);
- }
- IR::Opnd *
- Lowerer::LoadOptimizationOverridesValueOpnd(IR::Instr *instr, OptimizationOverridesValue valueType)
- {
- Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
- switch (valueType)
- {
- case OptimizationOverridesValue::OptimizationOverridesSideEffects:
- return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfSideEffects(), TyInt32, instr->m_func);
- case OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable:
- return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
- case OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable:
- return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfIntArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
- case OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable:
- return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfFloatArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
- default:
- Assert(false);
- return nullptr;
- }
- }
- IR::Opnd *
- Lowerer::LoadNumberAllocatorValueOpnd(IR::Instr *instr, NumberAllocatorValue valueType)
- {
- Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
- bool allowNativeCodeBumpAllocation = scriptContext->GetNumberAllocator()->AllowNativeCodeBumpAllocation();
- switch (valueType)
- {
- case NumberAllocatorValue::NumberAllocatorEndAddress:
- return IR::MemRefOpnd::New(((char *)scriptContext->GetNumberAllocator()) + Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, instr->m_func);
- case NumberAllocatorValue::NumberAllocatorFreeObjectList:
- return IR::MemRefOpnd::New(
- ((char *)scriptContext->GetNumberAllocator()) +
- (allowNativeCodeBumpAllocation ? Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset() : Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset()),
- TyMachPtr, instr->m_func);
- default:
- Assert(false);
- return nullptr;
- }
- }
- IR::Opnd *
- Lowerer::LoadIsInstInlineCacheOpnd(IR::Instr * instr, uint inlineCacheIndex)
- {
- Js::IsInstInlineCache * inlineCache = instr->m_func->GetJnFunction()->GetIsInstInlineCache(inlineCacheIndex);
- return IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
- }
- IR::Opnd *
- Lowerer::LoadRuntimeInlineCacheOpnd(IR::Instr * instr, IR::PropertySymOpnd * propertySymOpnd, bool isHelper)
- {
- Assert(propertySymOpnd->m_runtimeInlineCache != nullptr);
- IR::Opnd * inlineCacheOpnd = nullptr;
- if (instr->m_func->GetJnFunction()->GetInlineCachesOnFunctionObject() && !instr->m_func->IsInlinee())
- {
- inlineCacheOpnd = this->GetInlineCacheFromFuncObjectForRuntimeUse(instr, propertySymOpnd, isHelper);
- }
- else
- {
- Js::InlineCache * inlineCache = propertySymOpnd->m_runtimeInlineCache;
- inlineCacheOpnd = IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func, /* dontEncode */ true);
- }
- return inlineCacheOpnd;
- }
- bool
- Lowerer::TryGenerateFastCmSrEq(IR::Instr * instr)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- if (srcReg2 && IsConstRegOpnd(srcReg2))
- {
- return m_lowererMD.GenerateFastCmSrEqConst(instr);
- }
- else if (srcReg1 && IsConstRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- return m_lowererMD.GenerateFastCmSrEqConst(instr);
- }
- else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
- return true;
- }
- else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
- {
- instr->SwapOpnds();
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
- return true;
- }
- else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
- {
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
- return true;
- }
- else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
- {
- instr->SwapOpnds();
- this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
- return true;
- }
- return false;
- }
- bool
- Lowerer::GenerateFastBrSrEq(IR::Instr * instr, IR::RegOpnd * srcReg1, IR::RegOpnd * srcReg2, IR::Instr ** pInstrPrev, bool noMathFastPath)
- {
- if (srcReg2 && IsConstRegOpnd(srcReg2))
- {
- this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), true);
- instr->Remove();
- return true;
- }
- else if (srcReg1 && IsConstRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), true);
- instr->Remove();
- return true;
- }
- else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
- {
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
- return true;
- }
- else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
- {
- instr->SwapOpnds();
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
- return true;
- }
- else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
- {
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
- return true;
- }
- else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
- {
- instr->SwapOpnds();
- this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
- return true;
- }
- return false;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::GenerateFastBrConst
- ///
- ///----------------------------------------------------------------------------
- IR::BranchInstr *
- Lowerer::GenerateFastBrConst(IR::BranchInstr *branchInstr, IR::Opnd * constOpnd, bool isEqual)
- {
- Assert(constOpnd->IsAddrOpnd() || constOpnd->IsIntConstOpnd());
- //
- // Given:
- // BrSrEq_A $L1, s1, s2
- // where s2 is either 'null', 'undefined', 'true' or 'false'
- //
- // Generate:
- //
- // CMP s1, s2
- // JEQ/JNE $L1
- //
- Assert(this->IsConstRegOpnd(branchInstr->GetSrc2()->AsRegOpnd()));
- IR::Opnd *opnd = branchInstr->GetSrc1();
- if (!opnd->IsRegOpnd())
- {
- IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
- LowererMD::CreateAssign(lhsReg, opnd, branchInstr);
- opnd = lhsReg;
- }
- Assert(opnd->IsRegOpnd());
- IR::BranchInstr *newBranch;
- newBranch = InsertCompareBranch(opnd, constOpnd, isEqual ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, branchInstr->GetTarget(), branchInstr);
- return newBranch;
- }
- bool
- Lowerer::TryGenerateFastBrEq(IR::Instr * instr)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- bool isConst = false;
- if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- isConst = true;
- }
- // Fast path for == null or == undefined
- // if (src == null || src == undefined)
- if (isConst || srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2))
- {
- IR::BranchInstr *newBranch;
- newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
- true);
- this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
- true);
- instr->Remove();
- return true;
- }
- return false;
- }
- bool
- Lowerer::TryGenerateFastBrNeq(IR::Instr * instr)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- bool isConst = false;
- if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- isConst = true;
- }
- // Fast path for != null or != undefined
- // if (src != null && src != undefined)
- //
- // That is:
- // if (src == NULL) goto labelEq
- // if (src != undef) goto target
- // labelEq:
- if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
- {
- IR::LabelInstr *labelEq = instr->GetOrCreateContinueLabel();
- IR::BranchInstr *newBranch;
- newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
- true);
- newBranch->AsBranchInstr()->SetTarget(labelEq);
- this->GenerateFastBrConst(instr->AsBranchInstr(),
- this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
- false);
- instr->Remove();
- return true;
- }
- return false;
- }
- bool
- Lowerer::GenerateFastBrSrNeq(IR::Instr * instr, IR::Instr ** pInstrPrev)
- {
- IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- if (srcReg2 && IsConstRegOpnd(srcReg2))
- {
- this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), false);
- instr->Remove();
- return true;
- }
- else if (srcReg1 && IsConstRegOpnd(srcReg1))
- {
- instr->SwapOpnds();
- this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), false);
- instr->Remove();
- return true;
- }
- return false;
- }
void
Lowerer::GenerateDynamicObjectAlloc(IR::Instr * newObjInstr, uint inlineSlotCount, uint slotCount, IR::RegOpnd * newObjDst, IR::Opnd * typeSrc)
{
    // Emits inline allocation + initialization of a Js::DynamicObject into
    // newObjDst, inserted before newObjInstr:
    //   - one allocation covering the object header plus inlineSlotCount inline slots
    //   - a separate aux-slot array when slotCount exceeds inlineSlotCount
    //   - vtable, type, auxSlots, and objectArray fields initialized.
    // typeSrc supplies the object's type operand.
    size_t headerAllocSize = sizeof(Js::DynamicObject) + inlineSlotCount * sizeof(Js::Var);
    IR::SymOpnd * tempObjectSymOpnd;
    // isZeroed tells the MemInit helpers whether stores of zero/null can be elided.
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(newObjInstr, newObjDst, IR::HelperAllocMemForScObject, headerAllocSize, &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableDynamicObject), this->outerMostLoopLabel, false);
    }
    else
    {
        // MOV [newObjDst + offset(vtable)], DynamicObject::vtable
        GenerateMemInit(newObjDst, 0, LoadVTableValueOpnd(newObjInstr, VTableValue::VtableDynamicObject), newObjInstr, isZeroed);
    }
    // MOV [newObjDst + offset(type)], newObjectType
    GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfType(), typeSrc, newObjInstr, isZeroed);
    // CALL JavascriptOperators::AllocMemForVarArray((slotCount - inlineSlotCount) * sizeof(Js::Var))
    if (slotCount > inlineSlotCount)
    {
        // Overflow slots don't fit inline: allocate the aux-slot array and link it.
        size_t auxSlotsAllocSize = (slotCount - inlineSlotCount) * sizeof(Js::Var);
        IR::RegOpnd* auxSlots = IR::RegOpnd::New(TyMachPtr, m_func);
        GenerateRecyclerAllocAligned(IR::HelperAllocMemForVarArray, auxSlotsAllocSize, auxSlots, newObjInstr);
        GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), auxSlots, newObjInstr, isZeroed);
        // NOTE(review): the auxSlots field appears to be stored twice — once by
        // the GenerateMemInit above and again by this CreateAssign. Looks
        // redundant; confirm intent before removing either store.
        IR::IndirOpnd* newObjAuxSlots = IR::IndirOpnd::New(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachPtr, m_func);
        this->m_lowererMD.CreateAssign(newObjAuxSlots, auxSlots, newObjInstr);
    }
    else
    {
        GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), newObjInstr, isZeroed);
    }
    GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfObjectArray(), newObjInstr, isZeroed);
}
- void
- Lowerer::LowerNewScObjectSimple(IR::Instr * instr)
- {
- GenerateDynamicObjectAlloc(
- instr,
- 0,
- 0,
- instr->UnlinkDst()->AsRegOpnd(),
- LoadLibraryValueOpnd(
- instr,
- Js::FunctionBody::DoObjectHeaderInliningForEmptyObjects()
- ? LibraryValue::ValueObjectHeaderInlinedType
- : LibraryValue::ValueObjectType));
- instr->Remove();
- }
void
Lowerer::LowerNewScObjectLiteral(IR::Instr *newObjInstr)
{
    // Lowers NewScObjectLiteral ({a: ..., b: ...} with a known property-id
    // array). src1 = property-array id, src2 = literal-type-slot id.
    // If the cached literal type already exists and is shared, the object is
    // allocated inline against that type directly. Otherwise inline checks are
    // emitted (type present? shared?) with a slow path that calls
    // HelperEnsureObjectLiteralType to create/share the type before allocation.
    Func * func = m_func;
    IR::IntConstOpnd * literalObjectIdOpnd = newObjInstr->UnlinkSrc2()->AsIntConstOpnd();
    // literalTypeRef is the function body's per-literal type slot; its contents
    // may be updated by the runtime, so the jit-time snapshot below can be null.
    Js::DynamicType ** literalTypeRef = newObjInstr->m_func->GetJnFunction()->GetObjectLiteralTypeRef(literalObjectIdOpnd->AsUint32());
    Js::DynamicType * literalType = *literalTypeRef;

    IR::LabelInstr * helperLabel = nullptr;
    IR::LabelInstr * allocLabel = nullptr;
    IR::Opnd * literalTypeRefOpnd;
    IR::Opnd * literalTypeOpnd;
    IR::Opnd * propertyArrayOpnd;

    IR::IntConstOpnd * propertyArrayIdOpnd = newObjInstr->UnlinkSrc1()->AsIntConstOpnd();
    const Js::PropertyIdArray * propIds = Js::ByteCodeReader::ReadPropertyIdArray(propertyArrayIdOpnd->AsUint32(), newObjInstr->m_func->GetJnFunction());

    Js::ScriptContext *const scriptContext = newObjInstr->m_func->GetJnFunction()->GetScriptContext();
    // Slot layout implied by the literal's property list.
    uint inlineSlotCapacity = Js::JavascriptOperators::GetLiteralInlineSlotCapacity(propIds, scriptContext);
    uint slotCapacity = Js::JavascriptOperators::GetLiteralSlotCapacity(propIds, scriptContext);
    IR::RegOpnd * dstOpnd;

    literalTypeRefOpnd = IR::AddrOpnd::New(literalTypeRef, IR::AddrOpndKindDynamicMisc, this->m_func);
    propertyArrayOpnd = IR::AddrOpnd::New((Js::Var)propIds, IR::AddrOpndKindDynamicMisc, this->m_func);

    if (literalType == nullptr || !literalType->GetIsShared())
    {
        // Type not known (or not yet shared) at jit time: load it at runtime and
        // verify it is present and shared, else go to the helper.
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        allocLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

        literalTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(literalTypeOpnd, IR::MemRefOpnd::New(literalTypeRef, TyMachPtr, func), newObjInstr);
        // Null type => helper must create it.
        InsertTestBranch(literalTypeOpnd, literalTypeOpnd,
            Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        // Non-shared type => helper must share it before we can stamp objects with it.
        InsertTestBranch(IR::IndirOpnd::New(literalTypeOpnd->AsRegOpnd(), Js::DynamicType::GetOffsetOfIsShared(), TyInt8, func),
            IR::IntConstOpnd::New(1, TyInt8, func, true), Js::OpCode::BrEq_A, helperLabel, newObjInstr);

        dstOpnd = newObjInstr->GetDst()->AsRegOpnd();
    }
    else
    {
        // Shared type known at jit time: bake its address in directly.
        literalTypeOpnd = IR::AddrOpnd::New(literalType, IR::AddrOpndKindDynamicType, func);
        dstOpnd = newObjInstr->UnlinkDst()->AsRegOpnd();

        // Jit-time capacities must agree with the cached type's handler.
        Assert(inlineSlotCapacity == literalType->GetTypeHandler()->GetInlineSlotCapacity());
        Assert(slotCapacity == (uint)literalType->GetTypeHandler()->GetSlotCapacity());
    }

    if (helperLabel)
    {
        // Fast path falls through to allocLabel; slow path calls
        // HelperEnsureObjectLiteralType(scriptContext, propIds, literalTypeRef)
        // and receives the (now shared) type in literalTypeOpnd.
        InsertBranch(Js::OpCode::Br, allocLabel, newObjInstr);

        // Slow path to ensure the type is there
        newObjInstr->InsertBefore(helperLabel);
        IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperEnsureObjectLiteralType, func);
        m_lowererMD.LoadHelperArgument(newObjInstr, literalTypeRefOpnd);
        m_lowererMD.LoadHelperArgument(newObjInstr, propertyArrayOpnd);
        LoadScriptContext(newObjInstr);
        IR::Instr * ensureTypeInstr = IR::Instr::New(Js::OpCode::Call, literalTypeOpnd, opndHelper, func);
        newObjInstr->InsertBefore(ensureTypeInstr);
        m_lowererMD.LowerCall(ensureTypeInstr, 0);

        newObjInstr->InsertBefore(allocLabel);
    }
    else
    {
        Assert(allocLabel == nullptr);
    }

    // For the next call:
    //      inlineSlotCapacity == Number of slots to allocate beyond the DynamicObject header
    //      slotCapacity - inlineSlotCapacity == Number of aux slots to allocate
    if(Js::FunctionBody::DoObjectHeaderInliningForObjectLiteral(propIds, scriptContext))
    {
        // Header-inlined objects store their first slots inside the header
        // itself, so subtract the header-inlinable capacity from both counts.
        Assert(inlineSlotCapacity >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
        Assert(inlineSlotCapacity == slotCapacity);
        slotCapacity = inlineSlotCapacity -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
    }

    GenerateDynamicObjectAlloc(
        newObjInstr,
        inlineSlotCapacity,
        slotCapacity,
        dstOpnd,
        literalTypeOpnd);

    newObjInstr->Remove();
}
- IR::Instr*
- Lowerer::LowerProfiledNewScArray(IR::JitProfilingInstr* arrInstr)
- {
- IR::Instr *instrPrev = arrInstr->m_prev;
- /*
- JavascriptArray *ProfilingHelpers::ProfiledNewScArray(
- const uint length,
- FunctionBody *const functionBody,
- const ProfileId profileId)
- */
- m_lowererMD.LoadHelperArgument(arrInstr, IR::Opnd::CreateProfileIdOpnd(arrInstr->profileId, m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, CreateFunctionBodyOpnd(arrInstr->m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, arrInstr->UnlinkSrc1());
- arrInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScArray, m_func));
- m_lowererMD.LowerCall(arrInstr, 0);
- return instrPrev;
- }
IR::Instr *
Lowerer::LowerNewScArray(IR::Instr *arrInstr)
{
    // Lowers NewScArray (array literal / new Array(length) with constant
    // length). With profile data, emits an inline fast path specialized to the
    // profiled element kind, falling back to a helper call; likely-native
    // arrays use the profiled helper so the runtime can track conversions.
    // Returns the instruction preceding arrInstr for iteration.
    if (arrInstr->IsJitProfilingInstr())
    {
        // Profiling-mode jit: always call the profiled helper, no fast path.
        return LowerProfiledNewScArray(arrInstr->AsJitProfilingInstr());
    }

    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArray;

    if (arrInstr->IsProfiledInstr() && arrInstr->m_func->HasProfileInfo())
    {
        RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        Assert(weakFuncRef);

        Js::ProfileId profileId = static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
        Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
        Js::DynamicProfileInfo *profileInfo = functionBody->GetAnyDynamicProfileInfo();
        Js::ArrayCallSiteInfo *arrayInfo = profileInfo->GetArrayCallSiteInfo(functionBody, profileId);

        // Emit the inline allocation fast path (may emit nothing if disabled
        // or the array is too large); the helper call below remains as the
        // slow path either way.
        Assert(arrInstr->GetSrc1()->IsConstOpnd());
        GenerateProfiledNewScArrayFastPath(arrInstr, arrayInfo, weakFuncRef, arrInstr->GetSrc1()->AsIntConstOpnd()->AsUint32());

        if (arrInstr->GetDst() && arrInstr->GetDst()->GetValueType().IsLikelyNativeArray())
        {
            // Native arrays need the call-site info and function-body weak ref
            // so the runtime can record type transitions; use the profiled helper.
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScArray;
        }
    }

    LoadScriptContext(arrInstr);

    IR::Opnd *src1Opnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, src1Opnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);

    return instrPrev;
}
- template <typename ArrayType>
- BOOL Lowerer::IsSmallObject(uint32 length)
- {
- if (ArrayType::HasInlineHeadSegment(length))
- return true;
- uint32 alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(length);
- size_t allocSize = sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) + alignedHeadSegmentSize * sizeof(typename ArrayType::TElement);
- return HeapInfo::IsSmallObject(HeapInfo::GetAlignedSizeNoCheck(allocSize));
- }
void
Lowerer::GenerateProfiledNewScArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef, uint32 length)
{
    // Emits an inline allocation fast path for NewScArray with a constant
    // length, specialized by the dst's profiled value type (int / float / var
    // array). Guards re-check the call-site profile at runtime and bail to
    // helperLabel (the existing helper call after instr) on mismatch. Only the
    // tail of the head segment beyond `length` (up to the allocated segment
    // size) is filled with missing-item sentinels; elements [0, length) are
    // left for subsequent stores. Emits nothing when the fast path is disabled
    // or the array is too large for small-object allocation.
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // size is in/out: GenerateArrayAlloc updates it to the aligned head-segment size.
    uint32 size = length;
    bool isZeroed;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    // i starts at length so the fill loops below cover only [length, size).
    uint32 i = length;

    if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeIntArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeIntArray>(length))
        {
            return;
        }
        // Bail to helper if the call site is no longer a native int array.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);

        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        for (; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeFloatArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeFloatArray>(length))
        {
            return;
        }
        // Bail to helper unless the call site is a float (and not int) array.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);

        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);

        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        // Rescale the element range from doubles into MissingItem-sized stores.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        i = i * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        // Var array: no profile guard needed.
        if (!IsSmallObject<Js::JavascriptArray>(length))
        {
            return;
        }
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        for (; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }

    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
- void
- Lowerer::GenerateArrayInfoIsNativeIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, IR::LabelInstr * helperLabel)
- {
- Func * func = this->m_func;
- InsertTestBranch(IR::MemRefOpnd::New(((char *)arrayInfo) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
- IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
- }
- void
- Lowerer::GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, IR::LabelInstr * helperLabel)
- {
- Func * func = this->m_func;
- InsertCompareBranch(IR::MemRefOpnd::New(((char *)arrayInfo) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
- IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
- }
- template <typename ArrayType>
- static IR::JnHelperMethod GetArrayAllocMemHelper();
- template <>
- static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptArray>()
- {
- return IR::HelperAllocMemForJavascriptArray;
- }
- template <>
- static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeIntArray>()
- {
- return IR::HelperAllocMemForJavascriptNativeIntArray;
- }
- template <>
- static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeFloatArray>()
- {
- return IR::HelperAllocMemForJavascriptNativeFloatArray;
- }
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed)
{
    // Emits inline allocation of an ArrayType with *psize elements (mirroring
    // JavascriptArray::NewLiteral), initializing the object header, flags/
    // call-site index, length, head-segment links, and the head segment's own
    // header. On return, *psize holds the aligned head-segment size and
    // *pIsHeadSegmentZeroed reports whether the head segment memory is known
    // zeroed (so callers can elide zero stores). Returns a register holding
    // the head segment pointer.
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();

    // Generate code as in JavascriptArray::NewLiteral
    uint32 count = *psize;
    uint alignedHeadSegmentSize;
    size_t arrayAllocSize;

    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    bool isHeadSegmentZeroed = false;
    if (ArrayType::HasInlineHeadSegment(count))
    {
        // Head segment lives inline, right after the array object: one
        // allocation; the head pointer is just dst + sizeof(ArrayType) (LEA
        // inserted later, after dst is materialized).
        uint32 allocCount = count == 0 ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
        arrayAllocSize = Js::JavascriptArray::DetermineAllocationSize<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
            IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
        isHeadSegmentZeroed = true;
    }
    else
    {
        // Need to allocate the head segment first so that if it throws,
        // we doesn't have the memory assigned to dstOpnd yet
        // Even if the instruction is marked as dstIsTempObject, we still should not allocate
        // that big of a chunk on the stack.
        alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(count);
        GenerateRecyclerAlloc(
            IR::HelperAllocMemForSparseArraySegmentBase,
            sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) +
            alignedHeadSegmentSize * sizeof(typename ArrayType::TElement),
            headOpnd,
            instr);

        arrayAllocSize = sizeof(ArrayType);
    }
    // Report the aligned segment size back so callers can fill the tail.
    *psize = alignedHeadSegmentSize;

    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd,
        GetArrayAllocMemHelper<ArrayType>(), arrayAllocSize, &tempObjectSymOpnd);
    // Inline head segment shares the object's allocation, so it's only zeroed
    // if the object allocation was.
    isHeadSegmentZeroed = isHeadSegmentZeroed & isZeroed;

    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            this->LoadVTableValueOpnd(this->outerMostLoopLabel, ArrayType::VtableHelper()),
            this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, isZeroed);

    // Emit the flags and call site index together
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Js::FunctionBody * functionBody = instr->m_func->GetJnFunction();
        Assert((uint32)(arrayInfo - functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, 0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif

    // The same at this:
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint16)Js::DynamicObjectFlags::InitialArrayValue, instr, isZeroed);
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayCallSiteIndex(), arrayCallSiteIndex, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint)Js::DynamicObjectFlags::InitialArrayValue | ((uint)arrayCallSiteIndex << 16), instr, isZeroed);

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), count, instr, isZeroed);

    if (leaHeadInstr != nullptr)
    {
        // Inline head segment: compute its address now that dst is allocated.
        instr->InsertBefore(leaHeadInstr);
        LowererMD::ChangeToLea(leaHeadInstr);
    }

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, isZeroed);

    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), count, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), alignedHeadSegmentSize, instr, isHeadSegmentZeroed);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, isHeadSegmentZeroed);

    *pIsHeadSegmentZeroed = isHeadSegmentZeroed;
    return headOpnd;
}
void
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef, uint32 length)
{
    // Emits an inline allocation fast path for `new Array(length)` via the
    // array constructor, specialized by the call-site profile (int / float /
    // var array). Guards re-check the profile at runtime and bail to
    // helperLabel (the helper call that follows instr) on mismatch. Unlike
    // GenerateProfiledNewScArrayFastPath, the entire head segment [0, size) is
    // filled with missing-item sentinels, since the constructor produces an
    // array with no elements set.
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // size is in/out: GenerateArrayAlloc updates it to the aligned head-segment size.
    uint32 size = length;
    bool isZeroed;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;

    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        // Bail to helper if the call site is no longer a native int array.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);

        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        // Bail to helper unless the call site is a float (and not int) array.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);

        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);

        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        // Rescale the element count from doubles into MissingItem-sized stores.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (uint i = 0; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        // Var array: no profile guard needed.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }

    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
- void
- Lowerer::GenerateProfiledNewScIntArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef)
- {
- // Helper will deal with ForceES5ARray
- if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
- {
- return;
- }
- if (!arrayInfo->IsNativeIntArray())
- {
- return;
- }
- Func * func = this->m_func;
- IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
- IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
- Js::AuxArray<int32> * ints = (Js::AuxArray<int32> *)elementsOpnd->m_address;
- uint32 size = ints->count;
- // Generate code as in JavascriptArray::NewLiteral
- bool isHeadSegmentZeroed;
- IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
- Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
- IR::RegOpnd * headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
- const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
- GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicMisc, m_func), instr, isHeadSegmentZeroed);
- // Initialize the elements
- uint i = 0;
- if (ints->count > 16)
- {
- // Do memcpy if > 16
- IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
- const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
- IR::Opnd * srcOpnd = IR::AddrOpnd::New(ints->elements, IR::AddrOpndKindDynamicMisc, func);
- InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
- GenerateMemCopy(dstElementsOpnd, srcOpnd, ints->count * sizeof(int32), instr);
- i = ints->count;
- }
- else
- {
- for (; i < ints->count; i++)
- {
- GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
- ints->elements[i], instr, isHeadSegmentZeroed);
- }
- }
- Assert(i == ints->count);
- for (; i < size; i++)
- {
- GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
- Js::JavascriptNativeIntArray::MissingItem, instr, isHeadSegmentZeroed);
- }
- // Skip pass the helper call
- IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- InsertBranch(Js::OpCode::Br, doneLabel, instr);
- instr->InsertBefore(helperLabel);
- instr->InsertAfter(doneLabel);
- }
- void
- Lowerer::GenerateProfiledNewScFloatArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef)
- {
- if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
- {
- return;
- }
- if (!arrayInfo->IsNativeFloatArray())
- {
- return;
- }
- Func * func = this->m_func;
- IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- // If the array info hasn't mark as not int array yet, go to the helper and mark it.
- // It really is just for assert purpose in JavascriptNativeFloatArray::ToVarArray
- GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);
- IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
- Js::AuxArray<double> * doubles = (Js::AuxArray<double> *)elementsOpnd->m_address;
- uint32 size = doubles->count;
- // Generate code as in JavascriptArray::NewLiteral
- bool isHeadSegmentZeroed;
- IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
- Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
- IR::RegOpnd * headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
- const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
- GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isHeadSegmentZeroed);
- // Initialize the elements
- IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
- const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
- IR::Opnd * srcOpnd = IR::AddrOpnd::New(doubles->elements, IR::AddrOpndKindDynamicMisc, func);
- InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
- GenerateMemCopy(dstElementsOpnd, srcOpnd, doubles->count * sizeof(double), instr);
- // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
- uint const offsetStart = sizeof(Js::SparseArraySegmentBase) + doubles->count * sizeof(double);
- uint const missingItem = (size - doubles->count) * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
- for (uint i = 0; i < missingItem; i++)
- {
- GenerateMemInit(headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
- IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true), instr, isHeadSegmentZeroed);
- }
- // Skip pass the helper call
- IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- InsertBranch(Js::OpCode::Br, doneLabel, instr);
- instr->InsertBefore(helperLabel);
- instr->InsertAfter(doneLabel);
- }
- IR::Instr *
- Lowerer::LowerNewScIntArray(IR::Instr *arrInstr)
- {
- IR::Instr *instrPrev = arrInstr->m_prev;
- IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScIntArray;
- if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
- {
- RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
- if (weakFuncRef)
- {
- Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
- // Technically a load of the same memory address either way.
- Js::ProfileId profileId =
- arrInstr->IsJitProfilingInstr()
- ? arrInstr->AsJitProfilingInstr()->profileId
- : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
- Js::ArrayCallSiteInfo *arrayInfo =
- functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
- // Only do fast-path if it isn't a JitProfiling instr and not copy-on-access array
- if (arrInstr->IsProfiledInstr()
- && (PHASE_OFF1(Js::Phase::CopyOnAccessArrayPhase) || arrayInfo->isNotCopyOnAccessArray) && !PHASE_FORCE1(Js::Phase::CopyOnAccessArrayPhase))
- {
- GenerateProfiledNewScIntArrayFastPath(arrInstr, arrayInfo, weakFuncRef);
- }
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
- helperMethod = IR::HelperScrArr_ProfiledNewScIntArray;
- }
- }
- LoadScriptContext(arrInstr);
- IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
- m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerNewScFltArray(IR::Instr *arrInstr)
- {
- IR::Instr *instrPrev = arrInstr->m_prev;
- IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScFltArray;
- if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
- {
- RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
- if (weakFuncRef)
- {
- Js::ProfileId profileId =
- arrInstr->IsJitProfilingInstr()
- ? arrInstr->AsJitProfilingInstr()->profileId
- : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
- Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
- Js::ArrayCallSiteInfo *arrayInfo =
- functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
- // Only do fast-path if it isn't a JitProfiling instr
- if (arrInstr->IsProfiledInstr()) {
- GenerateProfiledNewScFloatArrayFastPath(arrInstr, arrayInfo, weakFuncRef);
- }
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
- m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
- helperMethod = IR::HelperScrArr_ProfiledNewScFltArray;
- }
- }
- LoadScriptContext(arrInstr);
- IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
- m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerArraySegmentVars(IR::Instr *arrayInstr)
- {
- IR::Instr * instrPrev;
- IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperArraySegmentVars, m_func);
- instrPrev = m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc2());
- m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc1());
- arrayInstr->m_opcode = Js::OpCode::Call;
- arrayInstr->SetSrc1(opndHelper);
- m_lowererMD.LowerCall(arrayInstr, 0);
- return instrPrev;
- }
- IR::Instr* Lowerer::LowerProfiledNewArray(IR::JitProfilingInstr* instr, bool hasArgs)
- {
- // Use the special helper which checks whether Array has been overwritten by the user and if
- // it hasn't, possibly allocates a native array
- // Insert a temporary label before the instruction we're about to lower, so that we can return
- // the first instruction above that needs to be lowered after we're done - regardless of argument
- // list, StartCall, etc.
- IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(instr);
- Assert(instr->isNewArray);
- Assert(instr->arrayProfileId != Js::Constants::NoProfileId);
- Assert(instr->profileId != Js::Constants::NoProfileId);
- bool isSpreadCall = instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread;
- m_lowererMD.LoadNewScObjFirstArg(instr, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindConstantVar, m_func, true), isSpreadCall ? 1 : 0);
- if (isSpreadCall)
- {
- this->LowerSpreadCall(instr, Js::CallFlags_New, true);
- }
- else
- {
- const int32 argCount = m_lowererMD.LowerCallArgs(instr, Js::CallFlags_New, 4);
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->arrayProfileId, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
- m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScObjArray, m_func));
- m_lowererMD.LowerCall(instr, static_cast<Js::ArgSlot>(argCount));
- }
- return RemoveLoweredRegionStartMarker(startMarkerInstr);
- }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerNewScObject
///
/// Machine independent lowering of a new script object creation
/// (NewScObject and related opcodes), including the optional ctor call.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerNewScObject(IR::Instr *newObjInstr, bool callCtor, bool hasArgs, bool isBaseClassConstructorNewScObject)
{
    // Lowers a NewScObject-family instruction. Attempts a fast allocation via a
    // fixed constructor cache; otherwise emits a bailout or a NewScObject*
    // helper call; then (when callCtor) lowers the constructor call and routes
    // either the created object or the ctor's return value into the dst.
    //
    //   callCtor  - this instr has not yet been split into NewScObjectNoCtor + CallI,
    //               so the ctor call must be lowered here as well
    //   hasArgs   - the ctor call carries an ArgOut chain (re-checked below for empty chains)
    //   isBaseClassConstructorNewScObject - selects the *Full variants of the no-ctor helpers
    //
    // Returns the first instruction above the lowered region.

    // JIT-profiling new-array instructions take a dedicated lowering path.
    if (newObjInstr->IsJitProfilingInstr() && newObjInstr->AsJitProfilingInstr()->isNewArray)
    {
        Assert(callCtor);
        return LowerProfiledNewArray(newObjInstr->AsJitProfilingInstr(), hasArgs);
    }

    bool isSpreadCall = newObjInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread;

    Func* func = newObjInstr->m_func;

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

    IR::Opnd *ctorOpnd = newObjInstr->GetSrc1();
    IR::RegOpnd *newObjDst = newObjInstr->GetDst()->AsRegOpnd();

    Assert(!callCtor || !hasArgs || (newObjInstr->GetSrc2() != nullptr /*&& newObjInstr->GetSrc2()->IsSymOpnd()*/));

    bool skipNewScObj = false;   // set if the ctor needs no default new object (built-ins)
    bool returnNewScObj = false; // set if the ctor has no explicit return, so we return the created object
    bool emitBailOut = false;    // set if the instr carries a ctor-guard bailout

    // If we haven't yet split NewScObject into NewScObjectNoCtor and CallI, we will need a temporary register
    // to hold the result of the object allocation.
    IR::RegOpnd* createObjDst = callCtor ? IR::RegOpnd::New(TyVar, func) : newObjDst;
    IR::LabelInstr* helperOrBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ true);
    IR::LabelInstr* callCtorLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ false);

    // Try to emit the fast allocation and construction path.
    bool usedFixedCtorCache = TryLowerNewScObjectWithFixedCtorCache(newObjInstr, createObjDst, helperOrBailoutLabel, callCtorLabel, skipNewScObj, returnNewScObj, emitBailOut);

    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
    Assert(!skipNewScObj || !returnNewScObj);
    Assert(usedFixedCtorCache || !skipNewScObj);
    Assert(!usedFixedCtorCache || newObjInstr->HasFixedFunctionAddressTarget());
    Assert(!skipNewScObj || !emitBailOut);

#if DBG
    if (usedFixedCtorCache)
    {
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        Assert((ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::ErrorOnNew) == 0);
        Assert(!!(ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::SkipDefaultNewObject) == skipNewScObj);
    }
#endif

    // An empty ArgOut chain means this is effectively a no-arg ctor call.
    IR::Instr* startCallInstr = nullptr;
    if (callCtor && hasArgs)
    {
        hasArgs = !newObjInstr->HasEmptyArgOutChain(&startCallInstr);
    }

    // If we're not skipping the default new object, let's emit bailout or a call to NewScObject* helper
    IR::JnHelperMethod newScHelper = IR::HelperInvalid;
    IR::Instr *newScObjCall = nullptr;
    if (!skipNewScObj)
    {
        // If we emitted the fast path, this block is a helper block.
        if (usedFixedCtorCache)
        {
            newObjInstr->InsertBefore(helperOrBailoutLabel);
        }

        if (emitBailOut)
        {
            // Split the instr: the original becomes the BailOut, and a fresh copy
            // of the NewScObject continues the lowering below.
            IR::Instr* bailOutInstr = newObjInstr;
            newObjInstr = IR::Instr::New(newObjInstr->m_opcode, func);
            bailOutInstr->TransferTo(newObjInstr);
            bailOutInstr->m_opcode = Js::OpCode::BailOut;
            bailOutInstr->InsertAfter(newObjInstr);
            GenerateBailOut(bailOutInstr);
        }
        else
        {
            Assert(!newObjDst->CanStoreTemp());
            // createObjDst = NewScObject...(ctorOpnd)
            // Pick the helper variant by whether we still call the ctor here,
            // whether there are args, and whether the base-class variant is wanted.
            newScHelper = !callCtor ?
                (isBaseClassConstructorNewScObject ?
                    (hasArgs ? IR::HelperNewScObjectNoCtorFull : IR::HelperNewScObjectNoArgNoCtorFull) :
                    (hasArgs ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArgNoCtor)) :
                (hasArgs || usedFixedCtorCache ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArg);

            LoadScriptContext(newObjInstr);
            m_lowererMD.LoadHelperArgument(newObjInstr, newObjInstr->GetSrc1());

            newScObjCall = IR::Instr::New(Js::OpCode::Call, createObjDst, IR::HelperCallOpnd::New(newScHelper, func), func);
            newObjInstr->InsertBefore(newScObjCall);
            m_lowererMD.LowerCall(newScObjCall, 0);
        }
    }

    // If we call HelperNewScObjectNoArg directly, we won't be calling the constructor from here, because the helper will do it.
    // We could probably avoid this complexity by converting NewScObjectNoArg to NewScObject in the IRBuilder, once we have dedicated
    // code paths for new Object() and new Array().
    callCtor &= hasArgs || usedFixedCtorCache;
    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");

    newObjInstr->InsertBefore(callCtorLabel);

    if (callCtor && usedFixedCtorCache)
    {
        IR::JnHelperMethod ctorHelper = IR::JnHelperMethodCount;

        // If we have no arguments (i.e. the argument chain is empty), we can recognize a couple of common special cases, such
        // as new Object() or new Array(), for which we have optimized helpers.
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        if (!hasArgs && (ctorInfo == &Js::JavascriptObject::EntryInfo::NewInstance || ctorInfo == &Js::JavascriptArray::EntryInfo::NewInstance))
        {
            if (ctorInfo == &Js::JavascriptObject::EntryInfo::NewInstance)
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptObjectNoArg;
                callCtor = false;
            }
            else if (ctorInfo == &Js::JavascriptArray::EntryInfo::NewInstance)
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptArrayNoArg;
                callCtor = false;
            }

            if (!callCtor)
            {
                // Replace the ctor call with a direct call to the optimized helper.
                LoadScriptContext(newObjInstr);

                IR::Instr *ctorCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(ctorHelper, func), func);
                newObjInstr->InsertBefore(ctorCall);
                m_lowererMD.LowerCall(ctorCall, 0);
            }
        }
    }

    IR::AutoReuseOpnd autoReuseSavedCtorOpnd;
    if (callCtor)
    {
        // Load the first argument, which is either the object just created or null. Spread has an extra argument.
        IR::Instr * argInstr = this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, createObjDst, isSpreadCall ? 1 : 0);

        IR::Instr * insertAfterCtorInstr = newObjInstr->m_next;

        if (skipNewScObj)
        {
            // Since we skipped the default new object, we must be returning whatever the constructor returns
            // (which better be an Object), so let's just use newObjDst directly.
            // newObjDst = newObjInstr->m_src1(createObjDst, ...)
            Assert(newObjInstr->GetDst() == newObjDst);
            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }
        }
        else
        {
            // We may need to return the default new object or whatever the constructor returns. Let's stash
            // away the constructor's return in a temporary operand, and do the right check, if necessary.
            // ctorResultObjOpnd = newObjInstr->m_src1(createObjDst, ...)
            IR::RegOpnd *ctorResultObjOpnd = IR::RegOpnd::New(TyVar, func);
            newObjInstr->UnlinkDst();
            newObjInstr->SetDst(ctorResultObjOpnd);

            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }

            if (returnNewScObj)
            {
                // Ctor has no explicit return value: the created object is the result.
                // MOV newObjDst, createObjDst
                this->m_lowererMD.CreateAssign(newObjDst, createObjDst, insertAfterCtorInstr);
            }
            else
            {
                // Select between the ctor's return value and the created object at runtime.
                LowerGetNewScObjectCommon(ctorResultObjOpnd, ctorResultObjOpnd, createObjDst, insertAfterCtorInstr);
                this->m_lowererMD.CreateAssign(newObjDst, ctorResultObjOpnd, insertAfterCtorInstr);
            }
        }

        // We don't ever need to update the constructor cache, if we hard coded it. Caches requiring update after constructor
        // don't get cloned, and those that don't require update will never need one anymore.
        if (!usedFixedCtorCache)
        {
            LowerUpdateNewScObjectCache(insertAfterCtorInstr, newObjDst, ctorOpnd, false /* isCtorFunction */);
        }
    }
    else
    {
        if (newObjInstr->IsJitProfilingInstr())
        {
            Assert(m_func->IsSimpleJit());
            Assert(!Js::FunctionBody::IsNewSimpleJit());

            // This path skipped calling the Ctor, which skips calling LowerCallI with newObjInstr, meaning that the call will not be profiled.
            // So we insert it manually here.

            if(newScHelper == IR::HelperNewScObjectNoArg &&
                newObjDst &&
                ctorOpnd->IsRegOpnd() &&
                newObjDst->AsRegOpnd()->m_sym == ctorOpnd->AsRegOpnd()->m_sym)
            {
                Assert(newObjInstr->m_func->IsSimpleJit());
                Assert(createObjDst != newObjDst);

                // The function object sym is going to be overwritten, so save it in a temp for profiling
                IR::RegOpnd *const savedCtorOpnd = IR::RegOpnd::New(ctorOpnd->GetType(), newObjInstr->m_func);
                autoReuseSavedCtorOpnd.Initialize(savedCtorOpnd, newObjInstr->m_func);
                Lowerer::InsertMove(savedCtorOpnd, ctorOpnd, newObjInstr);
                ctorOpnd = savedCtorOpnd;
            }

            // It is a constructor (CallFlags_New) and therefore a single argument (this) would have been given.
            const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_New, 1, func);

            Assert(newScObjCall);
            IR::JitProfilingInstr *const newObjJitProfilingInstr = newObjInstr->AsJitProfilingInstr();
            GenerateCallProfiling(
                newObjJitProfilingInstr->profileId,
                newObjJitProfilingInstr->inlineCacheIndex,
                createObjDst,
                ctorOpnd,
                info,
                false,
                newScObjCall,
                newObjInstr);
        }

        // MOV newObjDst, createObjDst
        if (!skipNewScObj && createObjDst != newObjDst)
        {
            this->m_lowererMD.CreateAssign(newObjDst, createObjDst, newObjInstr);
        }
        newObjInstr->Remove();
    }

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
IR::Instr*
Lowerer::GenerateCallProfiling(Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex, IR::Opnd* retval, IR::Opnd*calleeFunctionObjOpnd, IR::Opnd* callInfo, bool returnTypeOnly, IR::Instr*callInstr,IR::Instr*insertAfter)
{
    // Emits a call to a SimpleJit profiling helper after 'insertAfter' to record
    // a call that just happened (callInstr). The implicit call flags are saved
    // before callInstr and restored after the profiling call so the helper's own
    // implicit calls don't pollute them.
    //
    //   profileId        - profile id of the call site
    //   inlineCacheIndex - selects the helper variant; NoInlineCacheIndex uses the default-index helper
    //   retval           - result of the profiled call (null -> a null Var is recorded)
    //   calleeFunctionObjOpnd - the function object that was called
    //   callInfo         - CallInfo for the profiled call
    //   returnTypeOnly   - only profile the return type (a no-op if retval is null)
    //   callInstr        - the lowered machine call instruction being profiled
    //   insertAfter      - instruction after which the profiling call is inserted
    //
    // Returns the lowered profiling call (or 'insertAfter' when nothing is emitted).

    // This should only ever happen in profiling simplejit
    Assert(m_func->DoSimpleJitDynamicProfile());

    // Make sure they gave us the correct call instruction
#if defined(_M_IX86) || defined(_M_X64)
    Assert(callInstr->m_opcode == Js::OpCode::CALL);
#elif defined(_M_ARM)
    Assert(callInstr->m_opcode == Js::OpCode::BLX);
#endif
    Func*const func = insertAfter->m_func;

    {
        // First, we should save the implicit call flags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        const auto saveOpnd = IR::RegOpnd::New(starFlag->GetType(), func);

        IR::AutoReuseOpnd a(starFlag, func), b(saveOpnd, func);

        //Save the flags (before call) and restore them (after the call)
        this->InsertMove(saveOpnd, starFlag, callInstr);
        // Note: On arm this is slightly inefficient because it forces a reload of the memory location to a reg (whereas x86 can load straight from hard-coded memory into a reg)
        //   But it works and making it not reload the memory location would force more refactoring.
        this->InsertMove(starFlag, saveOpnd, insertAfter->m_next);
    }

    // Profile a call that just happened: push some extra info on the stack and call the helper
    if (!retval)
    {
        if (returnTypeOnly)
        {
            // If we are only supposed to profile the return type but don't use the return value, we might
            //    as well do nothing!
            return insertAfter;
        }
        retval = IR::AddrOpnd::NewNull(func);
    }

    IR::Instr* profileCall = IR::Instr::New(Js::OpCode::Call, func);

    // Select the helper and whether the inline cache index argument is needed.
    bool needInlineCacheIndex;
    IR::JnHelperMethod helperMethod;
    if (returnTypeOnly)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileReturnTypeCall;
    }
    else if(inlineCacheIndex == Js::Constants::NoInlineCacheIndex)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileCall_DefaultInlineCacheIndex;
    }
    else
    {
        needInlineCacheIndex = true;
        helperMethod = IR::HelperSimpleProfileCall;
    }
    profileCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));

    insertAfter->InsertAfter(profileCall);

    // Arguments are loaded onto the already-inserted call, so they end up in
    // reverse order: frame pointer first, then profileId, [cache index], retval,
    // callee, callInfo.
    m_lowererMD.LoadHelperArgument(profileCall, callInfo);
    m_lowererMD.LoadHelperArgument(profileCall, calleeFunctionObjOpnd);
    m_lowererMD.LoadHelperArgument(profileCall, retval);
    if(needInlineCacheIndex)
    {
        m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateInlineCacheIndexOpnd(inlineCacheIndex, func));
    }
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateProfileIdOpnd(profileId, func));

    // Push the frame pointer so that the profiling call can grab the stack layout
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateFramePointerOpnd(func));

    // No args: the helper is stdcall
    return m_lowererMD.LowerCall(profileCall, 0);
}
// Attempts to emit the fast object-allocation path for NewScObject using a
// hard-coded (cloned) constructor cache. On success, emits a guard check that
// branches to helperOrBailoutLabel when the cache was invalidated, then either
// allocates the object inline from the cached type or (for class constructors)
// passes the function as 'this', and branches to callCtorLabel.
//
// Out-params:
//   skipNewScObj   - ctor is a built-in that needs no default new object
//   returnNewScObj - ctor has no explicit return, so the created object is the result
//   emitBailOut    - instr carries a BailOutFailedCtorGuardCheck, so the guard
//                    failure path is a bailout rather than a helper call
//
// Returns true when the fast path (or the skip-object path) was emitted.
bool Lowerer::TryLowerNewScObjectWithFixedCtorCache(IR::Instr* newObjInstr, IR::RegOpnd* newObjDst,
    IR::LabelInstr* helperOrBailoutLabel, IR::LabelInstr* callCtorLabel, bool& skipNewScObj, bool& returnNewScObj, bool& emitBailOut)
{
    skipNewScObj = false;
    returnNewScObj = false;

    AssertMsg(!PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func) || !newObjInstr->HasBailOutInfo(),
        "Why do we have bailout on NewScObject when ObjTypeSpecNewObj is off?");

    if (PHASE_OFF(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) && PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func))
    {
        return false;
    }

    Js::JitTimeConstructorCache* ctorCache;

    if (newObjInstr->HasBailOutInfo())
    {
        // Bailout on guard failure: the globopt has object-type-spec'd downstream
        // operations against this ctor cache.
        Assert(newObjInstr->IsNewScObjectInstr());
        Assert(newObjInstr->IsProfiledInstr());
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck);

        emitBailOut = true;

        ctorCache = newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId));
        Assert(ctorCache != nullptr);
        Assert(!ctorCache->skipNewScObject);
        Assert(!ctorCache->typeIsFinal || ctorCache->ctorHasNoExplicitReturnValue);

        LinkCtorCacheToGuardedProperties(ctorCache);
    }
    else
    {
        if (newObjInstr->m_opcode == Js::OpCode::NewScObjArray || newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
        {
            // These instr's carry a profile that indexes the array call site info, not the ctor cache.
            return false;
        }

        ctorCache = newObjInstr->IsProfiledInstr() ? newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId)) : nullptr;

        if (ctorCache == nullptr)
        {
            if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
            {
                Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
                wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(L"FixedNewObj: function %s (%s): lowering non-fixed new script object for %s, because %s.\n",
                    callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                    newObjInstr->IsProfiledInstr() ? L"constructor cache hasn't been cloned" : L"instruction is not profiled");
                Output::Flush();
            }

            return false;
        }
    }

    Assert(ctorCache != nullptr);

    // We should only have cloned if the script contexts match.
    Assert(newObjInstr->m_func->GetScriptContext() == ctorCache->scriptContext);

    // Built-in constructors don't need a default new object. Since we know which constructor we're calling, we can skip creating a default
    // object and call a specialized helper (or even constructor, directly) avoiding the checks in generic NewScObjectCommon.
    if (ctorCache->skipNewScObject)
    {
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
        {
            Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
            const Js::JavascriptFunction* ctor = ctorCache->constructor;
            Js::FunctionBody* ctorBody = ctor->GetFunctionInfo()->HasBody() ? ctor->GetFunctionInfo()->GetFunctionBody() : nullptr;
            const wchar_t* ctorName = ctorBody != nullptr ? ctorBody->GetDisplayName() : L"<unknown>";

            wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

            Output::Print(L"FixedNewObj: function %s (%s): lowering skipped new script object for %s with %s ctor <unknown> (%s %s).\n",
                callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
                ctorName, ctorBody ? ctorBody->GetDebugNumberSet(debugStringBuffer) : L"(null)");
            Output::Flush();
        }

        // All built-in constructors share a special singleton cache that is never checked and never invalidated. It cannot be used
        // as a guard to protect any property operations downstream from the constructor. If this ever becomes a performance issue,
        // we could have a dedicated cache for each built-in constructor, populate it and invalidate it as any other constructor cache.
        AssertMsg(!emitBailOut, "Can't bail out on constructor cache guard for built-in constructors.");

        skipNewScObj = true;
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        this->m_lowererMD.CreateAssign(newObjDst, zeroOpnd, newObjInstr);
        return true;
    }

    AssertMsg(ctorCache->type != nullptr, "Why did we hard-code a mismatched, invalidated or polymorphic constructor cache?");

    if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
    {
        Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
        const Js::JavascriptFunction* constructor = ctorCache->constructor;
        Js::FunctionBody* constructorBody = constructor->GetFunctionInfo()->HasBody() ? constructor->GetFunctionInfo()->GetFunctionBody() : nullptr;
        const wchar_t* constructorName = constructorBody != nullptr ? constructorBody->GetDisplayName() : L"<unknown>";

        wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
        {
            Output::Print(L"FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): type = %p, slots = %d, inlined slots = %d.\n",
                callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
                constructorName, constructorBody ? constructorBody->GetDebugNumberSet(debugStringBuffer) : L"(null)",
                ctorCache->type, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        else
        {
            Output::Print(L"FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): slots = %d, inlined slots = %d.\n",
                callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
                constructorName, debugStringBuffer, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        Output::Flush();
    }

    // If the constructor has no return statements, we can safely return the object that was created here.
    // No need to check what the constructor returned - it must be undefined.
    returnNewScObj = ctorCache->ctorHasNoExplicitReturnValue;

    // Guard check: a zeroed guard value means the cache was invalidated; take
    // the helper (or bailout) path in that case.
    Assert(Js::ConstructorCache::GetSizeOfGuardValue() == static_cast<size_t>(TySize[TyMachPtr]));
    IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(const_cast<void*>(ctorCache->runtimeCache->GetAddressOfGuardValue()), TyMachReg, this->m_func,
        IR::AddrOpndKindDynamicGuardValueRef);
    IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
    InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, helperOrBailoutLabel, newObjInstr);

    // If we are calling new on a class constructor, the contract is that we pass new.target as the 'this' argument.
    // function is the constructor on which we called new - which is new.target.
    Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
    Js::FunctionInfo* functionInfo = Js::JavascriptOperators::GetConstructorFunctionInfo(ctor, this->m_func->GetScriptContext());
    Assert(functionInfo);

    if (functionInfo->IsClassConstructor())
    {
        // MOV newObjDst, function
        this->m_lowererMD.CreateAssign(newObjDst, newObjInstr->GetSrc1(), newObjInstr);
    }
    else
    {
        const Js::DynamicType* newObjectType = ctorCache->type;
        Assert(newObjectType->GetIsShared());

        IR::AddrOpnd* typeSrc = IR::AddrOpnd::New(const_cast<void *>(reinterpret_cast<const void *>(newObjectType)), IR::AddrOpndKindDynamicType, m_func);

        // For the next call:
        //     inlineSlotSize == Number of slots to allocate beyond the DynamicObject header
        //     slotSize - inlineSlotSize == Number of aux slots to allocate
        int inlineSlotSize = ctorCache->inlineSlotCount;
        int slotSize = ctorCache->slotCount;
        if (newObjectType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
        {
            // Header-inlined slots live inside the DynamicObject header itself,
            // so subtract the header-inlinable capacity from both counts.
            Assert(inlineSlotSize >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
            Assert(inlineSlotSize == slotSize);
            slotSize = inlineSlotSize -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
        }
        GenerateDynamicObjectAlloc(newObjInstr, inlineSlotSize, slotSize, newObjDst, typeSrc);
    }

    // JMP $callCtor
    IR::BranchInstr *callCtorBranch = IR::BranchInstr::New(Js::OpCode::Br, callCtorLabel, m_func);
    newObjInstr->InsertBefore(callCtorBranch);
    this->m_lowererMD.LowerUncondBranch(callCtorBranch);

    return true;
}
- void
- Lowerer::GenerateRecyclerAllocAligned(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
- {
- IR::LabelInstr * allocDoneLabel = nullptr;
- if (!PHASE_OFF(Js::JitAllocNewObjPhase, insertionPointInstr->m_func->GetJnFunction()) && HeapInfo::IsSmallObject(allocSize))
- {
- IR::LabelInstr * allocHelperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- allocDoneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, inOpHelper);
- this->m_lowererMD.GenerateFastRecyclerAlloc(allocSize, newObjDst, insertionPointInstr, allocHelperLabel, allocDoneLabel);
- // $allocHelper:
- insertionPointInstr->InsertBefore(allocHelperLabel);
- }
- // call JavascriptOperators::AllocMemForScObject(allocSize, scriptContext->GetRecycler())
- this->m_lowererMD.LoadHelperArgument(insertionPointInstr, this->LoadScriptContextValueOpnd(insertionPointInstr, ScriptContextValue::ScriptContextRecycler));
- this->m_lowererMD.LoadHelperArgument(insertionPointInstr, IR::IntConstOpnd::New((int32)allocSize, TyUint32, m_func, true));
- IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(allocHelper, m_func), m_func);
- insertionPointInstr->InsertBefore(newObjCall);
- this->m_lowererMD.LowerCall(newObjCall, 0);
- if (allocDoneLabel != nullptr)
- {
- // $allocDone:
- insertionPointInstr->InsertBefore(allocDoneLabel);
- }
- }
- IR::Instr *
- Lowerer::LowerGetNewScObject(IR::Instr *instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::GetNewScObject);
- Assert(instr->GetDst());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc2());
- const auto instrPrev = instr->m_prev;
- Assert(instrPrev);
- LowerGetNewScObjectCommon(
- instr->GetDst()->AsRegOpnd(),
- instr->GetSrc1()->AsRegOpnd(),
- instr->GetSrc2()->AsRegOpnd(),
- instr);
- instr->Remove();
- return instrPrev;
- }
// Emits the "which object did 'new' produce?" selection sequence before
// insertBeforeInstr: if the constructor's return value is a JS object it wins,
// otherwise the pre-allocated 'this' object (newObjOpnd) is used. Assignments
// are skipped when source and destination operands are already identical.
void
Lowerer::LowerGetNewScObjectCommon(
    IR::RegOpnd *const resultObjOpnd,
    IR::RegOpnd *const constructorReturnOpnd,
    IR::RegOpnd *const newObjOpnd,
    IR::Instr *insertBeforeInstr)
{
    Assert(resultObjOpnd);
    Assert(constructorReturnOpnd);
    Assert(newObjOpnd);
    Assert(insertBeforeInstr);

    // (newObjOpnd == 'this' value passed to constructor)
    //
    // if (!IsJsObject(constructorReturnOpnd))
    //     goto notObjectLabel
    // newObjOpnd = constructorReturnOpnd
    // notObjectLabel:
    // resultObjOpnd = newObjOpnd
    if(!constructorReturnOpnd->IsEqual(newObjOpnd))
    {
        // Need to check whether the constructor returned an object
        IR::LabelInstr *notObjectLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        Assert(insertBeforeInstr->m_prev);
        // doneLabel marks the end of the whole selection sequence; everything
        // below is inserted in front of it.
        IR::LabelInstr *const doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertBeforeInstr->InsertBefore(doneLabel);
        insertBeforeInstr = doneLabel;
#if defined(_M_ARM32_OR_ARM64)
        // On ARM there is no inline object-test sequence: call the Op_IsObject
        // helper and branch to notObjectLabel when its boolean result is zero.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, constructorReturnOpnd);
        IR::Opnd * targetOpnd = IR::RegOpnd::New(StackSym::New(TyInt32,m_func), TyInt32, m_func);
        IR::Instr * callIsObjectInstr = IR::Instr::New(Js::OpCode::Call, targetOpnd, m_func);
        insertBeforeInstr->InsertBefore(callIsObjectInstr);
        this->m_lowererMD.ChangeToHelperCall(callIsObjectInstr, IR::HelperOp_IsObject);
        InsertTestBranch( targetOpnd, targetOpnd, Js::OpCode::BrEq_A, notObjectLabel,insertBeforeInstr);
#else
        m_lowererMD.GenerateIsJsObjectTest(constructorReturnOpnd, insertBeforeInstr, notObjectLabel);
#endif
        // Value returned by constructor is an object (use constructorReturnOpnd)
        if(!resultObjOpnd->IsEqual(constructorReturnOpnd))
        {
            this->m_lowererMD.CreateAssign(resultObjOpnd, constructorReturnOpnd, insertBeforeInstr);
        }
        insertBeforeInstr->InsertBefore(
            m_lowererMD.LowerUncondBranch(IR::BranchInstr::New(Js::OpCode::Br, doneLabel, m_func)));

        // Value returned by constructor is not an object (use newObjOpnd)
        insertBeforeInstr->InsertBefore(notObjectLabel);
    }
    if(!resultObjOpnd->IsEqual(newObjOpnd))
    {
        this->m_lowererMD.CreateAssign(resultObjOpnd, newObjOpnd, insertBeforeInstr);
    }
    // fall through to insertBeforeInstr or doneLabel
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerUpdateNewScObjectCache
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR::Opnd *src1, const bool isCtorFunction)
- {
- // if (!isCtorFunction)
- // {
- // MOV r1, [src1 + offset(type)] -- check base TypeIds_Function
- // CMP [r1 + offset(typeId)], TypeIds_Function
- // }
- // JNE $fallThru
- // MOV r2, [src1 + offset(constructorCache)]
- // MOV r3, [r2 + offset(updateAfterCtor)]
- // TEST r3, r3 -- check if updateAfterCtor is 0
- // JEQ $fallThru
- // CALL UpdateNewScObjectCache(src1, dst, scriptContext)
- // $fallThru:
- IR::LabelInstr *labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- if (!src1->IsRegOpnd())
- {
- IR::RegOpnd *srcRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- LowererMD::CreateAssign(srcRegOpnd, src1, insertInstr);
- src1 = srcRegOpnd;
- }
- // Check if constructor is a function if we don't already know it.
- if (!isCtorFunction)
- {
- // MOV r1, [src1 + offset(type)] -- check base TypeIds_Function
- IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- LowererMD::CreateAssign(r1, indirOpnd, insertInstr);
- // CMP [r1 + offset(typeId)], TypeIds_Function
- // JNE $fallThru
- indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
- IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
- InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
- }
- // Every function has a constructor cache, even if only the default blank one.
- // r2 = MOV JavascriptFunction->constructorCache
- IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
- IR::IndirOpnd *opndIndir = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfConstructorCache(), TyMachReg, this->m_func);
- IR::Instr *instr = LowererMD::CreateAssign(r2, opndIndir, insertInstr);
- // r3 = constructorCache->updateAfterCtor
- IR::RegOpnd *r3 = IR::RegOpnd::New(TyInt8, this->m_func);
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(r2, Js::ConstructorCache::GetOffsetOfUpdateAfterCtor(), TyUint8, this->m_func);
- instr = LowererMD::CreateAssign(r3, indirOpnd, insertInstr);
- // TEST r3, r3 -- check if updateAfterCtor is 0
- // JEQ $fallThru
- InsertTestBranch(r3, r3, Js::OpCode::BrEq_A, labelFallThru, insertInstr);
- // r2 = UpdateNewScObjectCache(src1, dst, scriptContext)
- insertInstr->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true)); // helper label for uncommon path
- IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperUpdateNewScObjectCache, m_func);
- LoadScriptContext(insertInstr);
- m_lowererMD.LoadHelperArgument(insertInstr, dst);
- m_lowererMD.LoadHelperArgument(insertInstr, src1);
- instr = IR::Instr::New(Js::OpCode::Call, m_func);
- instr->SetSrc1(opndHelper);
- insertInstr->InsertBefore(instr);
- m_lowererMD.LowerCall(instr, 0);
- // $fallThru:
- insertInstr->InsertBefore(labelFallThru);
- return insertInstr;
- }
// Lowers a profiled NewScObjArray (new Array(...) with arguments). Emits an
// optional small-length fast path, a call to the profiled Array-constructor
// helper, and a post-call sequence that stamps the call-site index and weak
// function reference onto the result when it is a (native) JavascriptArray.
// Returns the first instruction above the lowered region.
IR::Instr *
Lowerer::LowerNewScObjArray(IR::Instr *newObjInstr)
{
    IR::Instr* startCallInstr;
    // No actual arguments: delegate to the no-arg variant (src2 argout chain is unused).
    if (newObjInstr->HasEmptyArgOutChain(&startCallInstr))
    {
        newObjInstr->FreeSrc2();
        return LowerNewScObjArrayNoArg(newObjInstr);
    }
    IR::Instr* startMarkerInstr = nullptr;
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;
    if (!targetOpnd->IsAddrOpnd())
    {
        if (!newObjInstr->HasBailOutInfo())
        {
            return this->LowerNewScObject(newObjInstr, true, true);
        }
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split the instruction: the original becomes a BailOut, and a fresh
        // ProfiledInstr (taking over dst/src1/src2 and the profileId) carries on
        // after the skip label when the target really is the Array constructor.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), newObjInstr->UnlinkSrc2(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = nullptr;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Assert(newObjInstr->IsProfiledInstr());

    IR::RegOpnd *resultObjOpnd = newObjInstr->GetDst()->AsRegOpnd();
    IR::Instr * insertInstr = newObjInstr->m_next;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    // We may not have profileId if we converted a NewScObject to NewScObjArray
    if (profileId != Js::Constants::NoProfileId)
    {
        Js::FunctionBody *functionBody = func->GetJnFunction();
        arrayInfo = functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }
    // NOTE(review): when profileId == NoProfileId, arrayInfo and weakFuncRef
    // remain null and the AddrOpnds below embed null pointers — presumably the
    // helper and the runtime tolerate that; confirm against the helper's contract.

    IR::Opnd *opndSrc1 = newObjInstr->UnlinkSrc1();
    if (opndSrc1->IsImmediateOpnd())
    {
        // Known constant length in [0, 8]: emit the inline allocation fast path.
        intptr_t length = opndSrc1->GetImmediateValue();
        if (length >= 0 && length <= 8)
        {
            GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, weakFuncRef, (uint32)length);
        }
    }

    // Pass the array call-site info as the implicit first argument, then call
    // the profiled constructor helper directly on the target.
    IR::Opnd *profileOpnd = IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, func);
    this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, profileOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperScrArr_ProfiledNewInstance;

    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    newObjInstr = GenerateDirectCall(newObjInstr, targetOpnd, Js::CallFlags_New);

    // Skip the stamping below unless the result's vtable says JavascriptArray.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(
        IR::IndirOpnd::New(resultObjOpnd, 0, TyMachPtr, func),
        LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptArray),
        Js::OpCode::BrEq_A,
        true,
        labelDone,
        insertInstr);

    // We know we have a native array, so store the weak ref and call site index.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfArrayCallSiteIndex(), TyUint16, func),
        IR::Opnd::CreateProfileIdOpnd(profileId, func),
        insertInstr);

    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfWeakFuncRef(), TyMachReg, func),
        IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func),
        insertInstr);

    insertInstr->InsertBefore(labelDone);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
// Lowers NewScObjArray when the argument list is empty (new Array()). Emits a
// zero-length profiled fast path, then a call to the no-arg profiled Array
// constructor helper. Mirrors LowerNewScObjArray's bailout handling for
// non-fixed call targets. Returns the first instruction above the lowered region.
IR::Instr *
Lowerer::LowerNewScObjArrayNoArg(IR::Instr *newObjInstr)
{
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    IR::Instr* startMarkerInstr = nullptr;
    if (!targetOpnd->IsAddrOpnd())
    {
        if (!newObjInstr->HasBailOutInfo())
        {
            return this->LowerNewScObject(newObjInstr, true, false);
        }
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split: the original instruction becomes a BailOut; a fresh
        // ProfiledInstr continues after the skip label on the non-bailout path.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    Assert(newObjInstr->IsProfiledInstr());

    RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = nullptr;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    if (profileId != Js::Constants::NoProfileId)
    {
        Js::FunctionBody *functionBody = func->GetJnFunction();
        arrayInfo = functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }
    // NOTE(review): as in LowerNewScObjArray, arrayInfo/weakFuncRef may be null
    // here when no profileId exists; the helper arguments below then carry null.

    // Inline fast path for a zero-length array, then the helper call:
    // dst = ScrArr_ProfiledNewInstanceNoArg(target, scriptContext, arrayInfo, weakFuncRef)
    GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, weakFuncRef, 0);

    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func));
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, func));
    LoadScriptContext(newObjInstr);
    m_lowererMD.LoadHelperArgument(newObjInstr, targetOpnd);

    newObjInstr->UnlinkSrc1();
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrArr_ProfiledNewInstanceNoArg, func));
    m_lowererMD.LowerCall(newObjInstr, 0);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerPrologEpilog
- ///
- ///----------------------------------------------------------------------------
- void
- Lowerer::LowerPrologEpilog()
- {
- if (m_func->GetJnFunction()->IsGenerator())
- {
- LowerGeneratorResumeJumpTable();
- }
- IR::Instr * instr;
- instr = m_func->m_headInstr;
- AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
- m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
- instr = m_func->m_exitInstr;
- AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
- m_lowererMD.LowerExitInstr(instr->AsExitInstr());
- }
- void
- Lowerer::LowerPrologEpilogAsmJs()
- {
- IR::Instr * instr;
- instr = m_func->m_headInstr;
- AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
- m_lowererMD.LowerEntryInstrAsmJs(instr->AsEntryInstr());
- instr = m_func->m_exitInstr;
- AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
- m_lowererMD.LowerExitInstrAsmJs(instr->AsExitInstr());
- }
// Replaces the GeneratorResumeJumpTable pseudo-instruction with a chain of
// compare-and-branch instructions, one per recorded yield point: the resume
// offset (the pseudo-instruction's src1) is compared against each yield offset
// and control branches to the matching resume label.
void
Lowerer::LowerGeneratorResumeJumpTable()
{
    Assert(m_func->GetJnFunction()->IsGenerator());

    IR::Instr * jumpTableInstr = m_func->m_headInstr;
    AssertMsg(jumpTableInstr->IsEntryInstr(), "First instr isn't an EntryInstr...");

    // Hope to do away with this linked list scan by moving this lowering to a post-prolog-epilog/pre-encoder phase that is common to all architectures (currently such phase is only available on amd64/arm)
    while (jumpTableInstr->m_opcode != Js::OpCode::GeneratorResumeJumpTable)
    {
        jumpTableInstr = jumpTableInstr->m_next;
    }

    IR::Opnd * srcOpnd = jumpTableInstr->UnlinkSrc1();

    m_func->MapYieldOffsetResumeLabels([&](int i, const YieldOffsetResumeLabel& yorl)
    {
        uint32 offset = yorl.First();
        IR::LabelInstr * label = yorl.Second();

        if (label != nullptr && label->m_hasNonBranchRef)
        {
            // Also fix up the bailout at the label with the jump to epilog that was not emitted in GenerateBailOut()
            Assert(label->m_prev->HasBailOutInfo());
            GenerateJumpToEpilogForBailOut(label->m_prev->GetBailOutInfo(), label->m_prev);
        }
        else if (label == nullptr)
        {
            // No dedicated resume label recorded for this offset: route it to
            // the shared no-save bailout label.
            label = m_func->m_bailOutNoSaveLabel;
        }

        // For each offset label pair, insert a compare of the offset and branch if equal to the label
        InsertCompareBranch(srcOpnd, IR::IntConstOpnd::New(offset, TyUint32, m_func), Js::OpCode::BrSrEq_A, label, jumpTableInstr);
    });

    // The dispatch chain fully replaces the pseudo-instruction.
    jumpTableInstr->Remove();
}
- void
- Lowerer::DoInterruptProbes()
- {
- this->m_func->SetHasInstrNumber(true);
- uint instrCount = 1;
- FOREACH_INSTR_IN_FUNC(instr, this->m_func)
- {
- instr->SetNumber(instrCount++);
- if (instr->IsLabelInstr())
- {
- IR::LabelInstr *labelInstr = instr->AsLabelInstr();
- if (labelInstr->m_isLoopTop)
- {
- // For every loop top label, insert the following:
- // cmp sp, ThreadContext::stackLimitForCurrentThread
- // bgt $continue
- // $helper:
- // call JavascriptOperators::ScriptAbort
- // b $exit
- // $continue:
- IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- labelInstr->InsertAfter(newLabel);
- this->InsertOneLoopProbe(newLabel, newLabel);
- }
- }
- }
- NEXT_INSTR_IN_FUNC;
- }
- // Insert an interrupt probe at each loop back branch. (Currently uncalled, since we're inserting
- // probes at loop tops instead of back edges, but kept around because it may prove useful.)
// Inserts an interrupt probe at a loop back-branch and numbers the inserted
// instructions. Returns the next available instruction number.
uint
Lowerer::DoLoopProbeAndNumber(IR::BranchInstr *branchInstr)
{
    IR::LabelInstr *labelInstr = branchInstr->GetTarget();
    if (labelInstr == nullptr || labelInstr->GetNumber() == 0)
    {
        // Forward branch (possibly an indirect jump after try-catch-finally); nothing to do.
        return branchInstr->GetNumber() + 1;
    }
    Assert(labelInstr->m_isLoopTop);

    // Insert a stack probe at this branch. Number all the instructions we insert
    // and return the next instruction number.
    uint number = branchInstr->GetNumber();
    // Remember the neighbors so the newly inserted range can be found and
    // numbered after the probe is emitted.
    IR::Instr *instrPrev = branchInstr->m_prev;
    IR::Instr *instrNext = branchInstr->m_next;
    if (branchInstr->IsUnconditional())
    {
        // B $loop ==>
        // cmp [], 0
        // beq $loop
        // $helper:
        // call abort
        // b $exit
        // (the original unconditional branch is replaced by the probe's branch)
        this->InsertOneLoopProbe(branchInstr, labelInstr);
        branchInstr->Remove();
    }
    else
    {
        // Bcc $loop ==>
        // Binv $notloop
        // cmp [], 0
        // beq $loop
        // $helper:
        // call abort
        // b $exit
        // $notloop:
        IR::LabelInstr *loopExitLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        branchInstr->SetTarget(loopExitLabel);
        LowererMD::InvertBranch(branchInstr);
        branchInstr->InsertAfter(loopExitLabel);
        this->InsertOneLoopProbe(loopExitLabel, labelInstr);
    }

    // Number everything that was inserted between the saved neighbors.
    FOREACH_INSTR_IN_RANGE(instr, instrPrev->m_next, instrNext->m_prev)
    {
        instr->SetNumber(number++);
    }
    NEXT_INSTR_IN_RANGE;

    return number;
}
// Inserts one interrupt probe before insertInstr: compares the stack pointer
// against the thread's stack limit and continues to loopLabel when there is
// headroom; otherwise falls into a helper block that calls ScriptAbort.
void
Lowerer::InsertOneLoopProbe(IR::Instr *insertInstr, IR::LabelInstr *loopLabel)
{
    // Insert one interrupt probe at the given instruction. Probe the stack and call the abort helper
    // directly if the probe fails.
    IR::Opnd *memRefOpnd = IR::MemRefOpnd::New(
        this->m_func->GetScriptContext()->GetThreadContext()->GetAddressOfStackLimitForCurrentThread(),
        TyMachReg, this->m_func);

    IR::RegOpnd *regStackPointer = IR::RegOpnd::New(
        NULL, this->m_lowererMD.GetRegStackPointer(), TyMachReg, this->m_func);

    // sp > stackLimit: plenty of stack left, continue the loop.
    InsertCompareBranch(regStackPointer, memRefOpnd, Js::OpCode::BrGt_A, loopLabel, insertInstr);

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    insertInstr->InsertBefore(helperLabel);

    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScriptAbort, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(helperOpnd);
    insertInstr->InsertBefore(instr);
    this->m_lowererMD.LowerCall(instr, 0);

    // Jump to the exit after the helper call. This instruction will never be reached, but the jump
    // indicates that nothing is live after the call (to avoid useless spills in code that will
    // be executed).
    // Note: helperLabel is reused below to hold the label just before the exit
    // instruction (an existing one, or a new one inserted there).
    instr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (instr->IsLabelInstr())
    {
        helperLabel = instr->AsLabelInstr();
    }
    else
    {
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(helperLabel);
    }

    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, helperLabel, this->m_func);
    insertInstr->InsertBefore(instr);
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LoadPropertySymAsArgument
- ///
- /// Generate code to pass a fieldSym as argument to a helper.
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LoadPropertySymAsArgument(IR::Instr *instr, IR::Opnd *fieldSrc)
- {
- IR::Instr * instrPrev;
- AssertMsg(fieldSrc->IsSymOpnd() && fieldSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as src of LdFld");
- IR::SymOpnd *symOpnd = fieldSrc->AsSymOpnd();
- PropertySym * fieldSym = symOpnd->m_sym->AsPropertySym();
- IR::IntConstOpnd * indexOpnd = IR::IntConstOpnd::New(fieldSym->m_propertyId, TyInt32, m_func, /*dontEncode*/true);
- instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
- IR::RegOpnd * instanceOpnd = symOpnd->CreatePropertyOwnerOpnd(m_func);
- m_lowererMD.LoadHelperArgument(instr, instanceOpnd);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LoadFunctionBodyAsArgument
- ///
- /// Special case: the "property ID" is a key into the ScriptContext's FunctionBody map
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LoadFunctionBodyAsArgument(IR::Instr *instr, IR::IntConstOpnd * functionBodySlotOpnd, IR::RegOpnd * envOpnd)
- {
- IR::Instr * instrPrev;
- // We need to pass in the function reference, we can't embed the pointer to the function proxy here.
- // The function proxy may be deferred parsed/serialize, and may 'progress' to a real function body after it is undeferred
- // At which point the deferred function proxy may be collect.
- // Just pass it the address where we will find the function proxy/body
- Js::FunctionProxyPtrPtr proxyRef = instr->m_func->GetJnFunction()->GetNestedFuncReference((uint)functionBodySlotOpnd->GetValue());
- AssertMsg(proxyRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
- AssertMsg(*proxyRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
- IR::AddrOpnd * indexOpnd = IR::AddrOpnd::New((Js::Var)proxyRef, IR::AddrOpndKindDynamicMisc, m_func);
- instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
- m_lowererMD.LoadHelperArgument(instr, envOpnd);
- return instrPrev;
- }
// Lowers a field-load instruction in JIT-profiling mode: selects the profiled
// helper for the opcode, pushes its arguments, and converts the instruction
// into a call. Returns the instruction preceding the lowered sequence.
IR::Instr *
Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
{
    const auto instrPrev = ldFldInstr->m_prev;

    auto src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    IR::JnHelperMethod helper;
    switch (ldFldInstr->m_opcode)
    {
        // The first group of opcodes shares one argument-pushing sequence
        // (ldFldCommon); each case only selects the matching profiled helper.
        case Js::OpCode::LdFld:
            helper = IR::HelperProfiledLdFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootFld:
            helper = IR::HelperProfiledLdRootFld;
            goto ldFldCommon;
        case Js::OpCode::LdMethodFld:
            helper = IR::HelperProfiledLdMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootMethodFld:
            helper = IR::HelperProfiledLdRootMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdFldForCallApplyTarget:
            helper = IR::HelperProfiledLdFld_CallApplyTarget;
            goto ldFldCommon;
        case Js::OpCode::LdFldForTypeOf:
            helper = IR::HelperProfiledLdFldForTypeOf;
            goto ldFldCommon;
        case Js::OpCode::LdRootFldForTypeOf:
            helper = IR::HelperProfiledLdRootFldForTypeOf;
            goto ldFldCommon;

        ldFldCommon:
        {
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);

            /*
                Var ProfilingHelpers::ProfiledLdFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer)
            */
            // Arguments are pushed in reverse of the helper's parameter order.
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);
            break;
        }

        case Js::OpCode::LdSuperFld:
        {
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
            IR::Opnd * src2 = nullptr;

            /*
                Var ProfilingHelpers::ProfiledLdSuperFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer,
                    const Var thisInstance)
            */
            // Same as ldFldCommon plus the 'this' instance (src2) as the last parameter.
            src2 = ldFldInstr->UnlinkSrc2();
            m_lowererMD.LoadHelperArgument(ldFldInstr, src2 );
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);
            helper = IR::HelperProfiledLdSuperFld;
            break;
        }

        case Js::OpCode::LdLen_A:
            // If we want to profile this call, then push some extra args and call the profiling version
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateProfileIdOpnd(ldFldInstr->profileId, m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, CreateFunctionBodyOpnd(ldFldInstr->m_func));
            helper = IR::HelperSimpleProfiledLdLen;
            break;

        default:
            Assert(false);
            __assume(false);
    }

    // Convert the instruction into the selected helper call.
    ldFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(ldFldInstr, 0);

    return instrPrev;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerLdFld
- ///
- /// Lower an instruction (LdFld, ScopedLdFld) that takes a property
- /// reference as a source and puts a result in a register.
- ///
- ///----------------------------------------------------------------------------
// Lowers a field-load (LdFld, ScopedLdFld, LdSuperFld, ...) into a call to
// helperMethod (or polymorphicHelperMethod when a runtime polymorphic inline
// cache is present). When useInlineCache is set, the inline cache, cache index
// and function body are passed; otherwise the script context is passed.
// Returns the instruction preceding the lowered sequence.
IR::Instr *
Lowerer::LowerLdFld(
    IR::Instr * ldFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool useInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper)
{
    if (ldFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledLdFld(ldFldInstr->AsJitProfilingInstr());
    }

    IR::Opnd *src;
    IR::Instr *instrPrev = ldFldInstr->m_prev;

    src = ldFldInstr->UnlinkSrc1();

    // LdSuperFld carries the 'this' instance in src2; it is pushed first so it
    // becomes the helper's last parameter.
    if (ldFldInstr->m_opcode == Js::OpCode::LdSuperFld)
    {
        IR::Opnd * src2 = nullptr;
        src2 = ldFldInstr->UnlinkSrc2();
        m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
    }

    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    if (useInlineCache)
    {
        IR::Opnd * inlineCacheOpnd;
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // Prefer the polymorphic cache and its dedicated helper when available.
            Js::PolymorphicInlineCache * polymorphicInlineCache = src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper,
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd(), isHelper);
        }
        this->LoadPropertySymAsArgument(ldFldInstr, src);
        this->m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        this->m_lowererMD.LoadHelperArgument(ldFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }
    else
    {
        LoadScriptContext(ldFldInstr);
        this->LoadPropertySymAsArgument(ldFldInstr, src);
    }

    // Do we need to reload the type and slot array after the helper returns?
    // (We do if there's a propertySymOpnd downstream that needs it, i.e., the type is not dead.)
    IR::RegOpnd *opndBase = src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod, labelBailOut, opndBase, src->AsSymOpnd()->IsPropertySymOpnd() ? src->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
- bool
- Lowerer::GenerateLdFldWithCachedType(IR::Instr * instrLdFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
- {
- IR::Instr *instr;
- IR::Opnd *opnd;
- IR::LabelInstr *labelObjCheckFailed = nullptr;
- IR::LabelInstr *labelTypeCheckFailed = nullptr;
- IR::LabelInstr *labelDone = nullptr;
- Assert(continueAsHelperOut != nullptr);
- *continueAsHelperOut = false;
- Assert(labelHelperOut != nullptr);
- *labelHelperOut = nullptr;
- Assert(typeOpndOut != nullptr);
- *typeOpndOut = nullptr;
- Assert(instrLdFld->GetSrc1()->IsSymOpnd());
- if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
- {
- return false;
- }
- IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
- if (!propertySymOpnd->IsTypeCheckSeqCandidate())
- {
- return false;
- }
- AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
- if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
- {
- return false;
- }
- Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind())));
- // In the backwards pass we only add guarded property operations to instructions that are not already
- // protected by an upstream type check.
- Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
- PHASE_PRINT_TESTTRACE(
- Js::ObjTypeSpecPhase,
- this->m_func,
- L"Field load: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
- Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
- this->m_func->GetScriptContext()->GetPropertyNameLocked(
- propertySymOpnd->m_sym->AsPropertySym()->m_propertyId)->GetBuffer(),
- this->m_func->GetJnFunction()->GetDisplayName(),
- propertySymOpnd->m_inlineCacheIndex,
- propertySymOpnd->GetCacheLayoutString(),
- propertySymOpnd->IsTypeChecked() ? L"true" : L"false");
- if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
- {
- propertySymOpnd->UpdateSlotForFinalType();
- }
- // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
- // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
- bool hasTypeCheckBailout = instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind());
- // If the hard-coded type is not available here, do a type check, and branch to the helper if the check fails.
- // In the prototype case, we have to check the type even if it was checked upstream, to cover the case where
- // the property has been added locally. Note that this is not necessary if the proto chain has been checked,
- // because then we know there's been no store of the property since the type was checked.
- bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
- bool emitLocalTypeCheck = propertySymOpnd->NeedsLocalTypeCheck();
- bool emitLoadFromProtoTypeCheck = propertySymOpnd->NeedsLoadFromProtoTypeCheck();
- if (emitPrimaryTypeCheck || emitLocalTypeCheck || emitLoadFromProtoTypeCheck)
- {
- if (emitLoadFromProtoTypeCheck)
- {
- propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
- propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
- }
- labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- labelObjCheckFailed = hasTypeCheckBailout ? labelTypeCheckFailed : IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- *typeOpndOut = this->GenerateCachedTypeCheck(instrLdFld, propertySymOpnd, labelObjCheckFailed, labelTypeCheckFailed);
- }
- IR::Opnd *opndSlotArray;
- if (propertySymOpnd->IsLoadedFromProto())
- {
- opndSlotArray = this->LoadSlotArrayWithCachedProtoType(instrLdFld, propertySymOpnd);
- }
- else
- {
- opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrLdFld, propertySymOpnd);
- }
- // Load the value from the slot, getting the slot ID from the cache.
- uint16 index = propertySymOpnd->GetSlotIndex();
- Assert(index != -1);
- if (opndSlotArray->IsRegOpnd())
- {
- opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, this->m_func);
- }
- else
- {
- Assert(opndSlotArray->IsMemRefOpnd());
- opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, this->m_func, IR::AddrOpndKindDynamicPropertySlotRef);
- }
- Lowerer::InsertMove(instrLdFld->GetDst(), opnd, instrLdFld);
- // We eliminate the helper, or the type check succeeds, or we bail out before the operation.
- // Either delete the original instruction or replace it with a bailout.
- if (!emitPrimaryTypeCheck && !emitLocalTypeCheck && !emitLoadFromProtoTypeCheck)
- {
- Assert(labelTypeCheckFailed == nullptr);
- AssertMsg(!instrLdFld->HasBailOutInfo(), "Why does a direct field load have bailout?");
- instrLdFld->Remove();
- return true;
- }
- // Otherwise, branch around the bailout or helper.
- labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
- instrLdFld->InsertBefore(instr);
- // Insert the bailout or helper label here.
- instrLdFld->InsertBefore(labelTypeCheckFailed);
- instrLdFld->InsertAfter(labelDone);
- if (hasTypeCheckBailout)
- {
- AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
- "Why does a field load have a type check bailout, if its type is dead?");
- // Convert the original instruction to a bailout.
- if (instrLdFld->GetBailOutInfo()->bailOutInstr != instrLdFld)
- {
- // Set the cache index in the bailout info so that the bailout code will write it into the
- // bailout record at runtime.
- instrLdFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
- }
- instrLdFld->FreeDst();
- instrLdFld->FreeSrc1();
- instrLdFld->m_opcode = Js::OpCode::BailOut;
- this->GenerateBailOut(instrLdFld);
- return true;
- }
- else
- {
- *continueAsHelperOut = true;
- Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelTypeCheckFailed);
- *labelHelperOut = labelObjCheckFailed;
- return false;
- }
- }
- template<bool isRoot>
- IR::Instr* Lowerer::GenerateCompleteLdFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
- IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath)
- {
- if(instr->CallsAccessor() && instr->HasBailOutInfo())
- {
- IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
- Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
- }
- IR::Instr* prevInstr = instr->m_prev;
- IR::LabelInstr* labelHelper = nullptr;
- IR::LabelInstr* labelBailOut = nullptr;
- bool isHelper = false;
- IR::RegOpnd* typeOpnd = nullptr;
- if (isRoot)
- {
- // Don't do the fast path here if emitFastPath is false, even if we can.
- if (emitFastPath && (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd) || this->GenerateNonConfigurableLdRootFld(instr)))
- {
- Assert(labelHelper == nullptr);
- return prevInstr;
- }
- }
- else
- {
- if (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
- {
- Assert(labelHelper == nullptr);
- return prevInstr;
- }
- }
- if (emitFastPath)
- {
- if (!GenerateFastLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper))
- {
- if (labelHelper != nullptr)
- {
- labelHelper->isOpHelper = isHelper;
- instr->InsertBefore(labelHelper);
- }
- prevInstr = LowerLdFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper);
- }
- }
- else
- {
- if (labelHelper != nullptr)
- {
- labelHelper->isOpHelper = isHelper;
- instr->InsertBefore(labelHelper);
- }
- prevInstr = LowerLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper);
- }
- return prevInstr;
- }
// Lowers a CheckFixedFld instruction, which verifies that a fixed field (a property
// whose cached value -- typically a method -- was burned into the jitted code) is still
// valid. Depending on what upstream checks already guarantee, this emits a full cached
// type check, a cheaper property guard check, or nothing (in which case the instruction
// is simply removed); otherwise the instruction is converted into a bailout taken when
// the check fails. Always returns true (the instruction is fully lowered here).
bool
Lowerer::GenerateCheckFixedFld(IR::Instr * instrChkFld)
{
    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instrChkFld->m_func->GetJnFunction()) ||
        !PHASE_OFF(Js::UseFixedDataPropsPhase, instrChkFld->m_func->GetJnFunction()), "Lowering a check fixed field with fixed data/method phase disabled?");

    Assert(instrChkFld->GetSrc1()->IsSymOpnd() && instrChkFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkFld->GetSrc1()->AsPropertySymOpnd();

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    // For the non-configurable properties on the global object we do not need a type check. Otherwise,
    // we need a type check and bailout here unless this operation is part of the type check sequence and
    // is protected by a type check upstream.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();

    // In addition, we may also need a local type check in case the property comes from the prototype and
    // it may have been overwritten on the instance after the primary type check upstream. If the property
    // comes from the instance, we must still protect against its value changing after the type check, but
    // for this a cheaper guard check is sufficient (see below).
    bool emitFixedFieldTypeCheck = propertySymOpnd->NeedsCheckFixedFieldTypeCheck() &&
        (!propertySymOpnd->IsTypeChecked() || propertySymOpnd->IsLoadedFromProto());

    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;

    OUTPUT_TRACE_FUNC(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Fixed field check: %s, property: %s, cache ID: %u, cloned cache: true, layout: %s, redundant check: %s count of props: %u \n",
        Js::OpCodeUtil::GetOpCodeName(instrChkFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), propertySymOpnd->IsTypeChecked() ? L"true" : L"false",
        propertySymOpnd->GetGuardedPropOps() ? propertySymOpnd->GetGuardedPropOps()->Count() : 0);

    if (emitPrimaryTypeCheck || emitFixedFieldTypeCheck)
    {
        labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        if(emitFixedFieldTypeCheck && propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
        {
            AssertMsg(!propertySymOpnd->GetGuardedPropOps() || propertySymOpnd->GetGuardedPropOps()->IsEmpty(), "This property Guard is used only for one property");
            // We need only the cheaper guard check, if the property belongs to the GlobalObject.
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        else
        {
            if (emitFixedFieldTypeCheck)
            {
                // Record this operation as guarded by the type check we're about to emit.
                propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
                propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
            }
            this->GenerateCachedTypeCheck(instrChkFld, propertySymOpnd, labelBailOut, labelBailOut);
        }
    }

    // We may still need this guard if we didn't emit the write protect type check above. This situation arises if we have
    // a fixed field from the instance (not proto) and a property of the same name has been written somewhere between the
    // primary type check and here. Note that we don't need a type check, because we know the fixed field exists on the
    // object even if it has been written since primary type check, but we need to verify the fixed value didn't get overwritten.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && !propertySymOpnd->IsWriteGuardChecked())
    {
        if (!PHASE_OFF(Js::FixedFieldGuardCheckPhase, this->m_func))
        {
            Assert(labelBailOut == nullptr);
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
    }

    // Note that a type handler holds only a weak reference to the singleton instance it represents, so
    // it is possible that the instance gets collected before the type and handler do. Hence, the upstream
    // type check may succeed, even as the original instance no longer exists. However, this would happen
    // only if another instance reached the same type (otherwise we wouldn't ever pass the type check
    // upstream). In that case we would have invalidated all fixed fields on that type, and so the type
    // check (or property guard check, if necessary) above would fail. All in all, we would never attempt
    // to access a fixed field from an instance that has been collected.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && propertySymOpnd->IsWriteGuardChecked())
    {
        // Fully protected by upstream checks: nothing to emit, drop the instruction.
        Assert(labelBailOut == nullptr);
        AssertMsg(!instrChkFld->HasBailOutInfo(), "Why does a direct fixed field check have bailout?");
        instrChkFld->Remove();
        return true;
    }

    // On the success path, branch around the bailout.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkFld->InsertBefore(instr);

    // Insert the helper label here.
    instrChkFld->InsertBefore(labelBailOut);
    instrChkFld->InsertAfter(labelDone);

    // Convert the original instruction to a bailout.
    Assert(instrChkFld->HasBailOutInfo());

    if (instrChkFld->GetBailOutInfo()->bailOutInstr != instrChkFld)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkFld->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }

    instrChkFld->FreeSrc1();
    instrChkFld->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkFld);

    return true;
}
// Lowers a CheckObjType instruction: emits an explicit cached type check on the
// property's owner object and converts the original instruction into a bailout that
// is taken when the check fails.
void
Lowerer::GenerateCheckObjType(IR::Instr * instrChkObjType)
{
    Assert(instrChkObjType->GetSrc1()->IsSymOpnd() && instrChkObjType->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkObjType->GetSrc1()->AsPropertySymOpnd();

    // Why do we have an explicit type check if the cached type has been checked upstream? The dead store pass should have
    // removed this instruction.
    Assert(propertySymOpnd->IsTypeCheckSeqCandidate() && !propertySymOpnd->IsTypeChecked());
    // Why do we have an explicit type check on a non-configurable root field load?
    Assert(!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad());

    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Object type check: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
        Js::OpCodeUtil::GetOpCodeName(instrChkObjType->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        this->m_func->GetJnFunction()->GetDisplayName(),
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), L"false");

    // Emit the type check; on mismatch control goes to the bailout label.
    IR::LabelInstr* labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    this->GenerateCachedTypeCheck(instrChkObjType, propertySymOpnd, labelBailOut, labelBailOut);

    // On success, branch around the bailout.
    IR::LabelInstr* labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::Instr* instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkObjType->InsertBefore(instr);

    // Insert the bailout label here.
    instrChkObjType->InsertBefore(labelBailOut);
    instrChkObjType->InsertAfter(labelDone);

    // Convert the original instruction to a bailout.
    Assert(instrChkObjType->HasBailOutInfo());

    if (instrChkObjType->GetBailOutInfo()->bailOutInstr != instrChkObjType)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkObjType->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }

    instrChkObjType->FreeSrc1();
    instrChkObjType->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkObjType);
}
- void
- Lowerer::LowerAdjustObjType(IR::Instr * instrAdjustObjType)
- {
- IR::AddrOpnd *finalTypeOpnd = instrAdjustObjType->UnlinkDst()->AsAddrOpnd();
- IR::AddrOpnd *initialTypeOpnd = instrAdjustObjType->UnlinkSrc2()->AsAddrOpnd();
- IR::RegOpnd *baseOpnd = instrAdjustObjType->UnlinkSrc1()->AsRegOpnd();
- this->GenerateAdjustBaseSlots(
- instrAdjustObjType, baseOpnd, (Js::Type*)initialTypeOpnd->m_address, (Js::Type*)finalTypeOpnd->m_address);
- this->m_func->PinTypeRef(finalTypeOpnd->m_address);
- IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrAdjustObjType->m_func);
- this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrAdjustObjType);
- initialTypeOpnd->Free(instrAdjustObjType->m_func);
- instrAdjustObjType->Remove();
- }
- bool
- Lowerer::GenerateNonConfigurableLdRootFld(IR::Instr * instrLdFld)
- {
- if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
- {
- return false;
- }
- IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
- if (!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
- {
- return false;
- }
- Assert(!PHASE_OFF(Js::RootObjectFldFastPathPhase, this->m_func->GetJnFunction()));
- Assert(!instrLdFld->HasBailOutInfo());
- IR::Opnd * srcOpnd;
- Js::RootObjectBase * rootObject = this->m_func->GetJnFunction()->GetRootObject();
- if (propertySymOpnd->UsesAuxSlot())
- {
- IR::RegOpnd * auxSlotOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- this->InsertMove(auxSlotOpnd, IR::MemRefOpnd::New((byte *)rootObject + Js::DynamicObject::GetOffsetOfAuxSlots(),
- TyMachPtr, this->m_func), instrLdFld);
- srcOpnd = IR::IndirOpnd::New(auxSlotOpnd, propertySymOpnd->GetSlotIndex() * sizeof(Js::Var *),
- TyVar, this->m_func);
- }
- else
- {
- srcOpnd = IR::MemRefOpnd::New((Js::Var *)rootObject + propertySymOpnd->GetSlotIndex(),
- TyVar, this->m_func);
- }
- instrLdFld->ReplaceSrc1(srcOpnd);
- instrLdFld->m_opcode = Js::OpCode::Ld_A;
- LowererMD::ChangeToAssign(instrLdFld);
- return true;
- }
- IR::Instr *
- Lowerer::LowerDelFld(IR::Instr *delFldInstr, IR::JnHelperMethod helperMethod, bool useInlineCache, bool strictMode)
- {
- IR::Instr *instrPrev;
- Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
- if (strictMode)
- {
- propertyOperationFlag = Js::PropertyOperation_StrictMode;
- }
- instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
- LowerLdFld(delFldInstr, helperMethod, helperMethod, useInlineCache);
- return instrPrev;
- }
// Lowers an IsInst (instanceof) to a helper call. Src1 is the isinst inline cache
// index; src2 is the tail of the argument chain built by the IR builder, which is
// walked here (via each arg sym's single def instruction) to recover the instance and
// function operands. Helper arguments are loaded in this order: inline cache, script
// context, function, and finally the instance.
IR::Instr *
Lowerer::LowerIsInst(IR::Instr * isInstInstr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev;
    IR::Instr * instrArg;
    IR::RegOpnd * argOpnd;

    // inlineCache
    instrPrev = m_lowererMD.LoadHelperArgument(isInstInstr, LoadIsInstInlineCacheOpnd(isInstInstr, isInstInstr->GetSrc1()->AsIntConstOpnd()->AsUint32()));
    isInstInstr->FreeSrc1();

    // Walk src2 to its (single) defining instruction: the first link of the arg chain.
    argOpnd = isInstInstr->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);

    // scriptContext
    LoadScriptContext(isInstInstr);

    // instance goes last, so remember it now
    IR::Opnd * instanceOpnd = instrArg->UnlinkSrc1();
    // Follow src2 of this arg instruction to the next link in the chain.
    argOpnd = instrArg->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg->Remove();
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);

    // function
    IR::Opnd *opnd = instrArg->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(isInstInstr, opnd);
    Assert(instrArg->GetSrc2() == NULL);
    instrArg->Remove();

    // instance
    m_lowererMD.LoadHelperArgument(isInstInstr, instanceOpnd);

    m_lowererMD.ChangeToHelperCall(isInstInstr, helperMethod);

    return instrPrev;
}
// Emits code at function entry to initialize a stack-allocated StackScriptFunction
// located at stackSym, and links it into the on-stack nested function list headed by
// nextStackFunctionOpnd. Afterwards nextStackFunctionOpnd points at this function's
// own 'next' slot so subsequent stack functions chain through it.
void
Lowerer::GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionProxyPtrPtr nestedProxy)
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
    Assert(nextStackFunctionOpnd);
    IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();

    // Materialize the address of the stack-allocated function object.
    IR::RegOpnd * addressOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseAddressOpnd(addressOpnd, func);
    InsertLea(addressOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertBeforeInstr);

    // Currently we don't initialize the environment until we actually allocate the function; we also
    // walk the list of stack functions when we need to box them, so initialize it to NullFrameDisplay.
    GenerateStackScriptFunctionInit(addressOpnd, nestedProxy,
        IR::AddrOpnd::New((Js::Var)&Js::NullFrameDisplay, IR::AddrOpndKindDynamicMisc, func), insertBeforeInstr);

    // Establish the next link
    InsertMove(nextStackFunctionOpnd, addressOpnd, insertBeforeInstr);
    // The 'next' slot lives immediately after the StackScriptFunction itself.
    this->nextStackFunctionOpnd = IR::SymOpnd::New(stackSym, sizeof(Js::StackScriptFunction), TyMachPtr, func);
}
// Emits code to initialize the fields of a ScriptFunction object at regOpnd: vtable,
// type, aux slots, object array, constructor cache, function info, environment, cached
// scope object, and inline cache flag. If the function's deferred prototype type is not
// known at JIT time, emits a runtime null check and a call to
// HelperEnsureFunctionProxyDeferredPrototypeType to create it. 'isZeroed' indicates the
// target memory is known to be zeroed, allowing the MemInit helpers to skip null stores.
void
Lowerer::GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
    Js::FunctionProxyPtrPtr nestedProxy, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    Func * func = this->m_func;
    IR::Opnd * functionProxyOpnd;
    Js::FunctionProxy * functionProxy = *nestedProxy;
    IR::Opnd * typeOpnd = nullptr;
    bool doCheckTypeOpnd = true;
    if (functionProxy->IsDeferred())
    {
        // Parsing is deferred: reload the proxy pointer and its deferred prototype
        // type at runtime (both may change when parsing completes).
        functionProxyOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(functionProxyOpnd, IR::MemRefOpnd::New((Js::FunctionProxy**) nestedProxy, TyMachPtr, func), insertBeforeInstr);
        typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(typeOpnd, IR::IndirOpnd::New(functionProxyOpnd->AsRegOpnd(), Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(),
            TyMachPtr, func), insertBeforeInstr);
    }
    else
    {
        Js::FunctionBody * functionBody = functionProxy->GetFunctionBody();
        functionProxyOpnd = CreateFunctionBodyOpnd(functionBody);
        Js::ScriptFunctionType * type = functionProxy->GetDeferredPrototypeType();
        if (type != nullptr)
        {
            // The type already exists: embed its address directly, no runtime check needed.
            typeOpnd = IR::AddrOpnd::New(type, IR::AddrOpndKindDynamicType, func);
            doCheckTypeOpnd = false;
        }
        else
        {
            // Type not created yet: load it at runtime (it may have been created since JIT time).
            typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
            InsertMove(typeOpnd,
                IR::MemRefOpnd::New(((byte *)functionBody) + Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(), TyMachPtr, func),
                insertBeforeInstr);
        }
    }

    if (doCheckTypeOpnd)
    {
        // If the deferred prototype type is null at runtime, call the helper to create it.
        IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        InsertTestBranch(typeOpnd, typeOpnd, Js::OpCode::BrEq_A, labelHelper, insertBeforeInstr);
        IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
        InsertBranch(Js::OpCode::Br, labelDone, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(labelHelper);
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, functionProxyOpnd);
        IR::Instr * callHelperInstr = IR::Instr::New(Js::OpCode::Call, typeOpnd,
            IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperEnsureFunctionProxyDeferredPrototypeType, func), func);
        insertBeforeInstr->InsertBefore(callHelperInstr);
        m_lowererMD.LowerCall(callHelperInstr, 0);
        insertBeforeInstr->InsertBefore(labelDone);
    }

    // Initialize the ScriptFunction's fields (offset 0 is the vtable pointer).
    GenerateMemInit(regOpnd, 0, vtableAddressOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfType(), typeOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfAuxSlots(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfObjectArray(), insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfConstructorCache(),
        LoadLibraryValueOpnd(insertBeforeInstr, LibraryValue::ValueConstructorCacheDefaultInstance),
        insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfFunctionInfo(), functionProxyOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfEnvironment(), envOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), insertBeforeInstr, isZeroed);
}
- void
- Lowerer::GenerateStackScriptFunctionInit(IR::RegOpnd * regOpnd, Js::FunctionProxyPtrPtr nestedProxy, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr)
- {
- Func * func = this->m_func;
- GenerateScriptFunctionInit(regOpnd,
- LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction),
- nestedProxy, envOpnd, insertBeforeInstr);
- InsertMove(IR::IndirOpnd::New(regOpnd, Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func),
- IR::AddrOpnd::NewNull(func), insertBeforeInstr);
- }
// Creates the operand that heads the linked list of stack-allocated nested functions.
// On x86/x64 it is a dedicated stack slot allocated at the current (initial) stack
// height; on other architectures it is a fixed slot relative to the frame register.
void
Lowerer::EnsureStackFunctionListStackSym()
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
#if defined(_M_IX86) || defined(_M_X64)
    // Must run before any other stack allocation so the slot lands at the expected offset.
    Assert(func->m_localStackHeight == (func->HasArgumentSlot()? MachArgsSlotOffset : 0));
    StackSym * stackFunctionListStackSym = StackSym::New(TyMachPtr, func);
    func->StackAllocate(stackFunctionListStackSym, sizeof(Js::ScriptFunction *));
    nextStackFunctionOpnd = IR::SymOpnd::New(stackFunctionListStackSym, TyMachPtr, func);
#else
    Assert(func->m_localStackHeight == 0);
    // Address the list head at a fixed offset from the frame register.
    nextStackFunctionOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(NULL, FRAME_REG, TyMachReg, func),
        -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachPtr, func);
#endif
}
- void
- Lowerer::AllocStackClosure()
- {
- m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
- m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
- }
- void
- Lowerer::EnsureZeroLastStackFunctionNext()
- {
- Assert(nextStackFunctionOpnd != nullptr);
- Func * func = this->m_func;
- IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
- InsertMove(nextStackFunctionOpnd, IR::AddrOpnd::NewNull(func), insertBeforeInstr);
- }
// Emits the stack-allocation fast path for NewScFunc: reserves stack space for a
// StackScriptFunction (plus its list-link pointer), initializes it at function entry,
// and at the NewScFunc site tests the function body's Flags_StackNestedFunc flag.
// If the flag is still set, stores the environment into the stack object and produces
// its address as the result; otherwise branches to the slow path that the caller emits
// after this (the regular helper call). Returns the LEA producing the fast-path result.
IR::Instr *
Lowerer::GenerateNewStackScFunc(IR::Instr * newScFuncInstr)
{
    Assert(newScFuncInstr->m_func->DoStackNestedFunc());
    Func * func = newScFuncInstr->m_func;
    uint index = newScFuncInstr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    Assert(index < func->GetJnFunction()->GetNestedCount());

    Js::FunctionProxyPtrPtr nestedProxy = func->GetJnFunction()->GetNestedFuncReference(index);
    // the StackAllocate call below for this sym is passing a size that is not represented by any IRType and hence passing TyMisc for the constructor
    StackSym * stackSym = StackSym::New(TyMisc, func);
    // ScriptFunction and its next pointer
    this->m_func->StackAllocate(stackSym, sizeof(Js::StackScriptFunction) + sizeof(Js::StackScriptFunction *));
    IR::Opnd * envOpnd = newScFuncInstr->GetSrc2();
    GenerateStackScriptFunctionInit(stackSym, nestedProxy);

    IR::LabelInstr * labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);

    // If Flags_StackNestedFunc is clear at runtime, skip the stack object and take
    // the helper path instead.
    InsertTestBranch(IR::MemRefOpnd::New(func->GetJnFunction()->GetAddressOfFlags(), TyInt8, func),
        IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, func, true),
        Js::OpCode::BrEq_A, labelNoStackFunc, newScFuncInstr);

    // Store the environment into the stack function and return its address.
    InsertMove(IR::SymOpnd::New(stackSym, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
        envOpnd,
        newScFuncInstr);

    IR::Instr * lea =
        InsertLea(newScFuncInstr->GetDst()->AsRegOpnd(), IR::SymOpnd::New(stackSym, TyMachPtr, func), newScFuncInstr);

    InsertBranch(Js::OpCode::Br, labelDone, newScFuncInstr);

    newScFuncInstr->InsertBefore(labelNoStackFunc);
    newScFuncInstr->InsertAfter(labelDone);

    return lea;
}
- IR::Instr *
- Lowerer::LowerNewScFunc(IR::Instr * newScFuncInstr)
- {
- IR::Instr *stackNewScFuncInstr = nullptr;
- if (newScFuncInstr->m_func->DoStackNestedFunc())
- {
- stackNewScFuncInstr = GenerateNewStackScFunc(newScFuncInstr);
- }
- IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
- IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
- IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
- m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScFunc );
- return stackNewScFuncInstr == nullptr? instrPrev : stackNewScFuncInstr;
- }
- IR::Instr *
- Lowerer::LowerNewScGenFunc(IR::Instr * newScFuncInstr)
- {
- IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
- IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
- IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
- m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScGenFunc );
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerScopedLdFld
- ///
/// Lower a load instruction that takes an additional instance to use as
/// a default if the scope chain provided doesn't contain the property.
- ///
- ///----------------------------------------------------------------------------
// Lowers a scoped field load to a helper call. src2 is the default instance used when
// the scope chain doesn't contain the property; src1 is the property sym. With an
// inline cache, the cache index, the runtime inline cache, and the function body are
// also passed; without one, the script context is passed instead.
IR::Instr *
Lowerer::LowerScopedLdFld(IR::Instr * ldFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache)
{
    IR::Opnd *src;
    IR::Instr *instrPrev = ldFldInstr->m_prev;

    if(!withInlineCache)
    {
        // Without an inline cache the helper takes the script context.
        LoadScriptContext(ldFldInstr);
    }

    // Default instance argument (src2).
    src = ldFldInstr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected reg opnd as src2");
    instrPrev = m_lowererMD.LoadHelperArgument(ldFldInstr, src);

    // Property sym argument (src1).
    src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(ldFldInstr, src);

    if (withInlineCache)
    {
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd()));

        m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }

    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod);

    return instrPrev;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerScopedLdInst
- ///
/// Lower a load instruction that takes an additional instance to use as
/// a default if the scope chain provided doesn't contain the property.
- ///
- ///----------------------------------------------------------------------------
// Lowers a scoped instance load to a helper call. The helper writes its result through
// an out-pointer (a stack slot whose sym comes from src2), which is then copied into
// the destination register after the call.
IR::Instr *
Lowerer::LowerScopedLdInst(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    IR::Opnd *src;
    IR::Instr *instrPrev;

    // last argument is the scriptContext
    instrPrev = LoadScriptContext(instr);

    src = instr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected Reg opnd as src2");

    // __out Var*. The StackSym is allocated in the IR builder, and here we need to insert a lea
    StackSym* dstSym = src->GetStackSym();
    IR::Instr *load = this->m_lowererMD.LoadStackAddress(dstSym);
    instr->InsertBefore(load);
    IR::Opnd* tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instr, tempOpnd);

    // now 3rd last argument is the rootObject of the function. Need to add addrOpnd to
    // pass in the address of the rootObject.
    IR::Opnd * srcOpnd;
    Js::RootObjectBase * rootObject = this->m_func->GetJnFunction()->GetRootObject();
    srcOpnd = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, instr->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, srcOpnd);

    // no change, the property field built from the IR builder.
    src = instr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(instr, src);

    instrPrev = m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    // Copy the out-value from its stack slot into the destination register.
    IR::RegOpnd* regOpnd = IR::RegOpnd::New(dstSym, TyVar, this->m_func);
    IR::SymOpnd* symOpnd = IR::SymOpnd::New(dstSym, TyVar, this->m_func);
    this->m_lowererMD.CreateAssign(regOpnd, symOpnd, instrPrev);

    return instrPrev;
}
- IR::Instr *
- Lowerer::LowerScopedDelFld(IR::Instr * delFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache, bool strictMode)
- {
- // Lower a scoped delete-field: push the property-operation flags as the
- // extra trailing helper argument, then reuse the scoped-load lowering for
- // the remaining arguments and the helper call itself.
- const Js::PropertyOperationFlags opFlags =
- strictMode ? Js::PropertyOperation_StrictMode : Js::PropertyOperation_None;
- IR::Instr * const instrPrev = m_lowererMD.LoadHelperArgument(
- delFldInstr, IR::IntConstOpnd::New((IntConstType)opFlags, TyInt32, m_func, true));
- LowerScopedLdFld(delFldInstr, helperMethod, withInlineCache);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerProfiledStFld(IR::JitProfilingInstr *stFldInstr, Js::PropertyOperationFlags flags)
- {
- // Lower a field store in JIT-profiling mode: instead of the normal store
- // helpers, call a ProfilingHelpers::Profiled*Fld_Jit entry point (signatures
- // quoted below), which performs the store and records profile data.
- Assert(stFldInstr->profileId == Js::Constants::NoProfileId);
- IR::Instr *const instrPrev = stFldInstr->m_prev;
- /*
- void ProfilingHelpers::ProfiledInitFld_Jit(
- const Var instance,
- const PropertyId propertyId,
- const InlineCacheIndex inlineCacheIndex,
- const Var value,
- void *const framePointer)
- void ProfilingHelpers::ProfiledStFld_Jit(
- const Var instance,
- const PropertyId propertyId,
- const InlineCacheIndex inlineCacheIndex,
- const Var value,
- void *const framePointer)
- void ProfilingHelpers::ProfiledStSuperFld_Jit(
- const Var instance,
- const PropertyId propertyId,
- const InlineCacheIndex inlineCacheIndex,
- const Var value,
- void *const framePointer,
- const Var thisInstance)
- {
- */
- // Arguments are pushed right-to-left: frame pointer, then (StSuperFld only)
- // the extra src2 instance, the stored value, cache index, and property sym.
- m_lowererMD.LoadHelperArgument(stFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
- if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
- {
- m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
- }
- m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc1());
- IR::Opnd *dst = stFldInstr->UnlinkDst();
- AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
- m_lowererMD.LoadHelperArgument(
- stFldInstr,
- IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
- LoadPropertySymAsArgument(stFldInstr, dst);
- // Select the helper by opcode; for plain stores the root/strict bits of
- // 'flags' pick among the four StFld helper variants.
- IR::JnHelperMethod helper;
- switch (stFldInstr->m_opcode)
- {
- case Js::OpCode::InitFld:
- case Js::OpCode::InitRootFld:
- helper = IR::HelperProfiledInitFld;
- break;
- case Js::OpCode::StSuperFld:
- helper = IR::HelperProfiledStSuperFld;
- break;
- default:
- helper =
- flags & Js::PropertyOperation_Root
- ? flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStRootFld_Strict : IR::HelperProfiledStRootFld
- : flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStFld_Strict : IR::HelperProfiledStFld;
- break;
- }
- stFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
- m_lowererMD.LowerCall(stFldInstr, 0);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerStFld
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerStFld(
- IR::Instr * stFldInstr,
- IR::JnHelperMethod helperMethod,
- IR::JnHelperMethod polymorphicHelperMethod,
- bool withInlineCache,
- IR::LabelInstr *labelBailOut,
- bool isHelper,
- bool withPutFlags,
- Js::PropertyOperationFlags flags)
- {
- // Lower a field store to a helper call. When withInlineCache is set, the
- // helper additionally receives the function body, the inline cache, and the
- // cache index; a runtime polymorphic cache switches to the polymorphic helper.
- if (stFldInstr->IsJitProfilingInstr())
- {
- // If we want to profile then do something completely different
- return this->LowerProfiledStFld(stFldInstr->AsJitProfilingInstr(), flags);
- }
- IR::Instr *instrPrev = stFldInstr->m_prev;
- IR::Opnd *dst = stFldInstr->UnlinkDst();
- AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
- IR::Opnd * inlineCacheOpnd = nullptr;
- if (withInlineCache)
- {
- AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
- if (dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
- {
- Js::PolymorphicInlineCache * polymorphicInlineCache = dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
- helperMethod = polymorphicHelperMethod;
- inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
- }
- else
- {
- // Need to load runtime inline cache opnd first before loading any helper argument
- // because LoadRuntimeInlineCacheOpnd may create labels marked as helper
- // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
- inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd(), isHelper);
- }
- }
- // Helper arguments below are pushed right-to-left (flags last in the C
- // signature, so loaded first here).
- if (withPutFlags)
- {
- m_lowererMD.LoadHelperArgument(stFldInstr,
- IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
- }
- IR::Opnd *src = stFldInstr->UnlinkSrc1();
- if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
- {
- m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
- }
- m_lowererMD.LoadHelperArgument(stFldInstr, src);
- this->LoadPropertySymAsArgument(stFldInstr, dst);
- if (withInlineCache)
- {
- Assert(inlineCacheOpnd != nullptr);
- this->m_lowererMD.LoadHelperArgument(
- stFldInstr,
- IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
- this->m_lowererMD.LoadHelperArgument(stFldInstr, inlineCacheOpnd);
- this->m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
- }
- IR::RegOpnd *opndBase = dst->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
- m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod, labelBailOut, opndBase, dst->AsSymOpnd()->IsPropertySymOpnd() ? dst->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);
- return instrPrev;
- }
- IR::Instr* Lowerer::GenerateCompleteStFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
- IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath, bool withPutFlags, Js::PropertyOperationFlags flags)
- {
- // Drive the complete lowering of a field store: try the custom-property fast
- // path, then the cached-type (object type spec) path, then the generic fast
- // path; fall back to the LowerStFld helper call whenever a path bails to a
- // helper label. Returns the instruction preceding the lowered sequence.
- if(instr->CallsAccessor() && instr->HasBailOutInfo())
- {
- IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
- Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
- }
- IR::Instr* prevInstr = instr->m_prev;
- IR::LabelInstr* labelBailOut = nullptr;
- IR::LabelInstr* labelHelper = nullptr;
- bool isHelper = false;
- IR::RegOpnd* typeOpnd = nullptr;
- if(emitFastPath && GenerateFastStFldForCustomProperty(instr, &labelHelper))
- {
- // Fast path fully emitted; if a helper label was produced we still need the
- // slow-path helper call behind it, otherwise the instr is gone entirely.
- if(labelHelper)
- {
- Assert(labelHelper->isOpHelper);
- instr->InsertBefore(labelHelper);
- prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
- }
- else
- {
- instr->Remove();
- return prevInstr;
- }
- }
- else if (this->GenerateStFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
- {
- // Store fully lowered via cached type info; nothing left to emit.
- Assert(labelHelper == nullptr);
- return prevInstr;
- }
- else if (emitFastPath)
- {
- if (!GenerateFastStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper, withPutFlags, flags))
- {
- if (labelHelper != nullptr)
- {
- labelHelper->isOpHelper = isHelper;
- instr->InsertBefore(labelHelper);
- }
- prevInstr = this->LowerStFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
- }
- }
- else
- {
- if (labelHelper != nullptr)
- {
- labelHelper->isOpHelper = isHelper;
- instr->InsertBefore(labelHelper);
- }
- prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, monoHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
- }
- return prevInstr;
- }
- void
- Lowerer::GenerateDirectFieldStore(IR::Instr* instrStFld, IR::PropertySymOpnd* propertySymOpnd)
- {
- // Store the source value directly into the object's slot array, using the
- // slot index recorded in the (type-checked) property sym operand.
- Func* func = instrStFld->m_func;
- IR::Opnd *opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrStFld, propertySymOpnd);
- // Store the value to the slot, getting the slot index from the cache.
- uint16 index = propertySymOpnd->GetSlotIndex();
- // BUGFIX: compare against the uint16 sentinel explicitly. With the bare
- // literal -1 both operands promote to int, so a uint16 can never equal -1
- // and the assert was a no-op.
- Assert(index != (uint16)-1);
- // BUGFIX: the guard macro was misspelled "RECYCLER_RECYCLER_WRITE_BARRIER_JIT",
- // which made the write-barrier store path dead code even when
- // RECYCLER_WRITE_BARRIER_JIT is defined.
- #ifdef RECYCLER_WRITE_BARRIER_JIT
- if (opndSlotArray->IsRegOpnd())
- {
- IR::IndirOpnd * opndDst = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
- LowererMD::GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
- }
- else
- {
- Assert(opndSlotArray->IsMemRefOpnd());
- IR::MemRefOpnd * opndDst = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
- LowererMD::GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
- }
- #else
- // No JIT write barrier: emit a plain assignment into the slot (indirect off
- // the slot-array register, or an absolute memory reference).
- IR::Opnd *opnd;
- if (opndSlotArray->IsRegOpnd())
- {
- opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
- }
- else
- {
- opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
- }
- this->m_lowererMD.CreateAssign(opnd, instrStFld->GetSrc1(), instrStFld);
- #endif
- }
- bool
- Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
- {
- // Attempt to lower a field store using cached type information (object type
- // specialization). Returns true when the store was fully lowered here (the
- // caller must not lower it again). Returns false when the caller should
- // continue with the generic lowering; in that case *continueAsHelperOut and
- // *labelHelperOut tell the caller whether/where to continue as a helper,
- // and *typeOpndOut holds the loaded type if a type check was emitted.
- IR::Instr *instr;
- IR::RegOpnd *typeOpnd = nullptr;
- IR::LabelInstr* labelObjCheckFailed = nullptr;
- IR::LabelInstr *labelTypeCheckFailed = nullptr;
- IR::LabelInstr *labelBothTypeChecksFailed = nullptr;
- IR::LabelInstr *labelDone = nullptr;
- Assert(continueAsHelperOut != nullptr);
- *continueAsHelperOut = false;
- Assert(labelHelperOut != nullptr);
- *labelHelperOut = nullptr;
- Assert(typeOpndOut != nullptr);
- *typeOpndOut = nullptr;
- Assert(instrStFld->GetDst()->IsSymOpnd());
- if (!instrStFld->GetDst()->AsSymOpnd()->IsPropertySymOpnd() || !instrStFld->GetDst()->AsPropertySymOpnd()->IsTypeCheckSeqCandidate())
- {
- return false;
- }
- IR::PropertySymOpnd *propertySymOpnd = instrStFld->GetDst()->AsPropertySymOpnd();
- // If we have any object type spec info, we better not believe this is a load from prototype, since this is a store
- // and we never share inline caches between loads and stores.
- Assert(!propertySymOpnd->HasObjTypeSpecFldInfo() || !propertySymOpnd->IsLoadedFromProto());
- AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
- if (!propertySymOpnd->IsTypeCheckSeqCandidate())
- {
- return false;
- }
- if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
- {
- return false;
- }
- Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind())));
- // In the backwards pass we only add guarded property operations to instructions that are not already
- // protected by an upstream type check.
- Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
- PHASE_PRINT_TESTTRACE(
- Js::ObjTypeSpecPhase,
- this->m_func,
- L"Field store: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
- Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
- this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySymOpnd->m_sym->AsPropertySym()->m_propertyId)->GetBuffer(),
- this->m_func->GetJnFunction()->GetDisplayName(),
- propertySymOpnd->m_inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(),
- propertySymOpnd->IsTypeChecked() ? L"true" : L"false");
- if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
- {
- propertySymOpnd->UpdateSlotForFinalType();
- }
- Func* func = instrStFld->m_func;
- // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
- // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
- bool hasTypeCheckBailout = instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind());
- // If the type hasn't been checked upstream, see if it makes sense to check it here.
- bool isTypeChecked = propertySymOpnd->IsTypeChecked();
- if (!isTypeChecked)
- {
- // If the initial type has been checked, we can do a hard coded type transition without any type checks
- // (see GenerateStFldWithCachedFinalType), which is always worth doing, even if the type is not needed
- // downstream. We're not introducing any additional bailouts.
- if (propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType() && !propertySymOpnd->IsTypeDead())
- {
- // We have a final type in hand, so we can JIT (most of) the type transition work.
- return this->GenerateStFldWithCachedFinalType(instrStFld, propertySymOpnd);
- }
- if (propertySymOpnd->HasTypeMismatch())
- {
- // So we have a type mismatch, which happens when the type (and the type without property if ObjTypeSpecStore
- // is on) on this instruction didn't match the live type value according to the flow. We must have hit some
- // stale inline cache (perhaps inlined from a different function, or on a code path not taken for a while).
- // Either way, we know exactly what type the object must have at this point (fully determined by flow), but
- // we don't know whether that type already has the property we're storing here. All in all, we know exactly
- // what shape the object will have after this operation, but we're not sure what label (type) to give this
- // shape. Thus we can simply let the fast path do its thing based on the live inline cache. The downstream
- // instructions relying only on this shape (loads and stores) are safe, and those that need the next type
- // (i.e. adds) will do the same thing as this instruction.
- return false;
- }
- // If we're still here then we must need a primary type check on this instruction to protect
- // a sequence of field operations downstream, or a local type check for an isolated field store.
- Assert(propertySymOpnd->NeedsPrimaryTypeCheck() || propertySymOpnd->NeedsLocalTypeCheck());
- labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- labelBothTypeChecksFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- labelObjCheckFailed = hasTypeCheckBailout ? labelBothTypeChecksFailed : IR::LabelInstr::New(Js::OpCode::Label, func, true);
- typeOpnd = this->GenerateCachedTypeCheck(instrStFld, propertySymOpnd, labelObjCheckFailed, labelBothTypeChecksFailed, labelTypeCheckFailed);
- *typeOpndOut = typeOpnd;
- }
- // Either we are protected by a type check upstream or we just emitted a type check above,
- // now it's time to store the field value.
- GenerateDirectFieldStore(instrStFld, propertySymOpnd);
- // If we are protected by a type check upstream, we don't need a bailout or helper here, delete the instruction
- // and return "true" to indicate that we succeeded in eliminating it.
- if (isTypeChecked)
- {
- Assert(labelTypeCheckFailed == nullptr && labelBothTypeChecksFailed == nullptr);
- AssertMsg(!instrStFld->HasBailOutInfo(), "Why does a direct field store have bailout?");
- instrStFld->Remove();
- return true;
- }
- // Otherwise, branch around the helper on successful type check.
- labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
- instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
- instrStFld->InsertBefore(instr);
- // On failed type check, try the type without property if we've got one.
- instrStFld->InsertBefore(labelTypeCheckFailed);
- // Caution, this is one of the dusty corners of the JIT. We only get here if this is an isolated StFld which adds a property, or
- // ObjTypeSpecStore is off. In the former case no downstream operations depend on the final type produced here, and we can fall
- // back on live cache and helper if the type doesn't match. In the latter we may have a cache with type transition, which must
- // produce a value for the type after transition, because that type is consumed downstream. Thus, if the object's type doesn't
- // match either the type with or the type without the property we're storing, we must bail out here.
- bool emitAddProperty = propertySymOpnd->IsMono() && propertySymOpnd->HasInitialType();
- if (emitAddProperty)
- {
- GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, typeOpnd, labelBothTypeChecksFailed);
- GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetType());
- instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
- instrStFld->InsertBefore(instr);
- }
- instrStFld->InsertBefore(labelBothTypeChecksFailed);
- instrStFld->InsertAfter(labelDone);
- if (hasTypeCheckBailout)
- {
- // With a type check bailout the original instruction becomes the bailout
- // itself; the store was already emitted above on the success path.
- AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
- "Why does a field store have a type check bailout, if its type is dead?");
- if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld)
- {
- // Set the cache index in the bailout info so that the generated code will write it into the
- // bailout record at runtime.
- instrStFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
- }
- else
- {
- Assert(instrStFld->GetBailOutInfo()->polymorphicCacheIndex == propertySymOpnd->m_inlineCacheIndex);
- }
- instrStFld->m_opcode = Js::OpCode::BailOut;
- instrStFld->FreeSrc1();
- instrStFld->FreeDst();
- this->GenerateBailOut(instrStFld);
- return true;
- }
- else
- {
- // No bailout: hand the object-check-failed label back to the caller so the
- // remaining lowering continues behind it as a helper block.
- *continueAsHelperOut = true;
- Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelBothTypeChecksFailed);
- *labelHelperOut = labelObjCheckFailed;
- return false;
- }
- }
- IR::RegOpnd *
- Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr* labelObjCheckFailed, IR::LabelInstr *labelTypeCheckFailed, IR::LabelInstr *labelSecondChance)
- {
- // Emit the type check protecting an object-type-specialized operation:
- // load the object's type and compare it against the expected type, either
- // directly, through a property guard's value, or via the equivalent-type
- // helper. Branches to labelObjCheckFailed if the base isn't an object, and
- // to labelTypeCheckFailed (or labelSecondChance, when provided, for the
- // direct-compare miss) on type mismatch. Returns the reg holding the type.
- Assert(propertySymOpnd->MayNeedTypeCheckProtection());
- Func* func = instrChk->m_func;
- IR::RegOpnd *regOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(func);
- regOpnd->SetValueType(propertySymOpnd->GetPropertyOwnerValueType());
- if (!regOpnd->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(regOpnd, instrChk, labelObjCheckFailed);
- }
- IR::Opnd *expectedTypeOpnd;
- bool emitDirectCheck = true;
- // Note: don't attempt equivalent type check if we're doing a final type optimization or if we have a monomorphic
- // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks
- // repeatedly and never rejit.
- bool doEquivTypeCheck =
- propertySymOpnd->HasEquivalentTypeSet() &&
- !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) &&
- !propertySymOpnd->MustDoMonoCheck() &&
- (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut());
- Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut());
- Js::Type* type = doEquivTypeCheck ? propertySymOpnd->GetFirstEquivalentType() : propertySymOpnd->GetType();
- Js::PropertyGuard* typeCheckGuard = doEquivTypeCheck ?
- (Js::PropertyGuard*)CreateEquivalentTypeGuardAndLinkToGuardedProperties(type, propertySymOpnd) :
- (Js::PropertyGuard*)CreateTypePropertyGuardForGuardedProperties(type, propertySymOpnd);
- if (typeCheckGuard == nullptr)
- {
- // No guard: compare against the type address baked into the code.
- Assert(type != nullptr);
- expectedTypeOpnd = IR::AddrOpnd::New(type, IR::AddrOpndKindDynamicType, func, true);
- }
- else
- {
- // Guard present: compare against the guard's (invalidatable) value slot.
- Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
- expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typeCheckGuard->GetAddressOfValue()), TyMachPtr, func, IR::AddrOpndKindDynamicGuardValueRef);
- emitDirectCheck = false;
- }
- if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func))
- {
- OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, L"Emitted %s type check for type 0x%p",
- emitDirectCheck ? L"direct" : propertySymOpnd->IsPoly() ? L"equivalent" : L"indirect", type);
- #if DBG
- if (propertySymOpnd->GetGuardedPropOps() != nullptr)
- {
- Output::Print(L" guarding operations:\n ");
- propertySymOpnd->GetGuardedPropOps()->Dump();
- }
- else
- {
- Output::Print(L"\n");
- }
- #else
- Output::Print(L"\n");
- #endif
- Output::Flush();
- }
- IR::RegOpnd* typeOpnd = IR::RegOpnd::New(TyMachReg, func);
- IR::Opnd *sourceType;
- if (regOpnd->m_sym->IsConst() && !regOpnd->m_sym->IsIntConst() && !regOpnd->m_sym->IsFloatConst())
- {
- // Object is a known constant address: read its type field absolutely.
- sourceType = IR::MemRefOpnd::New((BYTE*)regOpnd->m_sym->GetConstAddress() +
- Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func, IR::AddrOpndKindDynamicObjectTypeRef);
- }
- else
- {
- sourceType = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
- }
- m_lowererMD.CreateAssign(typeOpnd, sourceType, instrChk);
- if (doEquivTypeCheck)
- {
- // TODO (ObjTypeSpec): For isolated equivalent type checks it would be good to emit a check if the cache is still valid, and
- // if not go straight to live polymorphic cache. This way we wouldn't have to bail out and re-JIT, and also wouldn't continue
- // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
- // type in the equivalent type cache.
- IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);
- IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);
- instrChk->InsertBefore(labelCheckEquivalentType);
- // Slow path: call the equivalence helper with (type, guard); a zero return
- // means the type is not equivalent and we branch to the failure label.
- this->m_lowererMD.LoadHelperArgument(instrChk, IR::AddrOpnd::New((Js::Var)typeCheckGuard, IR::AddrOpndKindDynamicTypeCheckGuard, func, true));
- this->m_lowererMD.LoadHelperArgument(instrChk, typeOpnd);
- IR::RegOpnd* equivalentTypeCheckResultOpnd = IR::RegOpnd::New(TyUint8, func);
- IR::HelperCallOpnd* equivalentTypeCheckHelperCallOpnd = IR::HelperCallOpnd::New(IR::HelperCheckIfTypeIsEquivalent, func);
- IR::Instr* equivalentTypeCheckCallInstr = IR::Instr::New(Js::OpCode::Call, equivalentTypeCheckResultOpnd, equivalentTypeCheckHelperCallOpnd, func);
- instrChk->InsertBefore(equivalentTypeCheckCallInstr);
- this->m_lowererMD.LowerCall(equivalentTypeCheckCallInstr, 0);
- InsertTestBranch(equivalentTypeCheckResultOpnd, equivalentTypeCheckResultOpnd, Js::OpCode::BrEq_A, labelTypeCheckFailed, instrChk);
- // TODO (ObjTypeSpec): Consider emitting a shared bailout to which a specific bailout kind is written at runtime. This would allow us to distinguish
- // between non-equivalent type and other cases, such as invalidated guard (due to fixed field overwrite, perhaps) or too much thrashing on the
- // equivalent type cache. We could determine bailout kind based on the value returned by the helper. In the case of cache thrashing we could just
- // turn off the whole optimization for a given function.
- instrChk->InsertBefore(labelTypeCheckSucceeded);
- }
- else
- {
- InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
- }
- // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
- // as long as there are other objects with types equivalent on the properties referenced by this code. The type is kept alive until entry point
- // installation by the JIT transfer data, and after that by the equivalent type cache, so it will stay alive unless or until it gets evicted
- // from the cache.
- if (!doEquivTypeCheck)
- {
- PinTypeRef(type, type, instrChk, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
- }
- return typeOpnd;
- }
- void
- Lowerer::PinTypeRef(Js::Type* type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
- {
- // Register typeRef with the func (a "strong" pin when typeRef is the type
- // itself, "weak" otherwise — see the trace text), then emit the optional
- // phase trace describing what was pinned and for which property.
- this->m_func->PinTypeRef(typeRef);
- if (!PHASE_TRACE(Js::TracePinnedTypesPhase, this->m_func))
- {
- return;
- }
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"PinnedTypes: function %s(%s) instr %s property %s(#%u) pinned %s reference 0x%p to type 0x%p.\n",
- this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId,
- typeRef == type ? L"strong" : L"weak", typeRef, type);
- Output::Flush();
- }
- void
- Lowerer::GenerateCachedTypeWithoutPropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::Opnd *typeOpnd, IR::LabelInstr *labelTypeCheckFailed)
- {
- // Check that the object still has the "initial" type (the type recorded
- // before the stored property was added), branching to labelTypeCheckFailed
- // on mismatch. typeOpnd may carry an already-loaded type; if null, the type
- // is loaded here (including the object test for possibly-tagged values).
- Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
- Assert(propertySymOpnd->HasInitialType());
- Js::Type* typeWithoutProperty = propertySymOpnd->GetInitialType();
- // We should never add properties to objects of static types.
- Assert(Js::DynamicType::Is(typeWithoutProperty->GetTypeId()));
- if (typeOpnd == nullptr)
- {
- // No opnd holding the type was passed in, so we have to load the type here.
- IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- if (!baseOpnd->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelTypeCheckFailed);
- }
- IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);
- }
- Js::JitTypePropertyGuard* typePropertyGuard = CreateTypePropertyGuardForGuardedProperties(typeWithoutProperty, propertySymOpnd);
- IR::Opnd *expectedTypeOpnd;
- if (typePropertyGuard)
- {
- // Guard available: compare against the guard's value slot ("indirect").
- bool emitDirectCheck = true;
- Assert(typePropertyGuard != nullptr);
- Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
- expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typePropertyGuard->GetAddressOfValue()), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
- emitDirectCheck = false;
- OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, L"Emitted %s type check for type 0x%p.\n",
- emitDirectCheck ? L"direct" : L"indirect", typeWithoutProperty);
- }
- else
- {
- // No guard: compare against the type address baked into the code.
- expectedTypeOpnd = IR::AddrOpnd::New(typeWithoutProperty, IR::AddrOpndKindDynamicType, m_func, true);
- }
- InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelTypeCheckFailed, instrInsert);
- // Technically, it should be enough to pin the final type, because it should keep all of its predecessors alive, but
- // just to be extra cautious, let's pin the initial type as well.
- PinTypeRef(typeWithoutProperty, typeWithoutProperty, instrInsert, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
- }
- void
- Lowerer::GenerateFixedFieldGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
- {
- // Fixed-field guards reuse the generic property-guard check; just forward.
- GeneratePropertyGuardCheck(insertPointInstr, propertySymOpnd, labelBailOut);
- }
- Js::JitTypePropertyGuard*
- Lowerer::CreateTypePropertyGuardForGuardedProperties(Js::Type* type, IR::PropertySymOpnd* propertySymOpnd)
- {
- // Create (at most one) type property guard for 'type' and link it to each
- // property this operation is guarded on, via LinkGuardToPropertyId.
- // Returns nullptr when the entry point has no shared property guards, or
- // when lazy fixed-type bailout records the properties instead of guarding.
- // We should always have a list of guarded properties.
- Assert(propertySymOpnd->GetGuardedPropOps() != nullptr);
- Js::JitTypePropertyGuard* guard = nullptr;
- Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
- if (entryPointInfo->HasSharedPropertyGuards())
- {
- // Consider (ObjTypeSpec): Because we allocate these guards from the JIT thread we can't share guards for the same type across multiple functions.
- // This leads to proliferation of property guards on the thread context. The alternative would be to pre-allocate shared (by value) guards
- // from the thread context during work item creation. We would create too many of them (because some types aren't actually used as guards),
- // but we could share a guard for a given type between functions. This may ultimately be better.
- LinkGuardToGuardedProperties(entryPointInfo, propertySymOpnd->GetGuardedPropOps(), [this, type, &guard](Js::PropertyId propertyId)
- {
- if (DoLazyFixedTypeBailout(this->m_func))
- {
- // Lazy bailout path: record the property; no guard is created.
- this->m_func->lazyBailoutProperties.Item(propertyId);
- }
- else
- {
- // Create the guard lazily on the first guarded property.
- if (guard == nullptr)
- {
- guard = this->m_func->GetOrCreateSingleTypeGuard(type);
- }
- if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
- {
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- wchar_t workItemName[256];
- this->m_func->m_workItem->GetDisplayName(workItemName, _countof(workItemName));
- Output::Print(L"ObjTypeSpec: function %s(%s) registered guard 0x%p with value 0x%p for property %s (%u).\n",
- workItemName, this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- guard, guard->GetValue(), this->GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
- Output::Flush();
- }
- this->m_func->EnsurePropertyGuardsByPropertyId();
- this->m_func->LinkGuardToPropertyId(propertyId, guard);
- }
- });
- }
- return guard;
- }
- Js::JitEquivalentTypeGuard*
- Lowerer::CreateEquivalentTypeGuardAndLinkToGuardedProperties(Js::Type* type, IR::PropertySymOpnd* propertySymOpnd)
- {
- // We should always have a list of guarded properties.
- Assert(propertySymOpnd->HasObjTypeSpecFldInfo() && propertySymOpnd->HasEquivalentTypeSet() && propertySymOpnd->GetGuardedPropOps());
- Js::JitEquivalentTypeGuard* guard = this->m_func->CreateEquivalentTypeGuard(type, propertySymOpnd->GetObjTypeSpecFldId());
- Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
- if (entryPointInfo->HasSharedPropertyGuards())
- {
- LinkGuardToGuardedProperties(entryPointInfo, propertySymOpnd->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
- {
- if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
- {
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"ObjTypeSpec: function %s(%s) registered equivalent type spec guard 0x%p with value 0x%p for property %s (%u).\n",
- this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- guard, guard->GetValue(), GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
- Output::Flush();
- }
- this->m_func->EnsurePropertyGuardsByPropertyId();
- this->m_func->LinkGuardToPropertyId(propertyId, guard);
- });
- }
- Assert(guard->GetCache() != nullptr);
- Js::EquivalentTypeCache* cache = guard->GetCache();
- // TODO (ObjTypeSpec): If we delayed populating the types until encoder, we could bulk allocate all equivalent type caches
- // in one block from the heap. This would allow us to not allocate them from the native code data allocator and free them
- // when no longer needed. However, we would need to store the global property operation ID in the guard, so we can look up
- // the info in the encoder. Perhaps we could overload the cache pointer to be the ID until encoder.
- // Copy types from the type set to the guard's cache
- Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
- uint16 cachedTypeCount = typeSet->GetCount() < EQUIVALENT_TYPE_CACHE_SIZE ? typeSet->GetCount() : EQUIVALENT_TYPE_CACHE_SIZE;
- for (uint16 ti = 0; ti < cachedTypeCount; ti++)
- {
- cache->types[ti] = typeSet->GetType(ti);
- }
- // Populate property ID and slot index arrays on the guard's cache. We iterate over the
- // bit vector of property operations protected by this guard, but some property operations
- // may be referring to the same property ID (but not share the same cache). We skip
- // redundant entries by maintaining a hash set of property IDs we've already encountered.
- auto propOps = propertySymOpnd->GetGuardedPropOps();
- uint propOpCount = propOps->Count();
- bool isTypeStatic = Js::StaticType::Is(type->GetTypeId());
- JsUtil::BaseDictionary<Js::PropertyId, Js::EquivalentPropertyEntry*, JitArenaAllocator> propIds(this->m_alloc, propOpCount);
- Js::EquivalentPropertyEntry* properties = AnewArray(this->m_alloc, Js::EquivalentPropertyEntry, propOpCount);
- uint propIdCount = 0;
- FOREACH_BITSET_IN_SPARSEBV(propOpId, propOps)
- {
- Js::ObjTypeSpecFldInfo* propOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propOpId);
- Js::PropertyId propertyId = propOpInfo->GetPropertyId();
- Js::PropertyIndex propOpIndex = Js::Constants::NoSlot;
- bool hasFixedValue = propOpInfo->HasFixedValue();
- if (hasFixedValue)
- {
- cache->SetHasFixedValue();
- }
- bool isLoadedFromProto = propOpInfo->IsLoadedFromProto();
- if (isLoadedFromProto)
- {
- cache->SetIsLoadedFromProto();
- }
- else
- {
- propOpIndex = propOpInfo->GetSlotIndex();
- }
- bool propOpUsesAuxSlot = propOpInfo->UsesAuxSlot();
- AssertMsg(!isTypeStatic || !propOpInfo->IsBeingStored(), "Why are we storing a field to an object of static type?");
- Js::EquivalentPropertyEntry* entry;
- if (propIds.TryGetValue(propertyId, &entry))
- {
- if (propOpIndex == entry->slotIndex && propOpUsesAuxSlot == entry->isAuxSlot)
- {
- entry->mustBeWritable |= propOpInfo->IsBeingStored();
- }
- else
- {
- // Due to inline cache sharing we have the same property accessed using different caches
- // with inconsistent info. This means a guaranteed bailout on the equivalent type check.
- // We'll just let it happen and turn off the optimization for this function. We could avoid
- // this problem by tracking property information on the value type in glob opt.
- if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->m_func))
- {
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Js::FunctionBody* topFunctionBody = this->m_func->GetJnFunction();
- Js::ScriptContext* scriptContext = topFunctionBody->GetScriptContext();
- Output::Print(L"EquivObjTypeSpec: top function %s (%s): duplicate property clash on %s(#%d) \n",
- topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer), propertyId, scriptContext->GetPropertyNameLocked(propertyId)->GetBuffer());
- Output::Flush();
- }
- Assert(propIdCount < propOpCount);
- __analysis_assume(propIdCount < propOpCount);
- entry = &properties[propIdCount++];
- entry->propertyId = propertyId;
- entry->slotIndex = propOpIndex;
- entry->isAuxSlot = propOpUsesAuxSlot;
- entry->mustBeWritable = propOpInfo->IsBeingStored();
- }
- }
- else
- {
- Assert(propIdCount < propOpCount);
- __analysis_assume(propIdCount < propOpCount);
- entry = &properties[propIdCount++];
- entry->propertyId = propertyId;
- entry->slotIndex = propOpIndex;
- entry->isAuxSlot = propOpUsesAuxSlot;
- entry->mustBeWritable = propOpInfo->IsBeingStored();
- propIds.AddNew(propertyId, entry);
- }
- }
- NEXT_BITSET_IN_SPARSEBV;
- cache->record.propertyCount = propIdCount;
- cache->record.properties = NativeCodeDataNewArray(this->m_func->GetNativeCodeDataAllocator(), Js::EquivalentPropertyEntry, propIdCount);
- memcpy(cache->record.properties, properties, propIdCount * sizeof(Js::EquivalentPropertyEntry));
- return guard;
- }
- bool
- Lowerer::LinkCtorCacheToGuardedProperties(Js::JitTimeConstructorCache* ctorCache)
- {
- // We do not always have guarded properties. If the constructor is empty and the subsequent code doesn't load or store any of
- // the constructed object's properties, or if all inline caches are empty then this ctor cache doesn't guard any properties.
- if (ctorCache->GetGuardedPropOps() == nullptr)
- {
- return false;
- }
- bool linked = false;
- Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
- if (entryPointInfo->HasSharedPropertyGuards())
- {
- linked = LinkGuardToGuardedProperties(entryPointInfo, ctorCache->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
- {
- if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
- {
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"ObjTypeSpec: function %s(%s) registered ctor cache 0x%p with value 0x%p for property %s (%u).\n",
- this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- ctorCache->runtimeCache, ctorCache->type, GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
- Output::Flush();
- }
- this->m_func->EnsureCtorCachesByPropertyId();
- this->m_func->LinkCtorCacheToPropertyId(propertyId, ctorCache);
- });
- }
- return linked;
- }
- template<typename LinkFunc>
- bool
- Lowerer::LinkGuardToGuardedProperties(Js::EntryPointInfo* entryPointInfo, const BVSparse<JitArenaAllocator>* guardedPropOps, LinkFunc link)
- {
- Assert(entryPointInfo != nullptr);
- Assert(entryPointInfo->HasSharedPropertyGuards());
- Assert(guardedPropOps != nullptr);
- bool linked = false;
- // For every entry in the bit vector, register the guard for the corresponding property ID.
- FOREACH_BITSET_IN_SPARSEBV(propertyOpId, guardedPropOps)
- {
- Js::ObjTypeSpecFldInfo* propertyOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propertyOpId);
- Js::PropertyId propertyId = propertyOpInfo->GetPropertyId();
- // It's okay for an equivalent type check to be registered as a guard against a property becoming read-only. This transpires if, there is
- // a different monomorphic type check upstream, which guarantees the actual type of the object needed for the hard-coded type transition,
- // but it is later followed by a sequence of polymorphic inline caches, which do not have that type in the type set. At the beginning of
- // that sequence we'll emit an equivalent type check to verify that the actual type has relevant properties on appropriate slots. Then in
- // the dead store pass we'll walk upwards and encounter this check first, thus we'll drop the guarded properties accumulated thus far
- // (including the one being added) on that check.
- // AssertMsg(!propertyOpInfo->IsBeingAdded() || !isEquivalentTypeGuard, "Why do we have an equivalent type check protecting a property add?");
- if (propertyOpInfo->IsBeingAdded() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->HasFixedValue())
- {
- // Equivalent object type spec only supports fixed fields on prototypes. This is to simplify the slow type equivalence check.
- // See JavascriptOperators::CheckIfTypeIsEquivalent.
- Assert(!propertyOpInfo->IsPoly() || (!propertyOpInfo->HasFixedValue() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->UsesAccessor()));
- if (entryPointInfo->HasSharedPropertyGuard(propertyId))
- {
- link(propertyId);
- linked = true;
- }
- else
- {
- #if TRUE
- AssertMsg(false, "Did we fail to create a shared property guard for a guarded property?");
- #else
- if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
- {
- if (!this->m_func->m_workItem->GetEntryPoint()->HasSharedPropertyGuard(propertyId))
- {
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"ObjTypeStore: function %s(%s): no shared property guard for property % (%u).\n",
- this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
- Output::Flush();
- }
- }
- #endif
- }
- }
- }
- NEXT_BITSET_IN_SPARSEBV;
- return linked;
- }
- void
- Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
- {
- Js::PropertyGuard* guard = propertySymOpnd->GetPropertyGuard();
- Assert(guard != nullptr);
- if (!DoLazyFixedDataBailout(this->m_func))
- {
- Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
- IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
- IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New((void*)guard->GetAddressOfValue(), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
- InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
- }
- else
- {
- this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId());
- }
- }
IR::Instr*
Lowerer::GeneratePropertyGuardCheckBailoutAndLoadType(IR::Instr *insertInstr)
{
    // Expands insertInstr into: a property-guard check (bailing out if the guard
    // was invalidated), an object test that loads the base's type into dst, and
    // a fallback path that loads the static number type when the base is not an
    // object. The original instruction itself is converted into the bailout.
    // Returns the instruction preceding the expansion so lowering can resume there.
    IR::Instr* instrPrev = insertInstr->m_prev;

    IR::Opnd* numberTypeOpnd = IR::AddrOpnd::New(insertInstr->m_func->GetScriptContext()->GetLibrary()->GetNumberTypeStatic(), IR::AddrOpndKindDynamicType, insertInstr->m_func);
    IR::PropertySymOpnd* propertySymOpnd = insertInstr->GetSrc1()->AsPropertySymOpnd();

    // Helper (cold) labels for the bailout and the number-type fallback.
    IR::LabelInstr* labelBailout = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);
    IR::LabelInstr* labelContinue = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func);
    IR::LabelInstr* loadNumberTypeLabel = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);

    // Guard check first: invalidated guard jumps to labelBailout.
    GeneratePropertyGuardCheck(insertInstr, propertySymOpnd, labelBailout);

    // Object path: load the object's type into dst; non-objects jump to
    // loadNumberTypeLabel.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    GenerateObjectTestAndTypeLoad(insertInstr, baseOpnd, insertInstr->GetDst()->AsRegOpnd(), loadNumberTypeLabel);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Number path: dst = static number type.
    insertInstr->InsertBefore(loadNumberTypeLabel);
    this->m_lowererMD.CreateAssign(insertInstr->GetDst(), numberTypeOpnd, insertInstr);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Turn the remaining original instruction into the bailout itself; both
    // success paths branch around it to labelContinue.
    insertInstr->InsertBefore(labelBailout);
    insertInstr->InsertAfter(labelContinue);
    insertInstr->FreeSrc1();
    insertInstr->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(insertInstr);

    return instrPrev;
}
void
Lowerer::GenerateNonWritablePropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    IR::Opnd *opnd;
    IR::Instr *instr;

    // Generate a check for non-writable properties, on the model of the work done by PatchPutValue etc.
    // Inline the check on the bit in the prototype object's type. If that check fails, call the helper.
    // If the helper finds a non-writable property, bail out, as we're counting on being able to add the property.

    Js::Type *typeWithoutProperty = propertySymOpnd->GetInitialType();
    Assert(typeWithoutProperty);
    Js::RecyclableObject *protoObject = typeWithoutProperty->GetPrototype();
    Assert(protoObject);

    // s1 = MOV [proto->type].ptr
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    opnd = IR::MemRefOpnd::New((char*)protoObject + Js::RecyclableObject::GetOffsetOfType(), TyMachReg,
        this->m_func, IR::AddrOpndKindDynamicObjectTypeRef);
    m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);

    // Fast path: if the "all writable data properties" bit is set on the proto's
    // type, no helper call is needed.
    // TEST [s1->areThisAndPrototypesEnsuredToHaveOnlyWritableDataProperties].u8, 1
    // JNE $continue
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    opnd = IR::IndirOpnd::New(typeOpnd, (int32)Js::Type::OffsetOfWritablePropertiesFlag(), TyUint8, this->m_func);
    InsertTestBranch(opnd, IR::IntConstOpnd::New(1, TyUint8, this->m_func), Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // $Lhelper:
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instrInsert->InsertBefore(labelHelper);

    // Slow path: ask the runtime to walk the proto chain.
    // s2 = CALL DoProtoCheck, prototype
    opnd = IR::AddrOpnd::New(protoObject, IR::AddrOpndKindDynamicVar, this->m_func, true);
    m_lowererMD.LoadHelperArgument(instrInsert, opnd);

    opnd = IR::HelperCallOpnd::New(IR::HelperCheckProtoHasNonWritable, this->m_func);
    instr = IR::Instr::New(Js::OpCode::Call, IR::RegOpnd::New(TyUint8, this->m_func), opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    opnd = instr->GetDst();
    m_lowererMD.LowerCall(instr, 0);

    // Helper returned false (found a non-writable property) => bail out.
    InsertTestBranch(opnd, opnd, Js::OpCode::BrEq_A, labelBailOut, instrInsert);

    // $Lcontinue:
    instrInsert->InsertBefore(labelContinue);
}
- void
- Lowerer::GenerateAdjustSlots(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, Js::Type* initialType, Js::Type* finalType)
- {
- IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- bool adjusted = this->GenerateAdjustBaseSlots(instrInsert, baseOpnd, initialType, finalType);
- if (!adjusted)
- {
- baseOpnd->Free(m_func);
- }
- }
bool
Lowerer::GenerateAdjustBaseSlots(IR::Instr *instrInsert, IR::RegOpnd *baseOpnd, Js::Type* initialType, Js::Type* finalType)
{
    // Possibly allocate new slot capacity to accommodate a type transition.
    // Returns true if a call to the AdjustSlots helper was emitted (consuming
    // baseOpnd as an argument), false if the existing capacity suffices.
    Js::DynamicType *oldType = static_cast<Js::DynamicType*>(initialType);
    Assert(oldType);
    Js::DynamicType *newType = static_cast<Js::DynamicType*>(finalType);
    Assert(newType);
    AssertMsg(Js::DynamicObject::IsTypeHandlerCompatibleForObjectHeaderInlining(oldType->GetTypeHandler(), newType->GetTypeHandler()),
        "Incompatible typeHandler transition?");
    int oldCount = oldType->GetTypeHandler()->GetSlotCapacity();
    int newCount = newType->GetTypeHandler()->GetSlotCapacity();
    Js::PropertyIndex inlineSlotCapacity = oldType->GetTypeHandler()->GetInlineSlotCapacity();
    Js::PropertyIndex newInlineSlotCapacity = newType->GetTypeHandler()->GetInlineSlotCapacity();
    if (oldCount >= newCount || newCount <= inlineSlotCapacity)
    {
        // Already have enough slot capacity. Do nothing.
        return false;
    }

    // Call AdjustSlots using the new counts. Because AdjustSlots uses the "no dispose" flavor of alloc,
    // no implicit calls are possible, and we don't need an implicit call check and bailout.
    // CALL AdjustSlots, instance, newInlineSlotCapacity, newAuxSlotCapacity
    // Helper arguments are pushed in reverse order (3rd, 2nd, 1st).
    //3rd Param
    Assert(newCount > newInlineSlotCapacity);
    const int newAuxSlotCapacity = newCount - newInlineSlotCapacity;
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newAuxSlotCapacity, TyInt32, this->m_func));
    //2nd Param
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newInlineSlotCapacity, TyUint16, this->m_func));
    //1st Param (instance)
    m_lowererMD.LoadHelperArgument(instrInsert, baseOpnd);
    //CALL HelperAdjustSlots
    IR::Opnd *opnd = IR::HelperCallOpnd::New(IR::HelperAdjustSlots, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(opnd);
    instrInsert->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);
    return true;
}
- void
- Lowerer::GenerateFieldStoreWithTypeChange(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd, Js::Type* initialType, Js::Type* finalType)
- {
- // Adjust instance slots, if necessary.
- this->GenerateAdjustSlots(instrStFld, propertySymOpnd, initialType, finalType);
- // We should never add properties to objects of static types.
- Assert(Js::DynamicType::Is(finalType->GetTypeId()));
- // Let's pin the final type to be sure its alive when we try to do the type transition.
- PinTypeRef(finalType, finalType, instrStFld, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
- IR::Opnd *finalTypeOpnd = IR::AddrOpnd::New(finalType, IR::AddrOpndKindDynamicType, instrStFld->m_func, true);
- // Set the new type.
- IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(instrStFld->m_func);
- IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrStFld->m_func);
- this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrStFld);
- // Now do the store.
- GenerateDirectFieldStore(instrStFld, propertySymOpnd);
- }
bool
Lowerer::GenerateStFldWithCachedFinalType(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd)
{
    // This function tries to treat a sequence of add-property stores as a single type transition.
    // The store must carry both an initial and a final cached type; on success the
    // store is lowered to: initial-type check -> slot adjust + type switch + direct
    // store, with a bailout path for a type mismatch. Always returns true.
    Assert(propertySymOpnd == instrStFld->GetDst()->AsPropertySymOpnd());
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasFinalType());
    Assert(propertySymOpnd->HasInitialType());

    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;

    AssertMsg(!propertySymOpnd->IsTypeChecked(), "Why are we doing a type transition when we have the type we want?");

    // If the initial type must be checked here, do it.
    Assert(instrStFld->HasBailOutInfo());
    labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    GeneratePropertyGuardCheck(instrStFld, propertySymOpnd, labelBailOut);
    GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, nullptr/*typeOpnd*/, labelBailOut);

    // Do the type transition.
    GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetFinalType());
    instrStFld->FreeSrc1();
    instrStFld->FreeDst();

    // Insert the bailout and let the main path branch around it.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrStFld->InsertBefore(instr);

    if (instrStFld->HasBailOutInfo())
    {
        // Reuse the (now operand-less) store instruction as the bailout itself.
        Assert(labelBailOut != nullptr);
        instrStFld->InsertBefore(labelBailOut);
        instrStFld->InsertAfter(labelDone);
        instrStFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrStFld);
    }
    else
    {
        // No bailout info: the original instruction is simply discarded.
        instrStFld->InsertAfter(labelDone);
        instrStFld->Remove();
    }

    return true;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerScopedStFld
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerScopedStFld(IR::Instr * stFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache,
- bool withPropertyOperationFlags, Js::PropertyOperationFlags flags)
- {
- IR::Instr *instrPrev = stFldInstr->m_prev;
- if (withPropertyOperationFlags)
- {
- m_lowererMD.LoadHelperArgument(stFldInstr,
- IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
- }
- if(!withInlineCache)
- {
- LoadScriptContext(stFldInstr);
- }
- // Pass the default instance
- IR::Opnd *src = stFldInstr->UnlinkSrc2();
- m_lowererMD.LoadHelperArgument(stFldInstr, src);
- // Pass the value to store
- src = stFldInstr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(stFldInstr, src);
- // Pass the property sym to store to
- IR::Opnd *dst = stFldInstr->UnlinkDst();
- AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
- this->LoadPropertySymAsArgument(stFldInstr, dst);
- if (withInlineCache)
- {
- AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
- m_lowererMD.LoadHelperArgument(
- stFldInstr,
- IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
- // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
- this->m_lowererMD.LoadHelperArgument(stFldInstr, this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd()));
- m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
- }
- m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerLoadVar
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerLoadVar(IR::Instr *instr, IR::Opnd *opnd)
- {
- instr->SetSrc1(opnd);
- return m_lowererMD.ChangeToAssign(instr);
- }
- IR::Instr *
- Lowerer::LoadHelperTemp(IR::Instr * instr, IR::Instr * instrInsert)
- {
- IR::Opnd *tempOpnd;
- IR::Opnd *dst = instr->GetDst();
- AssertMsg(dst != nullptr, "Always expect a dst for these.");
- AssertMsg(instr->dstIsTempNumber, "Should only be loading temps here");
- Assert(dst->IsRegOpnd());
- StackSym * tempNumberSym = this->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
- IR::Instr *load = this->m_lowererMD.LoadStackAddress(tempNumberSym);
- instrInsert->InsertBefore(load);
- tempOpnd = load->GetDst();
- m_lowererMD.LoadHelperArgument(instrInsert, tempOpnd);
- return load;
- }
void
Lowerer::LoadArgumentCount(IR::Instr *const instr)
{
    // Lowers an instruction whose dst receives the call's actual argument count.
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Argument count including 'this'
        // For inlined functions the count is known at JIT time, so emit a constant.
        instr->SetSrc1(IR::IntConstOpnd::New(instr->m_func->actualCount, TyUint32, instr->m_func, true));
        LowererMD::ChangeToAssign(instr);
    }
    else if (instr->m_func->GetJnFunction()->IsGenerator())
    {
        // Generators read the count from the call info slot on the frame.
        IR::SymOpnd* symOpnd = LoadCallInfo(instr);
        instr->SetSrc1(symOpnd);
        LowererMD::ChangeToAssign(instr);
    }
    else
    {
        // Everything else defers to the machine-dependent lowering.
        m_lowererMD.LoadArgumentCount(instr);
    }
}
void
Lowerer::LoadStackArgPtr(IR::Instr *const instr)
{
    // Lowers an instruction whose dst receives a pointer to the stack arguments
    // (past 'this').
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Address of argument after 'this'
        const auto firstRealArgStackSym = instr->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
        // Advance the sym's offset by one machine pointer to skip the 'this' slot.
        // NOTE(review): this mutates the sym's offset in place — presumably safe
        // because the slot is only consumed here; confirm against other uses.
        this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr);
        instr->SetSrc1(IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, instr->m_func));
        // LEA rather than a load: we want the slot's address, not its contents.
        LowererMD::ChangeToLea(instr);
    }
    else
    {
        m_lowererMD.LoadStackArgPtr(instr);
    }
}
- void
- Lowerer::LoadArgumentsFromFrame(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(instr->GetDst());
- Assert(!instr->GetSrc1());
- Assert(!instr->GetSrc2());
- if(instr->m_func->IsInlinee())
- {
- // Use the inline object meta arg slot for the arguments object
- instr->SetSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
- LowererMD::ChangeToAssign(instr);
- }
- else
- {
- m_lowererMD.LoadArgumentsFromFrame(instr);
- }
- }
- IR::Instr *
- Lowerer::LowerUnaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr *instrPrev;
- IR::Opnd *src1 = instr->UnlinkSrc1();
- instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
- m_lowererMD.ChangeToHelperCall(instr, helperMethod);
- return instrPrev;
- }
- // helper takes memory context as second argument
- IR::Instr *
- Lowerer::LowerUnaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr *instrPrev;
- instrPrev = LoadScriptContext(instr);
- return this->LowerUnaryHelper(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
- return this->LowerUnaryHelperMem(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2, "Expected a binary instruction...");
- m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
- return this->LowerBinaryHelperMem(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
- IR::Instr * instrFirst;
- IR::Opnd * tempOpnd;
- if (instr->dstIsTempNumber)
- {
- instrFirst = this->LoadHelperTemp(instr, instr);
- }
- else
- {
- tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
- instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
- }
- this->LowerUnaryHelperMem(instr, helperMethod);
- return instrFirst;
- }
- IR::Instr *
- Lowerer::LowerUnaryHelperMemWithTemp2(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
- if (instr->dstIsTempNumber)
- {
- IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
- this->LowerUnaryHelperMem(instr, helperMethodWithTemp);
- return instrFirst;
- }
- return this->LowerUnaryHelperMem(instr, helperMethod);
- }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBinaryHelper
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBinaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    // Pushes src2 then src1 as helper arguments and rewrites instr into a call
    // to helperMethod. Returns the first instruction of the expansion.
    // The only case where this would still be null when we return is when
    // helperMethod == HelperOP_CmSrEq_EmptyString; in which case we ignore
    // instrPrev.
    IR::Instr *instrPrev = nullptr;

    AssertMsg((Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1 && !instr->GetDst()) ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2 ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementU ||
        instr->m_opcode == Js::OpCode::InvalCachedScope, "Expected a binary instruction...");

    // src2 is always unlinked, but deliberately NOT passed to the empty-string
    // comparison helper, which takes only one operand.
    IR::Opnd *src2 = instr->UnlinkSrc2();
    if (helperMethod != IR::HelperOP_CmSrEq_EmptyString)
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src2);

    IR::Opnd *src1 = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, src1);

    m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    return instrPrev;
}
- // helper takes memory context as third argument
- IR::Instr *
- Lowerer::LowerBinaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr *instrPrev;
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
- Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2, "Expected a binary instruction...");
- instrPrev = LoadScriptContext(instr);
- return this->LowerBinaryHelper(instr, helperMethod);
- }
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
- IR::Instr * instrFirst;
- IR::Opnd * tempOpnd;
- if (instr->dstIsTempNumber)
- {
- instrFirst = this->LoadHelperTemp(instr, instr);
- }
- else
- {
- tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
- instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
- }
- this->LowerBinaryHelperMem(instr, helperMethod);
- return instrFirst;
- }
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithTemp2(
- IR::Instr *instr,
- IR::JnHelperMethod helperMethod,
- IR::JnHelperMethod helperMethodWithTemp
- )
- {
- AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
- if (instr->dstIsTempNumber && instr->GetDst() && instr->GetDst()->GetValueType().HasBeenNumber())
- {
- IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
- this->LowerBinaryHelperMem(instr, helperMethodWithTemp);
- return instrFirst;
- }
- return this->LowerBinaryHelperMem(instr, helperMethod);
- }
IR::Instr *
Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
{
    // Fast path for "left = left + right" string concatenation when the left
    // operand is dead after this instruction: if left is a non-finalized
    // CompoundString with room in its last block and right is a single-character
    // finalized string, append the character in place. All other cases (and the
    // fast path's guard failures) fall back to the Op_AddLeftDead helper.
    IR::Opnd * opndLeft;
    IR::Opnd * opndRight;

    opndLeft = instr->GetSrc1();
    opndRight = instr->GetSrc2();

    Assert(opndLeft && opndRight);

    bool generateFastPath = this->m_func->DoFastPaths();

    // Fast path requires: reg operands, dst aliased to left, both likely strings,
    // and left/right not the same register.
    if (!generateFastPath
        || !opndLeft->IsRegOpnd()
        || !opndRight->IsRegOpnd()
        || !instr->GetDst()->IsRegOpnd()
        || !opndLeft->GetValueType().IsLikelyString()
        || !opndRight->GetValueType().IsLikelyString()
        || !opndLeft->IsEqual(instr->GetDst()->AsRegOpnd())
        || opndLeft->IsEqual(opndRight))
    {
        return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
    }

    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel(false);

    // All fast-path checks are emitted before labelHelper; any failure jumps to
    // labelHelper, where the generic helper call (emitted at the end) lives.
    IR::LabelInstr *insertBeforeInstr = labelHelper;
    instr->InsertBefore(labelHelper);

    // Guard: left must be an object (not a tagged value)...
    if (!opndLeft->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
    }

    // ...and specifically a CompoundString (checked via vtable identity).
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);

    // Guard: right must be a string.
    GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);

    // left->m_charLength <= JavascriptArray::MaxCharLength
    IR::IndirOpnd *indirLeftCharLengthOpnd = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func);
    IR::RegOpnd *regLeftCharLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(regLeftCharLengthOpnd, indirLeftCharLengthOpnd, insertBeforeInstr);
    InsertCompareBranch(
        regLeftCharLengthOpnd,
        IR::IntConstOpnd::New(Js::JavascriptString::MaxCharLength, TyUint32, m_func),
        Js::OpCode::BrGt_A,
        labelHelper,
        insertBeforeInstr);

    // left->m_pszValue == NULL (!left->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);

    // right->m_pszValue != NULL (right->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);

    // if ownsLastBlock != 0
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfOwnsLastBlock(), TyUint8, m_func),
        IR::IntConstOpnd::New(0, TyUint8, m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);

    // if right->m_charLength == 1
    InsertCompareBranch(IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
        IR::IntConstOpnd::New(1, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);

    // if left->m_directCharLength == -1
    InsertCompareBranch(IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfDirectCharLength(), TyUint32, m_func),
        IR::IntConstOpnd::New(UINT32_MAX, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);

    // if lastBlockInfo.charLength < lastBlockInfo.charCapacity
    IR::IndirOpnd *indirCharLength = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo()+ (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharLength(), TyMachPtr, m_func);
    IR::RegOpnd *charLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charLengthOpnd, indirCharLength, insertBeforeInstr);
    InsertCompareBranch(charLengthOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharCapacity(), TyMachPtr, m_func), Js::OpCode::BrGe_A, labelHelper, insertBeforeInstr);

    // All guards passed: append right's single character in place.
    // load c= right->m_pszValue[0]
    IR::RegOpnd *pszValue0Opnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd *indirRightPszOpnd = IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(pszValue0Opnd, indirRightPszOpnd, insertBeforeInstr);
    IR::RegOpnd *charResultOpnd = IR::RegOpnd::New(TyUint16, this->m_func);
    InsertMove(charResultOpnd, IR::IndirOpnd::New(pszValue0Opnd, 0, TyUint16, this->m_func), insertBeforeInstr);

    // lastBlockInfo.buffer[blockCharLength] = c;
    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(baseOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoBuffer(), TyMachPtr, m_func), insertBeforeInstr);
    IR::IndirOpnd *indirBufferToStore = IR::IndirOpnd::New(baseOpnd, charLengthOpnd, (byte)Math::Log2(sizeof(wchar_t)), TyUint16, m_func);
    InsertMove(indirBufferToStore, charResultOpnd, insertBeforeInstr);

    // left->m_charLength++
    InsertAdd(false, indirLeftCharLengthOpnd, regLeftCharLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);

    // lastBlockInfo.charLength++
    InsertAdd(false, indirCharLength, indirCharLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);

    // Fast path done: skip the helper call.
    InsertBranch(Js::OpCode::Br, labelFallThrough, insertBeforeInstr);

    // Slow path (at labelHelper): the generic helper call.
    return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
}
- IR::Instr *
- Lowerer::LowerBinaryHelperMemWithTemp3(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp, IR::JnHelperMethod helperMethodLeftDead)
- {
- IR::Opnd *src1 = instr->GetSrc1();
- if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_isTempLastUse && !src1->GetValueType().IsNotString())
- {
- Assert(helperMethodLeftDead == IR::HelperOp_AddLeftDead);
- return LowerAddLeftDeadForString(instr);
- }
- else
- {
- return this->LowerBinaryHelperMemWithTemp2(instr, helperMethod, helperMethodWithTemp);
- }
- }
- StackSym *
- Lowerer::GetTempNumberSym(IR::Opnd * opnd, bool isTempTransferred)
- {
- AssertMsg(opnd->IsRegOpnd(), "Expected regOpnd");
- if (isTempTransferred)
- {
- StackSym * tempNumberSym = StackSym::New(TyMisc, m_func);
- this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
- return tempNumberSym;
- }
- StackSym * stackSym = opnd->AsRegOpnd()->m_sym;
- StackSym * tempNumberSym = stackSym->m_tempNumberSym;
- if (tempNumberSym == nullptr)
- {
- tempNumberSym = StackSym::New(TyMisc, m_func);
- this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
- stackSym->m_tempNumberSym = tempNumberSym;
- }
- return tempNumberSym;
- }
// Lowers a profiled LdElemI into a call to ProfilingHelpers::ProfiledLdElem.
// Helper arguments are pushed in reverse order (profileId, functionBody,
// index, base) before the instruction itself is turned into the call.
void Lowerer::LowerProfiledLdElemI(IR::JitProfilingInstr *const instr)
{
    Assert(instr);

    /*
        Var ProfilingHelpers::ProfiledLdElem(
            const Var base,
            const Var varIndex,
            FunctionBody *const functionBody,
            const ProfileId profileId)
    */

    Func *const func = instr->m_func;

    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    IR::IndirOpnd *const indir = instr->UnlinkSrc1()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    // When there is no index operand, the indir's (non-negative) offset names
    // the element; it must fit in a tagged int because it is passed as a Var.
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);

    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledLdElem, func));
    m_lowererMD.LowerCall(instr, 0);
}
// Lowers a profiled StElemI into a call to ProfilingHelpers::ProfiledStElem.
// Helper arguments are pushed in reverse order. When the operation flags are
// the default, a specialized helper is used so the flags argument is omitted.
void Lowerer::LowerProfiledStElemI(IR::JitProfilingInstr *const instr, const Js::PropertyOperationFlags flags)
{
    Assert(instr);

    /*
        void ProfilingHelpers::ProfiledStElem(
            const Var base,
            const Var varIndex,
            const Var value,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            const PropertyOperationFlags flags)
    */

    Func *const func = instr->m_func;

    IR::JnHelperMethod helper;
    if(flags == Js::PropertyOperation_None)
    {
        // Default flags: call the variant that bakes the flags in.
        helper = IR::HelperProfiledStElem_DefaultFlags;
    }
    else
    {
        helper = IR::HelperProfiledStElem;
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(flags, TyInt32, func, true));
    }
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    IR::IndirOpnd *const indir = instr->UnlinkDst()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    // When there is no index operand, the indir's (non-negative) offset names
    // the element; it must fit in a tagged int because it is passed as a Var.
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);

    instr->SetSrc1(IR::HelperCallOpnd::New(helper, func));
    m_lowererMD.LowerCall(instr, 0);
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerStElemI
- ///
- ///----------------------------------------------------------------------------
// Lowers StElemI (and the Init*Elem variants, per helperMethod) into a helper
// call. Generates any requested array bailout checks first, specializes the
// helper on the index operand's type and the source value's type, then pushes
// the helper arguments in reverse order and converts the instruction into the
// call. Returns the instruction preceding the original, so the caller can
// resume lowering from there.
IR::Instr *
Lowerer::LowerStElemI(IR::Instr * instr, Js::PropertyOperationFlags flags, bool isHelper, IR::JnHelperMethod helperMethod)
{
    IR::Instr *instrPrev = instr->m_prev;

    // Profiled stores take a separate lowering path.
    if (instr->IsJitProfilingInstr())
    {
        Assert(!isHelper);
        LowerProfiledStElemI(instr->AsJitProfilingInstr(), flags);
        return instrPrev;
    }

    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *newDst = nullptr;
    IRType srcType = src1->GetType();

    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");

#if !FLOATVAR
    if (dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray() && src1->IsRegOpnd())
    {
        // We allow the source of typedArray StElem to be marked as temp, since we just need the value,
        // however if the array turns out to be a non-typed array, or the index isn't valid (the value is then stored as a property)
        // the temp needs to be boxed if it is a float. The BoxStackNumber helper will box JavascriptNumbers
        // which are on the stack.

        // regVar = BoxStackNumber(src1, scriptContext)
        IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
        newInstr->SetDst(regVar);
        newInstr->SetSrc1(src1);
        instr->InsertBefore(newInstr);
        LowerUnaryHelperMem(newInstr, IR::HelperBoxStackNumber);

        // MOV src1, regVar
        newInstr = IR::Instr::New(Js::OpCode::Ld_A, src1, regVar, this->m_func);
        instr->InsertBefore(m_lowererMD.ChangeToAssign(newInstr));
    }
#endif

    if(instr->HasBailOutInfo())
    {
        // Peel off the array-related bailout kinds one at a time; each
        // Lower* call emits the corresponding check, and the bit is cleared
        // from the local copy to verify nothing unexpected remains.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if(bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutConvertedNativeArray)
        {
            // The helper's return value (captured in newDst below) is tested
            // after the call: zero skips the bailout, nonzero bails out.
            IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instr->InsertAfter(labelSkipBailOut);

            LowerOneBailOutKind(instr, IR::BailOutConvertedNativeArray, isHelper);

            newDst = IR::RegOpnd::New(TyMachReg, m_func);
            InsertTestBranch(newDst, newDst, Js::OpCode::BrEq_A, labelSkipBailOut, instr->m_next);
        }
    }

    instr->UnlinkDst();
    instr->UnlinkSrc1();

    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    Assert(
        helperMethod == IR::HelperOP_InitElemGetter ||
        helperMethod == IR::HelperOP_InitElemSetter ||
        helperMethod == IR::HelperOP_InitComputedProperty ||
        helperMethod == IR::HelperOp_SetElementI ||
        helperMethod == IR::HelperOp_InitClassMemberComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberGetComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberSetComputedName
        );

    // Specialize the helper on the index type (Int32/Uint32) and the source
    // value type (Var/Int32/Float64).
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_Int32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_Int32 :
                IR::HelperOp_SetNativeFloatElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_UInt32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_UInt32 :
                IR::HelperOp_SetNativeFloatElementI_UInt32;
        }
        else
        {
            Assert(FALSE);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)dst->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }

        if (srcType != TyVar)
        {
            helperMethod =
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI : IR::HelperOp_SetNativeFloatElementI;
        }
    }

    // Push helper arguments in reverse order; a float source is passed via
    // the double argument convention instead of a regular slot.
    if (srcType == TyFloat64)
    {
        m_lowererMD.LoadDoubleHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr,
        IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    LoadScriptContext(instr);
    if (srcType != TyFloat64)
    {
        m_lowererMD.LoadHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);

    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);

    dst->Free(this->m_func);

    if (newDst)
    {
        // Capture the helper's return value for the converted-native-array
        // bailout test inserted above.
        instr->SetDst(newDst);
    }
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);

    return instrPrev;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerLdElemI
- ///
- ///----------------------------------------------------------------------------
// Lowers LdElemI (and GetMethodElement/TypeofElem, per helperMethod) into a
// helper call, specializing the helper on the index operand's type and the
// destination type, and further on the base array's profiled value type.
// Pushes helper arguments in reverse order and converts the instruction into
// the call. Returns the instruction preceding the original.
IR::Instr *
Lowerer::LowerLdElemI(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    IR::Instr *instrPrev = instr->m_prev;

    // Profiled loads take a separate lowering path.
    if(instr->IsJitProfilingInstr())
    {
        Assert(helperMethod == IR::HelperOp_GetElementI);
        Assert(!isHelper);
        LowerProfiledLdElemI(instr->AsJitProfilingInstr());
        return instrPrev;
    }

    if (!isHelper && instr->DoStackArgsOpt(this->m_func))
    {
        IR::LabelInstr * labelLdElem = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
        // Pass in null for labelFallThru to only generate the LdHeapArgument call
        GenerateFastArgumentsLdElemI(instr, labelLdElem, nullptr);
        instr->InsertBefore(labelLdElem);
    }

    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd");
    IR::IndirOpnd *indirOpnd = src1->AsIndirOpnd();
    bool loadScriptContext = true;
    IRType dstType = instr->GetDst()->GetType();

    IR::Opnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    // Specialize the helper on a machine-int index (Int32/Uint32) and the
    // destination type (Var/Int32/Float64).
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        Assert(indexOpnd->GetType() == TyUint32 || indexOpnd->GetType() == TyInt32);
        switch (helperMethod)
        {
        case IR::HelperOp_GetElementI:
            if (indexOpnd->GetType() == TyUint32)
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_UInt32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_UInt32 :
                    IR::HelperOp_GetNativeFloatElementI_UInt32;
            }
            else
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_Int32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_Int32 :
                    IR::HelperOp_GetNativeFloatElementI_Int32;
            }
            break;
        case IR::HelperOp_GetMethodElement:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_GetMethodElement_UInt32 : IR::HelperOp_GetMethodElement_Int32;
            break;
        case IR::HelperOp_TypeofElem:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_TypeofElem_UInt32 : IR::HelperOp_TypeofElem_Int32;
            break;
        default:
            Assert(false);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }

        if (dstType != TyVar)
        {
            // The native-element helpers do not take a script context.
            loadScriptContext = false;
            helperMethod =
                dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI : IR::HelperOp_GetNativeFloatElementI;
        }
    }

    // Jitted loop bodies have volatile information about values created outside the loop, so don't update array creation site
    // profile data from jitted loop bodies
    if(!m_func->IsLoopBody())
    {
        // For likely-Array bases whose elements are not ints, use the
        // "expecting" helper variants that can update array profile data.
        const ValueType baseValueType(indirOpnd->GetBaseOpnd()->GetValueType());
        if( baseValueType.IsLikelyObject() &&
            baseValueType.GetObjectType() == ObjectType::Array &&
            !baseValueType.HasIntElements())
        {
            switch(helperMethod)
            {
                case IR::HelperOp_GetElementI:
                    helperMethod =
                        baseValueType.HasFloatElements()
                            ? IR::HelperOp_GetElementI_ExpectingNativeFloatArray
                            : IR::HelperOp_GetElementI_ExpectingVarArray;
                    break;

                case IR::HelperOp_GetElementI_UInt32:
                    helperMethod =
                        baseValueType.HasFloatElements()
                            ? IR::HelperOp_GetElementI_UInt32_ExpectingNativeFloatArray
                            : IR::HelperOp_GetElementI_UInt32_ExpectingVarArray;
                    break;

                case IR::HelperOp_GetElementI_Int32:
                    helperMethod =
                        baseValueType.HasFloatElements()
                            ? IR::HelperOp_GetElementI_Int32_ExpectingNativeFloatArray
                            : IR::HelperOp_GetElementI_Int32_ExpectingVarArray;
                    break;
            }
        }
    }

    // Push helper arguments in reverse order.
    if (loadScriptContext)
    {
        LoadScriptContext(instr);
    }

    m_lowererMD.LoadHelperArgument(instr, indexOpnd);

    IR::Opnd *baseOpnd = indirOpnd->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);

    src1->Free(this->m_func);

    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);

    return instrPrev;
}
- void Lowerer::LowerLdLen(IR::Instr *const instr, const bool isHelper)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::LdLen_A);
- // LdLen has persisted to this point for the sake of pre-lower opts.
- // Turn it into a LdFld of the "length" property.
- // This is normally a load of the internal "length" of an Array, so it probably doesn't benefit
- // from inline caching.
- // Changing the opcode to LdFld is done in LowerLdFld and needs to remain that way to take into
- // account ProfiledLdLen_A
- IR::RegOpnd * baseOpnd = instr->UnlinkSrc1()->AsRegOpnd();
- PropertySym* fieldSym = PropertySym::FindOrCreate(baseOpnd->m_sym->m_id, Js::PropertyIds::length, (uint32)-1, (uint)-1, PropertyKindData, m_func);
- baseOpnd->Free(this->m_func);
- instr->SetSrc1(IR::SymOpnd::New(fieldSym, TyVar, m_func));
- LowerLdFld(instr, IR::HelperOp_GetProperty, IR::HelperOp_GetProperty, false, nullptr, isHelper);
- }
// Lowers an asm.js typed-array-view load (Ld*ArrViewElem). When the access
// may be out of bounds — a dynamic index, or a constant offset that
// AccessNeedsBoundCheck says needs checking — a bound-checked sequence is
// generated; otherwise the load is emitted directly, since any constant
// access below 0x10000 is safe. Returns the instruction preceding the
// original.
IR::Instr *
Lowerer::LowerLdArrViewElem(IR::Instr * instr)
{
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdFloat32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdFloat64ArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::RegOpnd * indexOpnd = instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd();

    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();   // array size, used by the bound check

    IR::Instr * done;
    if (indexOpnd || m_func->GetJnFunction()->GetAsmJsFunctionInfo()->AccessNeedsBoundCheck((uint32)src1->AsIndirOpnd()->GetOffset()))
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $load
        // $helper:
        // MOV dst, 0            ; out-of-bounds load yields 0
        // JMP $done
        // $load:
        // MOV dst, src1([arrayBuffer + indexOpnd])
        // $done:

        Assert(!dst->IsFloat32() || src1->IsFloat32());
        Assert(!dst->IsFloat64() || src1->IsFloat64());
        done = m_lowererMD.LowerAsmJsLdElemHelper(instr);
    }
    else
    {
        // any access below 0x10000 is safe
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        if (src2)
        {
            instr->FreeSrc2();
        }

        done = instr;
    }
    // Emit the actual load before "done" and drop the original instruction.
    InsertMove(dst, src1, done);

    instr->Remove();
    return instrPrev;
}
// Lowers a Memset instruction into a call to the Op_Memset helper:
//   helperRet = Op_Memset(base, index, value, size, scriptContext)
// Arguments are pushed in reverse order. helperRet receives the helper's
// result, which LowerMemOp tests to decide whether to bail out.
void
Lowerer::LowerMemset(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src1 = instr->UnlinkSrc1();

    Assert(dst->IsIndirOpnd());
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();

    Assert(baseOpnd);
    Assert(sizeOpnd);
    Assert(indexOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperOp_Memset;

    instr->SetDst(helperRet);
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    dst->Free(m_func);
}
// Lowers a Memcopy instruction into a call to the Op_Memcopy helper:
//   helperRet = Op_Memcopy(dstBase, dstIndex, srcBase, srcIndex, size, scriptContext)
// Arguments are pushed in reverse order. helperRet receives the helper's
// result, which LowerMemOp tests to decide whether to bail out.
void
Lowerer::LowerMemcopy(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src = instr->UnlinkSrc1();

    Assert(dst->IsIndirOpnd());
    Assert(src->IsIndirOpnd());

    IR::Opnd *dstBaseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *dstIndexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *srcBaseOpnd = src->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *srcIndexOpnd = src->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();

    Assert(sizeOpnd);
    Assert(dstBaseOpnd);
    Assert(dstIndexOpnd);
    Assert(srcBaseOpnd);
    Assert(srcIndexOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperOp_Memcopy;

    instr->SetDst(helperRet);
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcBaseOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstBaseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    dst->Free(m_func);
    src->Free(m_func);
}
// Lowers a Memset/Memcopy MemOp instruction. First peels off the array
// bailout kinds (emitting their checks), then wires up a bailout on the
// helper's return value (BailOutOnMemOpError), and finally dispatches to
// LowerMemset/LowerMemcopy, which turn the instruction into the helper call
// whose result lands in helperRet. Returns the instruction preceding the
// original.
IR::Instr *
Lowerer::LowerMemOp(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    IR::Instr *instrPrev = instr->m_prev;

    IR::RegOpnd* helperRet = IR::RegOpnd::New(TyInt8, instr->m_func);
    const bool isHelper = false;
    AssertMsg(instr->HasBailOutInfo(), "Expected bailOut on MemOp instruction");
    if (instr->HasBailOutInfo())
    {
        // Peel off each array-related bailout kind one at a time; each
        // Lower* call emits the corresponding check and the bit is cleared
        // from the local copy to verify nothing unexpected remains.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if (bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if (bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }

        AssertMsg(bailOutKind & IR::BailOutOnMemOpError, "Expected BailOutOnMemOpError on MemOp instruction");
        if (bailOutKind & IR::BailOutOnMemOpError)
        {
            // Insert or get continue label
            IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isHelper);
            Func *const func = instr->m_func;
            LowerOneBailOutKind(instr, IR::BailOutOnMemOpError, isHelper);
            IR::Instr *const insertBeforeInstr = instr->m_next;

            // Skip the bailout when the helper succeeded (returned nonzero):
            // cmp helperRet, 0
            // jne $skipBailOut
            InsertCompareBranch(
                helperRet,
                IR::IntConstOpnd::New(0, TyInt8, func),
                Js::OpCode::BrNeq_A,
                skipBailOutLabel,
                insertBeforeInstr);

            // (Bail out with IR::BailOutOnMemOpError)

            // $skipBailOut:
            bailOutKind ^= IR::BailOutOnMemOpError;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }

        instr->ClearBailOutInfo();
    }

    if (instr->m_opcode == Js::OpCode::Memset)
    {
        LowerMemset(instr, helperRet);
    }
    else if (instr->m_opcode == Js::OpCode::Memcopy)
    {
        LowerMemcopy(instr, helperRet);
    }
    return instrPrev;
}
// Lowers an asm.js typed-array-view store (St*ArrViewElem). When the access
// may be out of bounds — a dynamic index, or a constant offset that
// AccessNeedsBoundCheck says needs checking — a bound-checked sequence is
// generated (an out-of-bounds store is simply skipped); otherwise the store
// is emitted directly, since any constant access below 0x10000 (the minimum
// heap size) is safe. Returns the instruction preceding the original.
IR::Instr *
Lowerer::LowerStArrViewElem(IR::Instr * instr)
{
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::StUInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::StInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::StUInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::StInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::StUInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::StFloat32ArrViewElem ||
        instr->m_opcode == Js::OpCode::StFloat64ArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();   // array size, used by the bound check

    // type of dst is the type of array
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();

    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());
    IR::Instr * done;

    if (indexOpnd || m_func->GetJnFunction()->GetAsmJsFunctionInfo()->AccessNeedsBoundCheck((uint32)dst->AsIndirOpnd()->GetOffset()))
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $store
        // $helper:
        // JMP $done
        // $store:
        // MOV dst([arrayBuffer + indexOpnd]), src1
        // $done:
        done = m_lowererMD.LowerAsmJsStElemHelper(instr);
    }
    else
    {
        // any constant access below 0x10000 is safe, as that is the min heap size
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        done = instr;
        if (src2)
        {
            instr->FreeSrc2();
        }
    }
    // Emit the actual store before "done" and drop the original instruction.
    InsertMove(dst, src1, done);
    instr->Remove();
    return instrPrev;
}
- IR::Instr *
- Lowerer::LowerArrayDetachedCheck(IR::Instr * instr)
- {
- // TEST isDetached, isDetached
- // JE Done
- // Helper:
- // CALL Js::Throw::OutOfMemory
- // Done:
- Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
- IR::Instr * instrPrev = instr->m_prev;
- IR::Opnd * isDetachedOpnd = instr->UnlinkSrc1();
- Assert(isDetachedOpnd->IsIndirOpnd() || isDetachedOpnd->IsMemRefOpnd());
- IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
- IR::LabelInstr * helperLabel = InsertLabel(true, instr);
- InsertTestBranch(isDetachedOpnd, isDetachedOpnd, Js::OpCode::BrNotNeq_A, doneLabel, helperLabel);
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_OutOfMemoryError);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerDeleteElemI
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerDeleteElemI(IR::Instr * instr, bool strictMode)
- {
- IR::Instr *instrPrev;
- IR::Opnd *src1 = instr->UnlinkSrc1();
- AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on DeleteElementI");
- Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
- if (strictMode)
- {
- propertyOperationFlag = Js::PropertyOperation_StrictMode;
- }
- instrPrev = instr->m_prev;
- IR::JnHelperMethod helperMethod = IR::HelperOp_DeleteElementI;
- IR::Opnd *indexOpnd = src1->AsIndirOpnd()->UnlinkIndexOpnd();
- if (indexOpnd)
- {
- if (indexOpnd->GetType() == TyInt32)
- {
- helperMethod = IR::HelperOp_DeleteElementI_Int32;
- }
- else if (indexOpnd->GetType() == TyUint32)
- {
- helperMethod = IR::HelperOp_DeleteElementI_UInt32;
- }
- else
- {
- Assert(indexOpnd->GetType() == TyVar);
- }
- }
- else
- {
- // No index; the offset identifies the element.
- IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
- indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
- }
- m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
- LoadScriptContext(instr);
- m_lowererMD.LoadHelperArgument(instr, indexOpnd);
- IR::Opnd *baseOpnd = src1->AsIndirOpnd()->UnlinkBaseOpnd();
- m_lowererMD.LoadHelperArgument(instr, baseOpnd);
- src1->Free(this->m_func);
- m_lowererMD.ChangeToHelperCall(instr, helperMethod);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerBrB - lower 1-operand (boolean) conditional branch
- ///
- ///----------------------------------------------------------------------------
// Lowers BrOnEmpty/BrOnNotEmpty: calls helperMethod(src) to compute the
// condition as a boolean, then branches on the call's result (BrOnNotEmpty
// becomes BrTrue, BrOnEmpty becomes BrFalse). Returns the instruction
// preceding the original.
IR::Instr *
Lowerer::LowerBrBReturn(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);
    opndSrc = instr->UnlinkSrc1();
    instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to convert the unknown operand to boolean
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    // The branch's original dst receives the helper's result.
    opndDst = instr->UnlinkDst();
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->m_opcode = (instr->m_opcode == Js::OpCode::BrOnNotEmpty? Js::OpCode::BrTrue_A : Js::OpCode::BrFalse_A);
    instr->SetSrc1(opndDst);

    IR::Instr *loweredInstr;
    loweredInstr = this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, isHelper);

#if DBG
    // In helper blocks, mark the lowered branch as a helper-to-non-helper
    // transition so IR checks know this control flow is expected.
    if (isHelper)
    {
        if (!loweredInstr->IsBranchInstr())
        {
            loweredInstr = loweredInstr->GetNextBranchOrLabel();
        }
        if (loweredInstr->IsBranchInstr())
        {
            loweredInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
        }
    }
#endif

    return instrPrev;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerMultiBr
- /// - Lowers the instruction for dictionary look up(string case arms)
- ///
- ///----------------------------------------------------------------------------
// Lowers a MultiBr with string case arms: calls helperMethod(switchString,
// dictionary, funcStart, funcEnd) to look up the jump target in the branch
// dictionary, then multi-branches to the returned address. Returns the
// instruction preceding the original.
IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");

    // Push the args in reverse order.

    // The end and start labels for the function are used to guarantee
    // that the dictionary jump destinations haven't been tampered with, so we
    // will always jump to some location within this function
    IR::LabelOpnd * endFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncEndLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, endFuncOpnd);

    IR::LabelOpnd * startFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncStartLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, startFuncOpnd);

    //Load the address of the dictionary pair- Js::StringDictionaryWrapper
    IR::AddrOpnd* nativestringDictionaryOpnd = IR::AddrOpnd::New(instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary(), IR::AddrOpndKindDynamicMisc, this->m_func);
    m_lowererMD.LoadHelperArgument(instr, nativestringDictionaryOpnd);

    //Load the String passed in the Switch expression for look up - JavascriptString
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call for dictionary lookup.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachPtr,this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachPtr, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);

    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Multi-branch on the target address returned by the helper.
    instr->SetSrc1(instrCall->GetDst());
    m_lowererMD.LowerMultiBranch(instr);

    return instrPrev;
}
// Lowers a jump-table MultiBr: loads the jump table's address (via a data
// label that the table is later emitted at), indexes into it with indexOpnd
// to fetch the target address, and multi-branches to that address.
void
Lowerer::LowerJumpTableMultiBranch(IR::MultiBranchInstr * multiBrInstr, IR::RegOpnd * indexOpnd)
{
    Func * func = this->m_func;
    IR::Opnd * opndDst = IR::RegOpnd::New(TyMachPtr, func);
    //Move the native address of the jump table to a register
    IR::LabelInstr * nativeJumpTableLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    nativeJumpTableLabel->m_isDataLabel = true;
    IR::LabelOpnd * nativeJumpTable = IR::LabelOpnd::New(nativeJumpTableLabel, m_func);
    IR::RegOpnd * nativeJumpTableReg = IR::RegOpnd::New(TyMachPtr, func);
    m_lowererMD.CreateAssign(nativeJumpTableReg, nativeJumpTable, multiBrInstr);

    // Record the data label on the branch's jump table so the table can be
    // emitted at that label during encoding.
    BranchJumpTableWrapper * branchJumpTable = multiBrInstr->GetBranchJumpTable();
    AssertMsg(branchJumpTable->labelInstr == nullptr, "Should not be already assigned");
    branchJumpTable->labelInstr = nativeJumpTableLabel;

    //Indirect addressing @ target location in the jump table.
    //MOV eax, [nativeJumpTableReg + (offset * indirScale)]
    BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
    IR::Opnd * opndSrc = IR::IndirOpnd::New(nativeJumpTableReg, indexOpnd, indirScale, TyMachReg, this->m_func);

    IR::Instr * indirInstr = m_lowererMD.CreateAssign(opndDst, opndSrc, multiBrInstr);

    //MultiBr eax
    multiBrInstr->SetSrc1(indirInstr->GetDst());

    //Jump to the address at the target location in the jump table
    m_lowererMD.LowerMultiBranch(multiBrInstr);
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerMultiBr
- /// - Lowers the instruction for jump table(consecutive integer case arms)
- ///
- ///----------------------------------------------------------------------------
- IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr)
- {
- IR::Instr * instrPrev = instr->m_prev;
- AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
- AssertMsg(instr->IsBranchInstr() && instr->AsBranchInstr()->IsMultiBranch(), "Bad Instruction Lowering Call to LowerMultiBr()");
- IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
- IR::RegOpnd * offset = instr->UnlinkSrc1()->AsRegOpnd();
- LowerJumpTableMultiBranch(multiBrInstr, offset);
- return instrPrev;
- }
- IR::Instr* Lowerer::LowerBrBMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr * instrPrev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- StackSym * symDst;
- AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
- instrPrev = LoadScriptContext(instr);
- opndSrc = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, opndSrc);
- // Generate helper call to convert the unknown operand to boolean
- opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
- symDst = StackSym::New(TyVar, this->m_func);
- opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch on the result of the call
- instr->SetSrc1(opndDst);
- m_lowererMD.LowerCondBranch(instr);
- return instrPrev;
- }
- IR::Instr* Lowerer::LowerBrOnObject(IR::Instr * instr, IR::JnHelperMethod helperMethod)
- {
- IR::Instr * instrPrev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- StackSym * symDst;
- AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
- opndSrc = instr->UnlinkSrc1();
- instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
- // Generate helper call to check if the operand's type is object
- opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
- symDst = StackSym::New(TyVar, this->m_func);
- opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch on the result of the call
- instr->SetSrc1(opndDst);
- m_lowererMD.LowerCondBranch(instr);
- return instrPrev;
- }
// Lowers BrOnClassConstructor: calls helperMethod(src) to test whether the
// operand is a class constructor, then branches on the call's result.
// Returns the instruction preceding the original.
IR::Instr * Lowerer::LowerBrOnClassConstructor(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    opndSrc = instr->UnlinkSrc1();
    instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to check if the operand's type is object
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyVar, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->SetSrc1(opndDst);
    m_lowererMD.LowerCondBranch(instr);

    return instrPrev;
}
// Lower a two-operand comparison branch (BrC*) into a helper call plus a
// BrTrue/BrFalse on the helper's result. Optionally emits an inline fast path
// first (GenerateFastCondBranch); the helper call then serves as the slow path.
// Returns the instruction at which lowering should resume.
IR::Instr *
Lowerer::LowerBrCMem(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isHelper)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    bool inverted = false;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() != nullptr, "Expected 2 src opnds on BrC");

    // Try the inline fast path; if it fully lowered the branch, we're done.
    if (!noMathFastPath && !this->GenerateFastCondBranch(instr->AsBranchInstr(), &isHelper))
    {
        return instrPrev;
    }

    // Push the args in reverse order.
    // Some string-equality helpers don't take a script context / second arg.
    const bool loadScriptContext = !(helperMethod == IR::HelperOp_StrictEqualString || helperMethod == IR::HelperOp_StrictEqualEmptyString);
    const bool loadArg2 = !(helperMethod == IR::HelperOp_StrictEqualEmptyString);

    if (helperMethod == IR::HelperOp_NotEqual)
    {
        // Op_NotEqual() returns !Op_Equal(). It is faster to call Op_Equal() directly.
        helperMethod = IR::HelperOp_Equal;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    else if(helperMethod == IR::HelperOp_NotStrictEqual)
    {
        // Op_NotStrictEqual() returns !Op_StrictEqual(). It is faster to call Op_StrictEqual() directly.
        helperMethod = IR::HelperOp_StrictEqual;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }

    if (loadScriptContext)
        LoadScriptContext(instr);

    opndSrc = instr->UnlinkSrc2();
    if (loadArg2)
        m_lowererMD.LoadHelperArgument(instr, opndSrc);
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to compare the source operands.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachReg, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        // Bailout must know the branch sense was flipped relative to the helper.
        if (instr->HasBailOutInfo())
        {
            instr->GetBailOutInfo()->isInvertedBranch = true;
        }
        break;

    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotLe_A:
    case Js::OpCode::BrNotLt_A:
        // These call the positive-sense helper, so branch on "false" below.
        inverted = true;
        break;
    }

    // Branch if the result is "true".
    instr->SetSrc1(opndDst);
    instr->m_opcode = (inverted ? Js::OpCode::BrFalse_A : Js::OpCode::BrTrue_A);
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, !noMathFastPath && isHelper);

    return instrPrev;
}
- IR::Instr *
- Lowerer::LowerBrFncApply(IR::Instr * instr, IR::JnHelperMethod helperMethod) {
- IR::Instr * instrPrev = instr->m_prev;
- IR::Instr * instrCall;
- IR::HelperCallOpnd * opndHelper;
- IR::Opnd * opndSrc;
- IR::Opnd * opndDst;
- StackSym * symDst;
- AssertMsg(instr->GetSrc1() != nullptr, "Expected 1 src opnd on BrFncApply");
- LoadScriptContext(instr);
- opndSrc = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, opndSrc);
- // Generate helper call to compare the source operands.
- opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
- symDst = StackSym::New(TyMachReg, this->m_func);
- opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
- instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
- instr->InsertBefore(instrCall);
- instrCall = m_lowererMD.LowerCall(instrCall, 0);
- // Branch if the result is "true".
- instr->SetSrc1(opndDst);
- instr->m_opcode = Js::OpCode::BrTrue_A;
- m_lowererMD.LowerCondBranch(instr);
- return instrPrev;
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerBrProperty - lower branch-on-has/no-property
- ///
- ///----------------------------------------------------------------------------
// Lower BrOnHasProperty / BrOnNoProperty: call the given helper with the
// property sym (and script context), then branch on the helper's result —
// BrTrue for "has", BrFalse for "no". Returns the resume instruction.
IR::Instr *
Lowerer::LowerBrProperty(IR::Instr * instr, IR::JnHelperMethod helper)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;

    opndSrc = instr->UnlinkSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
              "Expected propertySym as src of BrProperty");

    // Args in reverse order: script context, then the property sym.
    instrPrev = LoadScriptContext(instr);
    this->LoadPropertySymAsArgument(instr, opndSrc);

    opndHelper = IR::HelperCallOpnd::New(helper, this->m_func);
    opndDst = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->SetSrc1(opndDst);
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrOnHasProperty:
        instr->m_opcode = Js::OpCode::BrTrue_A;
        break;
    case Js::OpCode::BrOnNoProperty:
        instr->m_opcode = Js::OpCode::BrFalse_A;
        break;
    default:
        AssertMsg(0, "Unknown opcode on BrProperty branch");
        break;
    }
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, false);

    return instrPrev;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerElementUndefined
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerElementUndefined(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- IR::Opnd *dst = instr->UnlinkDst();
- AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined");
- // Pass the property sym to store to
- this->LoadPropertySymAsArgument(instr, dst);
- m_lowererMD.ChangeToHelperCall(instr, helper);
- return instr;
- }
- IR::Instr *
- Lowerer::LowerElementUndefinedMem(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- // Pass script context
- IR::Instr * instrPrev = LoadScriptContext(instr);
- this->LowerElementUndefined(instr, helper);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerLdElemUndef(IR::Instr * instr)
- {
- if (this->m_func->GetJnFunction()->IsEval())
- {
- return LowerElementUndefinedMem(instr, IR::HelperOp_LdElemUndefDynamic);
- }
- else
- {
- return LowerElementUndefined(instr, IR::HelperOp_LdElemUndef);
- }
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerElementUndefinedScoped
- ///
- ///----------------------------------------------------------------------------
- IR::Instr *
- Lowerer::LowerElementUndefinedScoped(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- IR::Instr * instrPrev = instr->m_prev;
- // Pass the default instance
- IR::Opnd *src = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src);
- // Pass the property sym to store to
- IR::Opnd * dst = instr->UnlinkDst();
- AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined Scoped");
- this->LoadPropertySymAsArgument(instr, dst);
- m_lowererMD.ChangeToHelperCall(instr, helper);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerElementUndefinedScopedMem(IR::Instr * instr, IR::JnHelperMethod helper)
- {
- // Pass script context
- IR::Instr * instrPrev = LoadScriptContext(instr);
- this->LowerElementUndefinedScoped(instr, helper);
- return instrPrev;
- }
- void
- Lowerer::LowerStLoopBodyCount(IR::Instr* instr)
- {
- Js::LoopHeader *header = ((JsLoopBodyCodeGen*)m_func->m_workItem)->loopHeader;
- IR::MemRefOpnd *loopBodyCounterOpnd = IR::MemRefOpnd::New((BYTE*)(header) + header->GetOffsetOfProfiledLoopCounter(), TyUint32, this->m_func);
- instr->SetDst(loopBodyCounterOpnd);
- instr->ReplaceSrc1(instr->GetSrc1()->AsRegOpnd()->UseWithNewType(TyUint32, this->m_func));
- IR::AutoReuseOpnd(loopBodyCounterOpnd, this->m_func);
- m_lowererMD.ChangeToAssign(instr);
- return;
- }
#if !FLOATVAR
// Lower a slot store whose source may be a stack-allocated number: box the
// value via the BoxStackNumber helper first, then store the boxed var.
// Only compiled when FLOATVAR is off — presumably builds where numbers can
// live unboxed on the stack; TODO confirm against the FLOATVAR definition.
IR::Instr *
Lowerer::LowerStSlotBoxTemp(IR::Instr *stSlot)
{
    // regVar = BoxStackNumber(src, scriptContext)
    IR::RegOpnd * regSrc = stSlot->UnlinkSrc1()->AsRegOpnd();
    IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
    instr->SetDst(regVar);
    instr->SetSrc1(regSrc);
    stSlot->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperBoxStackNumber);

    // Store the boxed value with the regular slot-store lowering.
    stSlot->SetSrc1(regVar);
    return this->LowerStSlot(stSlot);
}
#endif
// Build the actual memory operand for a slot access that GlobOpt modeled as a
// property-sym opnd. Normally this is [slotArray + propertyId * slotSize], but
// stack-closure syms become direct stack accesses, and asm.js/TJ loop bodies
// use raw byte offsets.
IR::Opnd *
Lowerer::CreateOpndForSlotAccess(IR::Opnd * opnd)
{
    IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
    PropertySym * dstSym = symOpnd->m_sym->AsPropertySym();

    if (!m_func->IsLoopBody() &&
        m_func->DoStackFrameDisplay() &&
        (dstSym->m_stackSym == m_func->GetLocalClosureSym() || dstSym->m_stackSym == m_func->GetLocalFrameDisplaySym()))
    {
        // Stack closure syms are made to look like slot accesses for the benefit of GlobOpt, so that it can do proper
        // copy prop and implicit call bailout. But what we really want is local stack load/store.
        // Don't do this for loop body, though, since we don't have the value saved on the stack.
        return IR::SymOpnd::New(dstSym->m_stackSym, 0, TyMachReg, this->m_func);
    }

    // For non-asm.js, the property id is a slot index: scale it to a byte offset.
    int32 offset = dstSym->m_propertyId;
    if (!m_func->GetJnFunction()->GetIsAsmJsFunction())
    {
        offset = offset * TySize[opnd->GetType()];
    }
    if (m_func->IsTJLoopBody())
    {
        // TJ (template JIT) loop bodies address slots relative to the end of
        // the asm.js frame; rebase the offset accordingly.
        offset = offset - m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetTotalSizeinBytes();
    }

    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(symOpnd->CreatePropertyOwnerOpnd(m_func),
                                                  offset, opnd->GetType(), this->m_func);
    return indirOpnd;
}
- IR::Instr *
- Lowerer::LowerStSlot(IR::Instr *instr)
- {
- // StSlot stores the nth Var in the buffer pointed to by the property sym's stack sym.
- IR::Opnd * dstOpnd = instr->UnlinkDst();
- AssertMsg(dstOpnd, "Expected dst opnd on StSlot");
- IR::Opnd * dstNew = this->CreateOpndForSlotAccess(dstOpnd);
- dstOpnd->Free(this->m_func);
- instr->SetDst(dstNew);
- m_lowererMD.ChangeToWriteBarrierAssign(instr);
- return instr;
- }
// Lower StSlotChkUndecl: a slot store followed by an undecl (use-before-
// declaration) check on the stored slot.
IR::Instr *
Lowerer::LowerStSlotChkUndecl(IR::Instr *instrStSlot)
{
    Assert(instrStSlot->GetSrc2() != nullptr);

    // Src2 is required only to avoid dead store false positives during GlobOpt.
    instrStSlot->FreeSrc2();

    // Create the slot-access opnd for the check BEFORE LowerStSlot consumes and
    // replaces the instruction's dst — we need an independent copy to test.
    IR::Opnd *dstOpnd = this->CreateOpndForSlotAccess(instrStSlot->GetDst());
    IR::Instr *instr = this->LowerStSlot(instrStSlot);
    this->GenUndeclChk(instr, dstOpnd);
    return instr;
}
// Emit a call to the ProfileLdSlot helper to record the value just loaded from
// a slot, for the given profile id. Inserted before insertBeforeInstr; the
// helper call has no dst.
void Lowerer::LowerProfileLdSlot(IR::Opnd *const valueOpnd, Func *const ldSlotFunc, const Js::ProfileId profileId, IR::Instr *const insertBeforeInstr)
{
    Assert(valueOpnd);
    Assert(profileId != Js::Constants::NoProfileId);
    Assert(insertBeforeInstr);

    Func *const irFunc = insertBeforeInstr->m_func;

    // Helper args are pushed in reverse order: profileId, functionBody, value.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::Opnd::CreateProfileIdOpnd(profileId, irFunc));
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, CreateFunctionBodyOpnd(ldSlotFunc));
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, valueOpnd);

    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, irFunc);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfileLdSlot, irFunc));
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
- IR::Instr *
- Lowerer::LowerLdSlot(IR::Instr *instr)
- {
- IR::Opnd * srcOpnd = instr->UnlinkSrc1();
- AssertMsg(srcOpnd, "Expected src opnd on LdSlot");
- IR::Opnd * srcNew = this->CreateOpndForSlotAccess(srcOpnd);
- srcOpnd->Free(this->m_func);
- instr->SetSrc1(srcNew);
- m_lowererMD.ChangeToAssign(instr);
- return instr;
- }
- IR::Instr *
- Lowerer::LowerChkUndecl(IR::Instr *instr)
- {
- IR::Instr *instrPrev = instr->m_prev;
- this->GenUndeclChk(instr, instr->GetSrc1());
- instr->Remove();
- return instrPrev;
- }
// Emit, before instrInsert, a check of opnd against the "undecl" sentinel var:
// if equal, throw a use-before-declaration ReferenceError via helper;
// otherwise fall through to labelContinue.
void
Lowerer::GenUndeclChk(IR::Instr *instrInsert, IR::Opnd *opnd)
{
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // if (opnd != UndeclBlockVar) goto continue
    InsertCompareBranch(
        opnd,
        LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndeclBlockVar),
        Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // Cold path (helper label): throw JSERR_UseBeforeDeclaration.
    IR::LabelInstr *labelThrow = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    instrInsert->InsertBefore(labelThrow);

    IR::Instr *instr = IR::Instr::New(
        Js::OpCode::RuntimeReferenceError,
        IR::RegOpnd::New(TyMachReg, m_func),
        IR::IntConstOpnd::New(SCODE_CODE(JSERR_UseBeforeDeclaration), TyInt32, m_func),
        m_func);
    instrInsert->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);

    instrInsert->InsertBefore(labelContinue);
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerStElemC
- ///
- ///----------------------------------------------------------------------------
// Lower StElemC / StArrSegElemC: a store to an array element at a constant
// index. Depending on the base's value type this becomes either a helper call
// (profiling, or var stores into likely-native arrays) or a direct indexed
// store at a computed constant offset into the array's inline segment.
IR::Instr *
Lowerer::LowerStElemC(IR::Instr * stElem)
{
    IR::Instr *instrPrev = stElem->m_prev;
    IR::IndirOpnd * indirOpnd = stElem->GetDst()->AsIndirOpnd();

    // The index is a compile-time constant: either an int-const index opnd or
    // already folded into the indir's offset.
    IR::RegOpnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    Assert(!indexOpnd || indexOpnd->m_sym->IsIntConst());

    IntConstType value;
    if (indexOpnd)
    {
        value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
        indexOpnd->Free(this->m_func);
    }
    else
    {
        value = (IntConstType)indirOpnd->GetOffset();
    }

    if (stElem->IsJitProfilingInstr())
    {
        // Profiling build: store via the simple store helper instead of inline code.
        Assert(stElem->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
        m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
        const auto meth = stElem->m_opcode == Js::OpCode::StElemC ? IR::HelperSimpleStoreArrayHelper : IR::HelperSimpleStoreArraySegHelper;
        stElem->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));

        m_lowererMD.LoadHelperArgument(stElem, IR::IntConstOpnd::New(value, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());

        stElem->UnlinkDst()->Free(m_func);
        m_lowererMD.LowerCall(stElem, 0);
        return instrPrev;
    }

    IntConstType base;
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsLikelyNativeArray())
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);

        // Set up the bailout path if this store carries bailout info: split the
        // bailout off into its own instruction reachable via labelBailOut.
        IR::LabelInstr *labelBailOut = nullptr;
        IR::Instr *instrBailOut = nullptr;
        if (stElem->HasBailOutInfo())
        {
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrBailOut = stElem;
            stElem = IR::Instr::New(instrBailOut->m_opcode, m_func);
            instrBailOut->TransferTo(stElem);
            instrBailOut->InsertBefore(stElem);

            IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertBranch(Js::OpCode::Br, labelDone, instrBailOut);

            instrBailOut->InsertBefore(labelBailOut);
            instrBailOut->InsertAfter(labelDone);

            instrBailOut->m_opcode = Js::OpCode::BailOut;
            GenerateBailOut(instrBailOut);
        }

        if (!baseValueType.IsObject())
        {
            // Likely native array: do a vtable check and bail if it fails.
            Assert(labelBailOut);
            GenerateArrayTest(baseOpnd, labelBailOut, labelBailOut, stElem, true);
        }

        if (stElem->GetSrc1()->GetType() == TyVar)
        {
            // Storing a non-specialized value. This may cause array conversion, which invalidates all the code
            // that depends on the array check we've already done.
            // Call a helper that returns the type ID of the resulting array, check it here against the one we
            // expect, and bail if it fails.
            Assert(labelBailOut);

            // Call a helper to (try and) unbox the var and store it.
            // If we had to convert the array to do the store, we'll bail.
            LoadScriptContext(stElem);
            m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());

            IR::Opnd *indexOpnd = IR::IntConstOpnd::New(value, TyUint32, m_func);
            m_lowererMD.LoadHelperArgument(stElem, indexOpnd);
            m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());

            IR::JnHelperMethod helperMethod;
            if (baseValueType.HasIntElements())
            {
                helperMethod = IR::HelperScrArr_SetNativeIntElementC;
            }
            else
            {
                helperMethod = IR::HelperScrArr_SetNativeFloatElementC;
            }

            IR::Instr *instrInsertBranch = stElem->m_next;
            IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyUint32, m_func);
            stElem->ReplaceDst(typeIdOpnd);
            m_lowererMD.ChangeToHelperCall(stElem, helperMethod);

            // Bail if the array's type id changed (i.e. the array was converted).
            InsertCompareBranch(
                typeIdOpnd,
                IR::IntConstOpnd::New(
                    baseValueType.HasIntElements() ?
                        Js::TypeIds_NativeIntArray : Js::TypeIds_NativeFloatArray, TyUint32, m_func),
                Js::OpCode::BrNeq_A,
                labelBailOut,
                instrInsertBranch);

            return instrPrev;
        }
        else if (baseValueType.HasIntElements() && labelBailOut)
        {
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));

            // Storing the "missing item" sentinel into an int array is invalid;
            // detect it and bail out.
            IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func);
            if (!stElem->GetSrc1()->IsEqual(missingElementOpnd))
            {
                InsertCompareBranch(stElem->GetSrc1(), missingElementOpnd , Js::OpCode::BrEq_A, labelBailOut, stElem, true);
            }
            else
            {
                //Its a missing value store and data flow proves that src1 is always missing value. Array cannot be a int array at the first place
                //if this code was ever hit. Just bailout, this code path would be updated with the profile information next time around.
                InsertBranch(Js::OpCode::Br, labelBailOut, stElem);
#if DBG
                labelBailOut->m_noHelperAssert = true;
#endif
                stElem->Remove();
                return instrPrev;
            }
        }
        else
        {
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
        }

        stElem->GetDst()->SetType(stElem->GetSrc1()->GetType());

        // Offset of the inline head segment's elements within the array object.
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        if(baseValueType.HasIntElements())
        {
            base = sizeof(Js::JavascriptNativeIntArray) + offsetof(Js::SparseArraySegment<int32>, elements);
        }
        else
        {
            base = sizeof(Js::JavascriptNativeFloatArray) + offsetof(Js::SparseArraySegment<double>, elements);
        }
    }
    else if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::Array)
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        base = sizeof(Js::JavascriptArray) + offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }
    else
    {
        // StArrSegElemC: the base points directly at a segment, not an array object.
        Assert(stElem->m_opcode == Js::OpCode::StElemC || stElem->m_opcode == Js::OpCode::StArrSegElemC);
        Assert(indirOpnd->GetBaseOpnd()->GetType() == TyVar);
        base = offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }

    Assert(value >= 0);

    // MOV [r3 + offset(element) + index], src
    const BYTE indirScale =
        baseValueType.IsLikelyAnyOptimizedArray() ? GetArrayIndirScale(baseValueType) : m_lowererMD.GetDefaultIndirScale();
    IntConstType offset = base + (value << indirScale);
    Assert(Math::FitsInDWord(offset));
    indirOpnd->SetOffset((int32)offset);

    m_lowererMD.ChangeToWriteBarrierAssign(stElem);

    return instrPrev;
}
// Lower LdArrHead: load the array's head-segment pointer into dst.
// For ObjectWithArray bases, first load the embedded array object.
void Lowerer::LowerLdArrHead(IR::Instr *const instr)
{
    IR::RegOpnd *array = instr->UnlinkSrc1()->AsRegOpnd();
    const ValueType arrayValueType(array->GetValueType());
    Assert(arrayValueType.IsAnyOptimizedArray());

    if(arrayValueType.GetObjectType() == ObjectType::ObjectWithArray)
    {
        array = LoadObjectArray(array, instr);
    }

    // mov arrayHeadSegment, [array + offset(headSegment)]
    instr->GetDst()->SetType(TyMachPtr);
    instr->SetSrc1(
        IR::IndirOpnd::New(
            array,
            GetArrayOffsetOfHeadSegment(arrayValueType),
            TyMachPtr,
            instr->m_func));
    LowererMD::ChangeToAssign(instr);
}
// Creates the rest parameter array.
//     Var JavascriptArray::OP_NewScArrayWithElements(
//         uint32 elementCount,
//         Var *elements,
//         ScriptContext* scriptContext)
// Emits, AFTER instr, a helper call that builds the rest array from the excess
// actual arguments (excessOpnd) located past the formals in the in-param area
// (or in the generator's args array). Returns the emitted helper-call instr.
IR::Instr *Lowerer::LowerRestParameter(IR::Opnd *formalsOpnd, IR::Opnd *dstOpnd, IR::Opnd *excessOpnd, IR::Instr *instr, IR::RegOpnd *generatorArgsPtrOpnd)
{
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, dstOpnd, instr->m_func);
    instr->InsertAfter(helperCallInstr);

    // Var JavascriptArray::OP_NewScArrayWithElements(
    //     int32 elementCount,
    //     Var *elements,
    //     ScriptContext* scriptContext)
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArrayWithElements;
    LoadScriptContext(helperCallInstr);

    BOOL isGenerator = this->m_func->GetJnFunction()->IsGenerator();

    // Elements pointer = ebp + (formals count + formals offset + 1)*sizeof(Var)
    // For generators the args live in the generator object's args array instead
    // of the stack frame, so no formal-param offset applies.
    IR::RegOpnd *srcOpnd = isGenerator ? generatorArgsPtrOpnd : IR::Opnd::CreateFramePointerOpnd(this->m_func);
    uint16 actualOffset = isGenerator ? 0 : GetFormalParamOffset(); //4
    IR::RegOpnd *argPtrOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertAdd(false, argPtrOpnd, srcOpnd, IR::IntConstOpnd::New((formalsOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr, TyUint32, this->m_func), helperCallInstr);
    m_lowererMD.LoadHelperArgument(helperCallInstr, argPtrOpnd);

    m_lowererMD.LoadHelperArgument(helperCallInstr, excessOpnd);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    return helperCallInstr;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerArgIn
- ///
- /// This function checks the passed-in argument count against the index of this
- /// argument and uses null for a param value if the caller didn't explicitly
- /// pass anything.
- ///
- ///----------------------------------------------------------------------------
// Lower the chain of ArgIn_A instructions (and an optional ArgIn_Rest).
// Checks the passed-in argument count against each argument's index and
// assigns 'undefined' for any formal the caller didn't explicitly pass.
// Walks the ArgIn chain bottom-up (the instr passed in is the LAST ArgIn).
// Returns the instruction at which lowering should resume.
IR::Instr *
Lowerer::LowerArgIn(IR::Instr *instrArgIn)
{
    IR::LabelInstr *   labelDone;
    IR::LabelInstr *   labelUndef;
    IR::LabelInstr *   labelNormal;
    IR::LabelInstr *   labelInit;
    IR::LabelInstr *   labelInitNext;
    IR::BranchInstr *  instrBranch;
    IR::Instr *        instrArgInNext;
    IR::Instr *        instrInsert;
    IR::Instr *        instrPrev;
    IR::Instr *        instrResume = nullptr;
    IR::Opnd *         dstOpnd;
    IR::Opnd *         srcOpnd;
    IR::Opnd *         opndUndef;
    Js::ArgSlot        argIndex;
    StackSym *         symParam;
    BOOLEAN            isDuplicate;
    IR::RegOpnd *      generatorArgsPtrOpnd = nullptr;

    // We start with:
    // s1 = ArgIn_A param1
    // s2 = ArgIn_A param2
    // ...
    // sn = ArgIn_A paramn
    //
    // We want to end up with:
    //
    // s1 = ArgIn_A param1            -- Note that this is unconditional
    // count = (load from param area)
    // BrLt_A $start, count, n        -- Forward cbranch to the uncommon case
    // Br $Ln
    // $start:
    // sn = assign undef
    // BrGe_A $Ln-1, count, n-1
    // sn-1 = assign undef
    // ...
    // s2 = assign undef
    // Br $done
    // $Ln:
    // sn = assign paramn
    // $Ln-1:
    // sn-1 = assign paramn-1
    // ...
    // s2 = assign param2
    // $done:

    IR::Opnd *restDst = nullptr;
    bool hasRest = instrArgIn->m_opcode == Js::OpCode::ArgIn_Rest;
    if (hasRest)
    {
        IR::Instr *restInstr = instrArgIn;
        restDst = restInstr->UnlinkDst();
        if (m_func->GetJnFunction()->GetHasImplicitArgIns() && m_func->GetInParamsCount() > 1)
        {
            // There are regular formals too: walk back to the last ArgIn_A and
            // handle the rest array later (via the shared excess count).
            while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
            {
                instrArgIn = instrArgIn->m_prev;
                if (instrResume == nullptr)
                {
                    instrResume = instrArgIn;
                }
            }
            restInstr->Remove();
        }
        else
        {
            // Rest only (no implicit ArgIns): compute the excess arg count,
            // clamp it at zero, and build the rest array immediately.
            IR::Instr * instrCount = m_lowererMD.LoadInputParamCount(instrArgIn, -this->m_func->GetInParamsCount());
            IR::Opnd * excessOpnd = instrCount->GetDst();

            IR::LabelInstr *createRestArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            // BrGe $createRestArray, excess, 0
            InsertCompareBranch(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), Js::OpCode::BrGe_A, createRestArrayLabel, instrArgIn);

            // MOV excess, 0
            InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), instrArgIn);

            // $createRestArray
            instrArgIn->InsertBefore(createRestArrayLabel);

            if (m_func->GetJnFunction()->IsGenerator())
            {
                generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            }

            IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
            IR::Instr *prev = LowerRestParameter(formalsOpnd, restDst, excessOpnd, instrArgIn, generatorArgsPtrOpnd);
            instrArgIn->Remove();
            return prev;
        }
    }

    srcOpnd = instrArgIn->GetSrc1();
    symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();

    argIndex = symParam->GetParamSlotNum();
    if (argIndex == 1)
    {
        // The "this" argument is not source-dependent and doesn't need to be checked.
        if (m_func->GetJnFunction()->IsGenerator())
        {
            generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        }

        m_lowererMD.ChangeToAssign(instrArgIn);
        return instrResume == nullptr ? instrArgIn->m_prev : instrResume;
    }

    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();

    AssertMsg(argIndex == formalsCount, "Expect to see the ArgIn's in numerical order");

    // Because there may be instructions between the ArgIn's, such as saves to the frame object,
    // we find the top of the sequence of ArgIn's and insert everything there. This assumes that
    // ArgIn's use param symbols as src's and not the results of previous instructions.
    instrPrev = instrArgIn;
    instrInsert = instrArgIn->m_next;   // NOTE: overwritten below once the top of the chain is found
    while (argIndex > 2)
    {
        instrPrev = instrPrev->m_prev;
        if (instrPrev->m_opcode == Js::OpCode::ArgIn_A)
        {
            srcOpnd = instrPrev->GetSrc1();
            symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
            AssertMsg(symParam->GetParamSlotNum() == argIndex - 1, "ArgIn's not in numerical order");
            argIndex = symParam->GetParamSlotNum();
        }
        else
        {
            // Make sure that this instruction gets lowered.
            if (instrResume == nullptr)
            {
                instrResume = instrPrev;
            }
        }
    }

    // The loading of parameters will be inserted above this instruction.
    instrInsert = instrPrev;
    if (instrResume == nullptr)
    {
        // We found no intervening non-ArgIn's, so lowering can resume at the previous instruction.
        instrResume = instrInsert->m_prev;
    }

    // Now insert all the checks and undef-assigns.

    if (m_func->GetJnFunction()->IsGenerator())
    {
        generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrInsert);
    }

    // excessOpnd = (load from param area) - formalCounts
    IR::Instr * instrCount = this->m_lowererMD.LoadInputParamCount(instrInsert, -formalsCount, true);
    IR::Opnd * excessOpnd = instrCount->GetDst();

    labelUndef = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/ true);
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, labelUndef, instrInsert);

    // Br $Ln
    labelNormal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelInit = labelNormal;
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelNormal, this->m_func);
    instrInsert->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    // Insert the labels
    instrInsert->InsertBefore(labelUndef);
    instrInsert->InsertBefore(labelNormal);

    // MOV undefReg, undefAddress
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(labelNormal, LibraryValue::ValueUndefined);
    opndUndef =  IR::RegOpnd::New(TyMachPtr, this->m_func);
    LowererMD::CreateAssign(opndUndef, opndUndefAddress, labelNormal);

    // Track dsts already initialized, to handle duplicate formal names.
    BVSparse<JitArenaAllocator> *formalsBv = JitAnew(this->m_func->m_alloc, BVSparse<JitArenaAllocator>, this->m_func->m_alloc);

    while (formalsCount > 2)
    {
        dstOpnd = instrArgIn->GetDst();
        Assert(dstOpnd->IsRegOpnd());
        isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

        // Now insert the undef initialization before the "normal" label
        // sn = assign undef
        LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

        // INC excessOpnd
        // BrEq_A $Ln-1
        formalsCount--;
        InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), labelNormal);
        labelInitNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        InsertBranch(Js::OpCode::BrEq_A, labelInitNext, labelNormal);

        // And insert the "normal" initialization before the "done" label
        // sn = assign paramn
        // $Ln-1:
        labelInit->InsertAfter(labelInitNext);
        labelInit = labelInitNext;

        instrArgInNext = instrArgIn->m_prev;
        instrArgIn->Unlink();

        // function foo(x, x)  { use(x); }
        // This should refer to the second 'x'. Since we reverse the order here however, we need to skip
        // the initialization of the first 'x' to not override the one for the second. WOOB:1105504
        if (isDuplicate)
        {
            instrArgIn->Free();
        }
        else
        {
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
            labelInit->InsertBefore(instrArgIn);
            this->m_lowererMD.ChangeToAssign(instrArgIn);
        }
        instrArgIn = instrArgInNext;

        while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
        {
            instrArgIn = instrArgIn->m_prev;
            AssertMsg(instrArgIn, "???");
        }

        AssertMsg(instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() == formalsCount,
                  "Expect all ArgIn's to be in numerical order by param slot");
    }

    // Insert final undef and normal initializations, jumping unconditionally to the end
    // rather than checking against the decremented formals count as we did inside the loop above.

    // s2 = assign undef
    dstOpnd = instrArgIn->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

    LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

    if (hasRest)
    {
        // On the undef path no excess args exist: rest array gets length 0.
        InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), labelNormal);
    }

    // Br $done
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelDone, this->m_func);
    labelNormal->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    // s2 = assign param2
    // $done:
    labelInit->InsertAfter(labelDone);

    if (hasRest)
    {
        // The formals count has been tainted, so restore it before lowering rest
        IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
        LowerRestParameter(formalsOpnd, restDst, excessOpnd, labelDone, generatorArgsPtrOpnd);
    }

    instrArgIn->Unlink();
    if (isDuplicate)
    {
        instrArgIn->Free();
    }
    else
    {
        ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        labelDone->InsertBefore(instrArgIn);
        this->m_lowererMD.ChangeToAssign(instrArgIn);
    }

    return instrResume;
}
- void
- Lowerer::ConvertArgOpndIfGeneratorFunction(IR::Instr *instrArgIn, IR::RegOpnd *generatorArgsPtrOpnd)
- {
- if (this->m_func->GetJnFunction()->IsGenerator())
- {
- // Replace stack param operand with offset into arguments array held by
- // the generator object.
- IR::Opnd * srcOpnd = instrArgIn->UnlinkSrc1();
- StackSym * symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
- Js::ArgSlot argIndex = symParam->GetParamSlotNum();
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorArgsPtrOpnd, (argIndex - 1) * MachPtr, TyMachPtr, this->m_func);
- srcOpnd->Free(this->m_func);
- instrArgIn->SetSrc1(indirOpnd);
- }
- }
- IR::RegOpnd *
- Lowerer::LoadGeneratorArgsPtr(IR::Instr *instrInsert)
- {
- IR::Instr * instr = LoadGeneratorObject(instrInsert);
- IR::RegOpnd * generatorRegOpnd = instr->GetDst()->AsRegOpnd();
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetArgsPtrOffset(), TyMachPtr, instrInsert->m_func);
- IR::RegOpnd * argsPtrOpnd = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
- LowererMD::CreateAssign(argsPtrOpnd, indirOpnd, instrInsert);
- return argsPtrOpnd;
- }
- IR::Instr *
- Lowerer::LoadGeneratorObject(IR::Instr * instrInsert)
- {
- StackSym * generatorSym = StackSym::NewParamSlotSym(1, instrInsert->m_func);
- instrInsert->m_func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
- IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, instrInsert->m_func);
- IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
- return LowererMD::CreateAssign(generatorRegOpnd, generatorSymOpnd, instrInsert);
- }
- IR::Instr *
- Lowerer::LowerArgInAsmJs(IR::Instr * instrArgIn)
- {
- Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
- Js::ArgSlot argCount = m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetArgCount();
- IR::Instr * instr = instrArgIn;
- for (int argNum = argCount - 1; argNum >= 0; --argNum)
- {
- IR::Instr * instrPrev = instr->m_prev;
- m_lowererMD.ChangeToAssign(instr);
- instr = instrPrev;
- }
- return instr;
- }
// Attempt to emit an inlined fast path for one of a small, fixed set of
// library calls (String charAt/charCodeAt, Math.abs, Array.prototype.push,
// String replace) directly in the lowerer. The generated code identity-checks
// the call target against the library's built-in function table and falls back
// to the original call (under $helper) when the check or the fast path fails.
// Returns true when the fast path was fully generated.
bool
Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
{
    IR::Opnd *src1 = callInstr->GetSrc1();
    IR::Opnd *src2 = callInstr->GetSrc2();

    // Get the arg count by looking at the slot number of the last arg symbol.
    if (!src2->IsSymOpnd())
    {
        // No args? Not sure this is possible, but handle it.
        return false;
    }
    StackSym *argLinkSym = src2->AsSymOpnd()->m_sym->AsStackSym();

    // Subtract "this" from the arg count.
    IntConstType argCount = argLinkSym->GetArgSlotNum() - 1;

    // Find the callee's built-in index (if any).
    Js::BuiltinFunction index = Func::GetBuiltInIndex(src1);

    // Warning!
    // Don't add a new built-in to the following switch. Built-ins need to be inlined the call-direct way.
    // The following is only for prejit scenarios where we don't always get inlining and generate the fast path in the lowerer.
    // Generating the fast path here misses fixed functions and globopt optimizations.
    switch(index)
    {
    case Js::BuiltinFunction::String_CharAt:
    case Js::BuiltinFunction::String_CharCodeAt:
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Char[Code]At assumes result is used.
            return false;
        }
        break;

    case Js::BuiltinFunction::Math_Abs:
#ifdef _M_IX86
        // The x86 fast path for Abs relies on SSE2.
        if (!AutoSystemInfo::Data.SSE2Available())
        {
            return false;
        }
#endif
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Abs assumes result is used.
            return false;
        }
        break;

    case Js::BuiltinFunction::Array_Push:
    {
        if (argCount != 1)
        {
            return false;
        }
        if (callInstr->GetDst())
        {
            // Optimization of push assumes result is unused.
            return false;
        }

        // Walk two links up the ArgOut chain to reach the "this" (array) operand.
        StackSym *linkSym = callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        linkSym = linkSym->m_instrDef->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Opnd *const arrayOpnd = linkSym->m_instrDef->GetSrc1();
        if(!arrayOpnd->IsRegOpnd())
        {
            // This should be rare, but needs to be handled.
            // By now, we've already started some of the inlining. Simply jmp to the helper.
            // The branch will get peeped later.
            return false;
        }
        if(!ShouldGenerateArrayFastPath(arrayOpnd, false, false, false) ||
            arrayOpnd->GetValueType().IsLikelyNativeArray())
        {
            // Rejecting native array for now, since we have to do a FromVar at the call site and bail out.
            return false;
        }
        break;
    }

    case Js::BuiltinFunction::String_Replace:
    {
        if(argCount != 2)
        {
            return false;
        }
        if(!ShouldGenerateStringReplaceFastPath(callInstr, argCount))
        {
            return false;
        }
        break;
    }

    default:
        return false;
    }

    Assert(Func::IsBuiltInInlinedInLowerer(callInstr->GetSrc1()));

    // Compare the runtime call target with the library table slot for this
    // built-in; on mismatch, jump to $helper (the original call).
    IR::Opnd *callTargetOpnd = callInstr->GetSrc1();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Opnd *objRefOpnd = IR::MemRefOpnd::New((void*)this->GetObjRefForBuiltInTarget(callTargetOpnd->AsRegOpnd()), TyMachReg, this->m_func);
    InsertCompareBranch(callTargetOpnd, objRefOpnd, Js::OpCode::BrNeq_A, labelHelper, callInstr);
    callInstr->InsertBefore(labelHelper);

    // Collect the argument operands (argsOpnd[0] is "this") and move each
    // ArgOut instruction into the helper block, just after $helper.
    Assert(argCount <= 2);
    IR::Opnd *argsOpnd[3];
    IR::Opnd *linkOpnd = callInstr->GetSrc2();
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
        StackSym *sym = src2->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;
        Assert(argCount >= 0);
        argsOpnd[argCount] = argInstr->GetSrc1();
        argCount--;

        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);
        linkOpnd = argInstr->GetSrc2();
    }
    // All args, including "this", must have been consumed by the walk above.
    AnalysisAssert(argCount == -1);

    // Move startcall
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);

    // $doneLabel: placed right after the call; the fast path branches here on success.
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    callInstr->InsertAfter(doneLabel);

    // Emit the built-in-specific fast path before $helper.
    bool success = true;
    switch(index)
    {
    case Js::BuiltinFunction::Math_Abs:
        this->m_lowererMD.GenerateFastAbs(callInstr->GetDst(), argsOpnd[1], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::String_CharCodeAt:
    case Js::BuiltinFunction::String_CharAt:
        success = this->m_lowererMD.GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
            callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::Array_Push:
        success = GenerateFastPush(argsOpnd[0], argsOpnd[1], callInstr, labelHelper, labelHelper, nullptr, doneLabel);
        break;

    case Js::BuiltinFunction::String_Replace:
        success = GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    default:
        Assert(UNREACHED);
    }

    // On fast-path success, jump over the helper block to $doneLabel.
    IR::Instr *instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, this->m_func);
    labelHelper->InsertBefore(instr);

    return success;
}
- // Perform lowerer part of inlining built-in function.
- // For details, see inline.cpp.
- //
// Description of changes here (note that taking care of ArgOuts is similar to InlineeStart):
- // - Move ArgOut_A_InlineBuiltIn next to the call instr -- used by bailout processing in register allocator.
- // - Remove StartCall and InlineBuiltInStart for this call.
- // Before:
- // StartCall fn
- // d1 = BIA s1, link1
- // ...
- // InlineBuiltInStart fn, link0
- // After:
- // ...
- // d1 = BIA s1, NULL
- void Lowerer::LowerInlineBuiltIn(IR::Instr* builtInEndInstr)
- {
- Assert(builtInEndInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || builtInEndInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd);
- IR::Instr* startCallInstr;
- builtInEndInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
- startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
- return false;
- });
- // Keep the startCall around as bailout refers to it. Just unlink it for now - do not delete it.
- startCallInstr->Unlink();
- builtInEndInstr->Remove();
- }
- Js::JavascriptFunction **
- Lowerer::GetObjRefForBuiltInTarget(IR::RegOpnd * regOpnd)
- {
- Js::JavascriptFunction ** mathFns =
- this->m_func->GetScriptContext()->GetLibrary()->GetBuiltinFunctions();
- Js::BuiltinFunction index = regOpnd->m_sym->m_builtInIndex;
- AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
- return mathFns + index;
- }
// Lower NewRegEx: allocate a Js::JavascriptRegExp inline (recycler or
// mark-temp allocation) and initialize its fields directly. src1 is the
// address-constant compiled pattern. When regex tracing is enabled, falls
// back to a helper call so the trace hook runs.
IR::Instr *
Lowerer::LowerNewRegEx(IR::Instr * instr)
{
    IR::Opnd *src1 = instr->UnlinkSrc1();

    Assert(src1->IsAddrOpnd());

#if ENABLE_REGEX_CONFIG_OPTIONS
    if (REGEX_CONFIG_FLAG(RegexTracing))
    {
        Assert(!instr->GetDst()->CanStoreTemp());
        IR::Instr * instrPrev = LoadScriptContext(instr);
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperScrRegEx_OP_NewRegEx);
        return instrPrev;
    }
#endif
    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::SymOpnd * tempObjectSymOpnd;
    // isZeroed tells the MemInit helpers they may skip stores of zero/null
    // into freshly zeroed memory.
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd, IR::HelperAllocMemForJavascriptRegExp, sizeof(Js::JavascriptRegExp), &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable and pattern init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
                   LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableJavascriptRegExp),
                   this->outerMostLoopLabel, false);
    }
    else
    {
        // vtable lives at offset 0 of the object.
        GenerateMemInit(dstOpnd, 0, LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp), instr, isZeroed);
    }
    // Initialize the type, aux slots, and object array fields.
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfType(),
                    this->LoadLibraryValueOpnd(instr, LibraryValue::ValueRegexType), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfAuxSlots(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfObjectArray(), instr, isZeroed);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // The pattern is loop-invariant too; store it at the hoisted location.
        InsertMove(IR::SymOpnd::New(tempObjectSymOpnd->m_sym,
                                    tempObjectSymOpnd->m_offset + Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, this->m_func),
                   src1, this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfPattern(), src1, instr, isZeroed);
    }
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfSplitPattern(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), instr, isZeroed);
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), 0, instr, isZeroed);
    instr->Remove();

    return instrPrev;
}
- IR::Instr *
- Lowerer::GenerateRuntimeError(IR::Instr * insertBeforeInstr, Js::MessageId errorCode, IR::JnHelperMethod helper /*= IR::JnHelperMethod::HelperOp_RuntimeTypeError*/)
- {
- IR::Instr * runtimeErrorInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, this->m_func);
- runtimeErrorInstr->SetSrc1(IR::IntConstOpnd::New(errorCode, TyInt32, this->m_func, true));
- insertBeforeInstr->InsertBefore(runtimeErrorInstr);
- return this->LowerUnaryHelperMem(runtimeErrorInstr, helper);
- }
- bool Lowerer::IsNullOrUndefRegOpnd(IR::RegOpnd *opnd) const
- {
- StackSym *sym = opnd->m_sym;
- if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
- {
- return false;
- }
- Js::Var var = sym->GetConstAddress();
- Js::TypeId typeId = Js::RecyclableObject::FromVar(var)->GetTypeId();
- return typeId == Js::TypeIds_Null || typeId == Js::TypeIds_Undefined;
- }
- bool Lowerer::IsConstRegOpnd(IR::RegOpnd *opnd) const
- {
- StackSym *sym = opnd->m_sym;
- if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
- {
- return false;
- }
- Js::Var var = sym->GetConstAddress();
- Js::TypeId typeId = Js::RecyclableObject::FromVar(var)->GetTypeId();
- return typeId == Js::TypeIds_Null || typeId == Js::TypeIds_Undefined || typeId == Js::TypeIds_Boolean;
- }
- bool
- Lowerer::HasSideEffects(IR::Instr *instr)
- {
- if (LowererMD::IsCall(instr))
- {
- #ifdef _M_IX86
- IR::Opnd *src1 = instr->GetSrc1();
- if (src1->IsHelperCallOpnd())
- {
- IR::HelperCallOpnd * helper = src1->AsHelperCallOpnd();
- switch(helper->m_fnHelper)
- {
- case IR::HelperOp_Int32ToAtomInPlace:
- case IR::HelperOp_Int32ToAtom:
- case IR::HelperOp_UInt32ToAtom:
- return false;
- }
- }
- #endif
- return true;
- }
- return instr->HasAnySideEffects();
- }
- IR::Instr*
- Lowerer::GenerateFastInlineBuiltInMathRandom(IR::Instr* instr)
- {
- AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
- IR::Instr* retInstr = instr->m_prev;
- IR::Opnd* dst = instr->GetDst();
- IR::Opnd* tmpdst = dst;
- if(!dst->IsRegOpnd())
- {
- tmpdst = IR::RegOpnd::New(dst->GetType(), instr->m_func);
- }
- LoadScriptContext(instr);
- IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, tmpdst, instr->m_func);
- instr->InsertBefore(helperCallInstr);
- m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperDirectMath_Random);
- if(tmpdst != dst)
- {
- InsertMove(dst, tmpdst, instr);
- }
- instr->Remove();
- return retInstr;
- }
- IR::Instr *
- Lowerer::LowerCallDirect(IR::Instr * instr)
- {
- IR::Opnd* linkOpnd = instr->UnlinkSrc2();
- StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
- IR::Instr* argInstr = linkSym->m_instrDef;
- Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized);
- IR::Opnd* funcObj = argInstr->UnlinkSrc1();
- instr->SetSrc2(argInstr->UnlinkSrc2());
- argInstr->Remove();
- if(instr->HasBailOutInfo())
- {
- IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(instr, instr->m_next, instr->m_next);
- this->LowerBailOnEqualOrNotEqual(bailOutInstr);
- }
- Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
- return this->GenerateDirectCall(instr, funcObj, (ushort)flags);
- }
// Lower an inlined direct call: lower the outgoing args, push the function
// object as the final helper argument, then lower the call itself. argCount
// from LowerCallArgs already accounts for the function object and callinfo.
// Returns the instruction preceding the lowered call.
IR::Instr *
Lowerer::GenerateDirectCall(IR::Instr* inlineInstr, IR::Opnd* funcObj, ushort callflags)
{
    int32 argCount = m_lowererMD.LowerCallArgs(inlineInstr, callflags);
    m_lowererMD.LoadHelperArgument(inlineInstr, funcObj);
    m_lowererMD.LowerCall(inlineInstr, (Js::ArgSlot)argCount); //to account for function object and callinfo

    return inlineInstr->m_prev;
}
- /*
- * GenerateHelperToArrayPushFastPath
- * Generates Helper Call and pushes arguments to the Push HelperCall
- */
- IR::Instr *
- Lowerer::GenerateHelperToArrayPushFastPath(IR::Instr * instr, IR::LabelInstr * bailOutLabelHelper)
- {
- IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
- IR::Opnd * elementHelperOpnd = instr->UnlinkSrc2();
- IR::JnHelperMethod helperMethod;
- if(elementHelperOpnd->IsInt32())
- {
- Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeIntArray());
- helperMethod = IR::HelperArray_NativeIntPush;
- m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
- }
- else if(elementHelperOpnd->IsFloat())
- {
- Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeFloatArray());
- helperMethod = IR::HelperArray_NativeFloatPush;
- m_lowererMD.LoadDoubleHelperArgument(instr, elementHelperOpnd);
- }
- else
- {
- helperMethod = IR::HelperArray_VarPush;
- m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
- }
- m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
- LoadScriptContext(instr);
- return m_lowererMD.ChangeToHelperCall(instr, helperMethod);
- }
- /*
- * GenerateHelperToArrayPopFastPath
- * Generates Helper Call and pushes arguments to the Pop HelperCall
- */
- IR::Instr *
- Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * doneLabel, IR::LabelInstr * bailOutLabelHelper)
- {
- IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
- ValueType arrayValueType = arrayHelperOpnd->GetValueType();
- IR::JnHelperMethod helperMethod;
- //Decide the helperMethod based on dst availability and nativity of the array.
- if(arrayValueType.IsLikelyNativeArray() && !instr->GetDst())
- {
- helperMethod = IR::HelperArray_NativePopWithNoDst;
- }
- else if(arrayValueType.IsLikelyNativeIntArray())
- {
- helperMethod = IR::HelperArray_NativeIntPop;
- }
- else if(arrayValueType.IsLikelyNativeFloatArray())
- {
- helperMethod = IR::HelperArray_NativeFloatPop;
- }
- else
- {
- helperMethod = IR::HelperArray_VarPop;
- }
- m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
- //We do not need scriptContext for HelperArray_NativePopWithNoDst call.
- if(helperMethod != IR::HelperArray_NativePopWithNoDst)
- {
- LoadScriptContext(instr);
- }
- IR::Instr * retInstr = m_lowererMD.ChangeToHelperCall(instr, helperMethod, bailOutLabelHelper);
- //We don't need missing item check for var arrays, as there it is taken care by the helper.
- if(arrayValueType.IsLikelyNativeArray())
- {
- if(retInstr->GetDst())
- {
- //Do this check only for native arrays with Dst. For Var arrays, this is taken care in the Runtime helper itself.
- InsertCompareBranch(GetMissingItemOpnd(retInstr->GetDst()->GetType(), m_func), retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper);
- }
- else
- {
- //We need unconditional jump to doneLabel, if there is no dst in Pop instr.
- InsertBranch(Js::OpCode::Br, true, doneLabel, bailOutLabelHelper);
- }
- }
- return retInstr;
- }
// Lower BrTrue_A/BrFalse_A that may carry bailout info. If the branch has a
// bailout, split an implicit-call bailout off ahead of the branch and, when an
// auxiliary (debugger) bailout is also attached, lower that as a shared
// bailout using the same bailout info. Finally, lower the conditional branch
// itself.
IR::Instr *
Lowerer::LowerCondBranchCheckBailOut(IR::BranchInstr * branchInstr, IR::Instr * helperCall, bool isHelper)
{
    Assert(branchInstr->m_opcode == Js::OpCode::BrTrue_A || branchInstr->m_opcode == Js::OpCode::BrFalse_A);
    if (branchInstr->HasBailOutInfo())
    {
        IR::BailOutKind debuggerBailOutKind = IR::BailOutInvalid;
        if (branchInstr->HasAuxBailOut())
        {
            // We have shared debugger bailout. For branches we lower it here, not in SplitBailForDebugger.
            // See SplitBailForDebugger for details.
            AssertMsg(!(branchInstr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
            debuggerBailOutKind = branchInstr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;
            AssertMsg((debuggerBailOutKind & ~(IR::BailOutIgnoreException | IR::BailOutForceByFlag)) == 0, "Only IR::BailOutIgnoreException|ForceByFlag supported here.");
        }

        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(branchInstr, helperCall, branchInstr);
        IR::Instr* prevInstr = this->LowerBailOnEqualOrNotEqual(bailOutInstr, branchInstr, nullptr, nullptr, isHelper);

        if (debuggerBailOutKind != IR::BailOutInvalid)
        {
            // Note that by this time implicit calls bailout is already lowered.
            // What we do here is use same bailout info and lower debugger bailout which would be shared bailout.
            BailOutInfo* bailOutInfo = bailOutInstr->GetBailOutInfo();
            IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
                Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
            prevInstr->InsertAfter(debuggerBailoutInstr);

            // The result of that is:
            // original helper op_* instr, then debugger bailout, then implicit calls bailout/etc with the branch instr.
            // Example:
            //   s35(eax).i32 = CALL Op_GreaterEqual.u32            # -- original op_* helper
            //   s34.i32      = MOV s35(eax).i32                    #
            //                  BailForDebugger                     # Bailout: #0042 (BailOutIgnoreException) -- the debugger bailout
            //                  CMP [0x0003BDE0].i8, 1 (0x1).i8     # -- implicit calls check
            //                  JEQ $L10                            #
            // $L11: [helper]                                       #
            //                  CALL SaveAllRegistersAndBranchBailOut.u32  # Bailout: #0042 (BailOutOnImplicitCalls)
            //                  JMP $L5                             #
            // $L10: [helper]                                       #
            //                  BrFalse_A $L3, s34.i32              #0034 -- The BrTrue/BrFalse branch (branch instr)
            // $L6: [helper]                                        #0042
            this->LowerBailForDebugger(debuggerBailoutInstr, isHelper);
            // After lowering this we will have a check which on bailout condition will JMP to $L11.
        }
    }

    return m_lowererMD.LowerCondBranch(branchInstr);
}
- IR::Instr *
- Lowerer::LoadArgumentsFromStack(IR::Instr * instr)
- {
- IR::Instr * prevInstr = instr->m_prev;
- Assert(instr->GetDst()->IsRegOpnd());
- if (instr->m_func->IsInlinee())
- {
- instr->ReplaceSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
- }
- else
- {
- instr->ReplaceSrc1(this->m_lowererMD.CreateStackArgumentsSlotOpnd());
- }
- this->m_lowererMD.ChangeToAssign(instr);
- return prevInstr;
- }
- IR::SymOpnd *
- Lowerer::LoadCallInfo(IR::Instr * instrInsert)
- {
- IR::SymOpnd * srcOpnd;
- Func * func = instrInsert->m_func;
- if (func->GetJnFunction()->IsGenerator())
- {
- // Generator function arguments and ArgumentsInfo are not on the stack. Instead they
- // are accessed off the generator object (which is prm1).
- StackSym * generatorSym = StackSym::NewParamSlotSym(1, func);
- func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
- IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, func);
- IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
- LowererMD::CreateAssign(generatorRegOpnd, generatorSymOpnd, instrInsert);
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetCallInfoOffset(), TyMachPtr, func);
- IR::Instr * instr = LowererMD::CreateAssign(IR::RegOpnd::New(TyMachPtr, func), indirOpnd, instrInsert);
- StackSym * callInfoSym = StackSym::New(TyMachReg, func);
- IR::SymOpnd * callInfoSymOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
- LowererMD::CreateAssign(callInfoSymOpnd, instr->GetDst(), instrInsert);
- srcOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
- }
- else
- {
- // Otherwise callInfo is always the "second" argument.
- // The stack looks like this:
- //
- // script param N
- // ...
- // script param 1
- // callinfo
- // function object
- // return addr
- // FP -> FP chain
- StackSym * srcSym = LowererMD::GetImplicitParamSlotSym(1, func);
- srcOpnd = IR::SymOpnd::New(srcSym, TyMachReg, func);
- }
- return srcOpnd;
- }
// Lower BailOnNotStackArgs: bail out when the arguments object has not been
// optimized into stack arguments, or (for non-inlined functions) when the
// actual count (excluding "this") reaches the inlinee arg-out limit. Throws a
// rejit exception when the stack-args optimization is off for this function.
IR::Instr *
Lowerer::LowerBailOnNotStackArgs(IR::Instr * instr)
{
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse the label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
        instr->InsertAfter(continueLabelInstr);
    }

    IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

    if (!instr->m_func->IsInlinee())
    {
        //BailOut if it is not stack args or the number of actuals (except "this" argument) is greater than or equal to 15.
        IR::Opnd* stackArgs = instr->UnlinkSrc1();
        InsertCompareBranch(stackArgs, instr->UnlinkSrc2(), Js::OpCode::BrNeq_A, helperLabelInstr, instr);

        // Load the real actuals count and compare it against the inlinee limit.
        IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, instr->m_func);
        IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, stackArgs, instr->m_func);
        ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); //LdLen_A works only on stack arguments
        instr->InsertBefore(ldLen);
        this->GenerateFastRealStackArgumentsLdLen(ldLen);
        this->InsertCompareBranch(ldLenDstOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount, TyUint32, m_func, true), Js::OpCode::BrLt_A, true, continueLabelInstr, instr);
    }
    else
    {
        //For Inlined functions, we are sure actuals can't exceed Js::InlineeCallInfo::MaxInlineeArgoutCount (15).
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrEq_A, continueLabelInstr, instr);
    }

    // Fall through into the bailout under the helper label.
    instr->InsertBefore(helperLabelInstr);
    this->GenerateBailOut(instr, nullptr, nullptr);
    return prevInstr;
}
// Lower BailOnNotSpreadable: verify the spread argument is a simple,
// non-optimized, non-object JavascriptArray with no missing values and a
// length that fits in an inlinee call frame; otherwise bail out. When the
// profiled value type already proves the fast path impossible (or this is an
// inlinee), throw a rejit exception instead of emitting a bailout.
IR::Instr *
Lowerer::LowerBailOnNotSpreadable(IR::Instr *instr)
{
    // We only avoid bailing out / throwing a rejit exception when the array operand is a simple, non-optimized, non-object array.
    IR::Instr * prevInstr = instr->m_prev;
    Func *func = instr->m_func;

    IR::RegOpnd *arrayOpnd = nullptr;
    IR::Opnd *arraySrcOpnd = instr->UnlinkSrc1();
    if (!arraySrcOpnd->IsRegOpnd())
    {
        // Force the array value into a register for the checks below.
        arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, instr);
    }
    else
    {
        arrayOpnd = arraySrcOpnd->AsRegOpnd();
    }

    const ValueType baseValueType(arrayOpnd->GetValueType());

    // Check if we can just throw a rejit exception based on valuetype alone instead of bailing out.
    if (!baseValueType.IsLikelyArray()
        || baseValueType.IsLikelyAnyOptimizedArray()
        || (baseValueType.IsLikelyObject() && (baseValueType.GetObjectType() == ObjectType::ObjectWithArray))

        // Validate that GenerateArrayTest will not fail.
        || !(baseValueType.IsUninitialized() || baseValueType.HasBeenObject())
        || m_func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // Past this point, we will need to use a bailout.
    IR::LabelInstr *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);

    // See if we can skip various array checks on value type alone
    if (!baseValueType.IsArray())
    {
        GenerateArrayTest(arrayOpnd, bailOutLabel, bailOutLabel, instr, false);
    }

    if (!(baseValueType.IsArray() && baseValueType.HasNoMissingValues()))
    {
        // Bail out unless the HasNoMissingValues flag is set on the array.
        InsertTestBranch(
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
            IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
            Js::OpCode::BrEq_A,
            bailOutLabel,
            instr);
    }

    // Bail out when the array length exceeds what an inlinee frame can hold.
    IR::IndirOpnd *arrayLenPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    InsertCompareBranch(arrayLenPtrOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount - 1, TyUint8, func), Js::OpCode::BrGt_A, true, bailOutLabel, instr);

    // The fast path jumps over the bailout block to $skipBailOut.
    IR::LabelInstr *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, instr);
    instr->InsertBefore(bailOutLabel);
    instr->InsertAfter(skipBailOutLabel);

    GenerateBailOut(instr);
    return prevInstr;
}
- IR::Instr *
- Lowerer::LowerBailOnNotPolymorphicInlinee(IR::Instr * instr)
- {
- Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnFailedPolymorphicInlineTypeCheck || instr->GetBailOutKind() == IR::BailOutOnPolymorphicInlineFunction));
- IR::Instr* instrPrev = instr->m_prev;
- this->GenerateBailOut(instr, nullptr, nullptr);
- return instrPrev;
- }
// Emit the compare-and-skip pattern shared by BailOnEqual/BailOnNotEqual:
// branch to the following continue label when the bailout condition does NOT
// hold, so that execution falls through into the bailout code when it does.
// When not already inside a helper block, also open a helper label before the
// (subsequently generated) bailout code.
void
Lowerer::LowerBailoutCheckAndLabel(IR::Instr *instr, bool onEqual, bool isHelper)
{
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse the label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instr->InsertAfter(continueLabelInstr);
    }

    if(instr->GetBailOutKind() == IR::BailOutInjected)
    {
        // BailOnEqual 0, 0
        Assert(onEqual);
        Assert(instr->GetSrc1()->IsEqual(instr->GetSrc2()));
        Assert(instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 0);

        // The operands cannot be equal when generating a compare (assert) but since this is for testing purposes, hoist a src.
        // Ideally, we would just create a BailOut instruction that generates a guaranteed bailout, but there seem to be issues
        // with doing this in a non-helper path. So finally, it would generate:
        //     xor s0, s0
        //     test s0, s0
        //     jnz $continue
        //   $bailout:
        //     // bailout
        //   $continue:
        instr->HoistSrc1(LowererMD::GetLoadOp(instr->GetSrc1()->GetType()));
    }

    // Branch around the bailout when the bail condition is false.
    InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(),
        onEqual ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, continueLabelInstr, instr);

    if (!isHelper)
    {
        // Open a helper block for the bailout code that follows.
        IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertBefore(helperLabelInstr);
    }
}
// Lower BailOnEqual/BailOnNotEqual: compare src1 and src2 and bail out when
// the condition holds. For a BailOutOnImplicitCalls bailout on a profiled
// field access (propSymOpnd given), additionally record FldInfo_FromAccessor
// in the dynamic profile on the bailout path when the implicit call was an
// accessor, so later jits expect the accessor.
IR::Instr *
Lowerer::LowerBailOnEqualOrNotEqual(IR::Instr * instr,
    IR::BranchInstr *branchInstr,      // = nullptr
    IR::LabelInstr *labelBailOut,      // = nullptr
    IR::PropertySymOpnd * propSymOpnd, // = nullptr
    bool isHelper)                     // = false
{
    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test
    bool onEqual = instr->m_opcode == Js::OpCode::BailOnEqual;

    LowerBailoutCheckAndLabel(instr, onEqual, isHelper);

    // BailOutOnImplicitCalls is a post-op bailout. Since we look at the profile info for LdFld/StFld to decide whether the instruction may or may not call an accessor,
    // we need to update this profile information on the bailout path for BailOutOnImplicitCalls if the implicit call was an accessor call.
    if(propSymOpnd && ((instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls) && (propSymOpnd->m_inlineCacheIndex != -1) &&
        instr->m_func->GetJnFunction()->HasDynamicProfileInfo())
    {
        // result = AND implCallFlags, ~ImplicitCall_None
        //          TST result, ImplicitCall_Accessor
        //          JEQ $bail
        //          OR profiledFlags, FldInfoAccessor
        // $bail
        IR::Opnd * implicitCallFlags = GetImplicitCallFlagsOpnd();
        IR::Opnd * accessorImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_Accessor & ~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * maskNoImplicitCall = IR::IntConstOpnd::New((Js::ImplicitCallFlags)~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * fldInfoAccessor = IR::IntConstOpnd::New(Js::FldInfo_FromAccessor, GetFldInfoFlagsType(), instr->m_func, true);
        IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);

        IR::Instr * andInstr = InsertAnd(IR::RegOpnd::New(GetImplicitCallFlagsType(), instr->m_func), implicitCallFlags, maskNoImplicitCall, instr);
        InsertTestBranch(andInstr->GetDst(), accessorImplicitCall, Js::OpCode::BrEq_A, label, instr);

        // Set the accessor bit on the profiled field info in place.
        Js::FldInfo * info = instr->m_func->GetJnFunction()->GetAnyDynamicProfileInfo()->GetFldInfo(instr->m_func->GetJnFunction(), propSymOpnd->m_inlineCacheIndex);
        IR::Opnd * profiledFlags = IR::MemRefOpnd::New((char*)info + info->GetOffsetOfFlags(), TyInt8, instr->m_func);
        InsertOr(profiledFlags, profiledFlags, fldInfoAccessor, instr);
        instr->InsertBefore(label);
    }

    this->GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
- void Lowerer::LowerBailOnNegative(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::BailOnNegative);
- Assert(instr->HasBailOutInfo());
- Assert(!instr->GetDst());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->GetType() == TyInt32 || instr->GetSrc1()->GetType() == TyUint32);
- Assert(!instr->GetSrc2());
- IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
- LowerOneBailOutKind(instr, instr->GetBailOutKind(), false);
- Assert(!instr->HasBailOutInfo());
- IR::Instr *insertBeforeInstr = instr->m_next;
- Func *const func = instr->m_func;
- // test src, src
- // jns $skipBailOut
- InsertCompareBranch(
- instr->UnlinkSrc1(),
- IR::IntConstOpnd::New(0, TyInt32, func, true),
- Js::OpCode::BrGe_A,
- skipBailOutLabel,
- insertBeforeInstr);
- instr->Remove();
- }
- IR::Instr *
- Lowerer::LowerBailOnNotObject(IR::Instr *instr,
- IR::BranchInstr *branchInstr /* = nullptr */,
- IR::LabelInstr *labelBailOut /* = nullptr */)
- {
- IR::Instr *prevInstr = instr->m_prev;
- IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label,
- m_func);
- instr->InsertAfter(continueLabelInstr);
- this->m_lowererMD.GenerateObjectTest(instr->UnlinkSrc1(),
- instr,
- continueLabelInstr,
- /* fContinueLabel = */ true);
- this->GenerateBailOut(instr, branchInstr, labelBailOut);
- return prevInstr;
- }
// Lower BailOnNotBuiltIn: bail out unless src1 is the specific library
// built-in function identified by the index constant in src2.
IR::Instr *
Lowerer::LowerBailOnNotBuiltIn(IR::Instr *instr,
    IR::BranchInstr *branchInstr /* = nullptr */,
    IR::LabelInstr *labelBailOut /* = nullptr */)
{
    Assert(instr->GetSrc2()->IsIntConstOpnd());
    IR::Instr *prevInstr = instr->m_prev;

    // Build a memref to the expected function's slot in the library's
    // built-in function table.
    Js::JavascriptFunction ** builtInFuncs = this->m_func->GetScriptContext()->GetLibrary()->GetBuiltinFunctions();
    Js::BuiltinFunction builtInIndex = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();

    IR::Opnd *builtIn = IR::MemRefOpnd::New((void*)(builtInFuncs + builtInIndex), TyMachReg, instr->m_func);

#if TESTBUILTINFORNULL
    // Debug aid: break into the debugger if the table slot is unexpectedly null.
    IR::LabelInstr * continueAfterTestLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    InsertTestBranch(builtIn, builtIn, Js::OpCode::BrNeq_A, continueAfterTestLabel, instr);
    this->m_lowererMD.GenerateDebugBreak(instr);
    instr->InsertBefore(continueAfterTestLabel);
#endif

    // Equal targets skip the bailout; otherwise fall through into it.
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(continueLabel);

    InsertCompareBranch(instr->UnlinkSrc1(), builtIn, Js::OpCode::BrEq_A, continueLabel, instr);

    GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
- IR::Instr *
- Lowerer::LowerBailForDebugger(IR::Instr* instr, bool isInsideHelper /* = false */)
- {
- IR::Instr * prevInstr = instr->m_prev;
- IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- AssertMsg(bailOutKind, "bailOutKind should not be zero at this time.");
- AssertMsg(!(bailOutKind & IR::BailOutExplicit) || bailOutKind == IR::BailOutExplicit,
- "BailOutExplicit cannot be combined with any other bailout flags.");
- IR::LabelInstr* bailOutLabel = nullptr;
- if (!(bailOutKind & IR::BailOutExplicit))
- {
- Js::DebugManager* debugManager = this->GetScriptContext()->GetThreadContext()->GetDebugManager();
- DebuggingFlags* flags = debugManager->GetDebuggingFlags();
- // Check 1 (do we need to bail out?)
- // JXX bailoutLabel
- // Check 2 (do we need to bail out?)
- // JXX bailoutLabel
- // ...
- // JMP continueLabel
- // bailoutDocumentLabel:
- // (determine if document boundary reached - if not, JMP to continueLabel)
- // NOTE: THIS BLOCK IS CONDITIONALLY GENERATED BASED ON doGenerateBailOutDocumentBlock
- // bailoutLabel:
- // bail out
- // continueLabel:
- // ...
- IR::LabelInstr* bailOutDocumentLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
- instr->InsertBefore(bailOutDocumentLabel);
- IR::LabelInstr* bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
- instr->InsertBefore(bailOutLabel);
- IR::LabelInstr* continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ isInsideHelper);
- instr->InsertAfter(continueLabel);
- IR::BranchInstr* continueBranchInstr = this->InsertBranch(Js::OpCode::Br, continueLabel, bailOutDocumentLabel); // JMP continueLabel.
- bool doGenerateBailOutDocumentBlock = false;
- const IR::BailOutKind c_forceAndIgnoreEx = IR::BailOutForceByFlag | IR::BailOutIgnoreException;
- if ((bailOutKind & c_forceAndIgnoreEx) == c_forceAndIgnoreEx)
- {
- // It's faster to check these together in 1 check rather than 2 separate checks at run time.
- // CMP [&(flags->m_forceInterpreter, flags->m_isIgnoreException)], 0
- // BNE bailout
- IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt16, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt16, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= c_forceAndIgnoreEx;
- }
- else
- {
- if (bailOutKind & IR::BailOutForceByFlag)
- {
- // CMP [&flags->m_forceInterpreter], 0
- // BNE bailout
- IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= IR::BailOutForceByFlag;
- }
- if (bailOutKind & IR::BailOutIgnoreException)
- {
- // CMP [&flags->m_byteCodeOffsetAfterIgnoreException], DebuggingFlags::InvalidByteCodeOffset
- // BNE bailout
- IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + flags->GetByteCodeOffsetAfterIgnoreExceptionOffset(), TyInt32, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(DebuggingFlags::InvalidByteCodeOffset, TyInt32, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= IR::BailOutIgnoreException;
- }
- }
- if (bailOutKind & IR::BailOutBreakPointInFunction)
- {
- // CMP [&functionBody->m_sourceInfo.m_probeCount], 0
- // BNE bailout
- Js::FunctionBody* body = m_func->GetJnFunction();
- IR::Opnd* opnd1 = IR::MemRefOpnd::New(&body->GetSourceInfo()->m_probeCount, TyInt32, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt32, m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- bailOutKind ^= IR::BailOutBreakPointInFunction;
- }
- // on method entry
- if(bailOutKind & IR::BailOutStep)
- {
- // TEST STEP_BAILOUT, [&stepController->StepType]
- // BNE BailoutLabel
- IR::Opnd* opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(Js::STEP_BAILOUT, TyInt8, this->m_func, /*dontEncode*/ true);
- InsertTestBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- // CMP STEP_DOCUMENT, [&stepController->StepType]
- // BEQ BailoutDocumentLabel
- opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
- opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
- doGenerateBailOutDocumentBlock = true;
- bailOutKind ^= IR::BailOutStep;
- }
- // on method exit
- if (bailOutKind & IR::BailOutStackFrameBase)
- {
- // CMP EffectiveFrameBase, [&stepController->frameAddrWhenSet]
- // BA bailoutLabel
- RegNum effectiveFrameBaseReg;
- #ifdef _M_X64
- effectiveFrameBaseReg = m_lowererMD.GetRegStackPointer();
- #else
- effectiveFrameBaseReg = m_lowererMD.GetRegFramePointer();
- #endif
- IR::Opnd* opnd1 = IR::RegOpnd::New(nullptr, effectiveFrameBaseReg, TyMachReg, m_func);
- IR::Opnd* opnd2 = IR::MemRefOpnd::New(debugManager->stepController.GetAddressOfFrameAddress(), TyMachReg, m_func);
- this->InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrGt_A, /*isUnsigned*/ true, bailOutLabel, continueBranchInstr);
- // CMP STEP_DOCUMENT, [&stepController->StepType]
- // BEQ BailoutDocumentLabel
- opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
- opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
- doGenerateBailOutDocumentBlock = true;
- bailOutKind ^= IR::BailOutStackFrameBase;
- }
- if (bailOutKind & IR::BailOutLocalValueChanged)
- {
- int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
- if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
- {
- // CMP [EBP + hasLocalVarChangedStackOffset], 0
- // BNE bailout
- StackSym* sym = StackSym::New(TyInt8, m_func);
- sym->m_offset = hasLocalVarChangedOffset;
- sym->m_allocated = true;
- IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
- IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
- InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
- }
- bailOutKind ^= IR::BailOutLocalValueChanged;
- }
- if (doGenerateBailOutDocumentBlock)
- {
- // GENERATE the BailoutDocumentLabel
- // bailOutDocumentLabel:
- // CMP CurrentScriptId, [&stepController->ScriptIdWhenSet]
- // BEQ ContinueLabel
- // bailOutLabel: // (fallthrough bailOutLabel)
- Js::FunctionBody* body = m_func->GetJnFunction();
- IR::Opnd* opnd1 = IR::MemRefOpnd::New(body->GetAddressOfScriptId(), TyInt32, m_func);
- IR::Opnd* opnd2 = IR::MemRefOpnd::New(debugManager->stepController.GetAddressOfScriptIdWhenSet(), TyInt32, m_func);
- IR::RegOpnd* reg1 = IR::RegOpnd::New(TyInt32, m_func);
- InsertMove(reg1, opnd2, bailOutLabel);
- InsertCompareBranch(opnd1, reg1, Js::OpCode::BrEq_A, /*isUnsigned*/ true, continueLabel, bailOutLabel);
- }
- AssertMsg(bailOutKind == (IR::BailOutKind)0, "Some of the bits in BailOutKind were not processed!");
- // Note: at this time the 'instr' is in between bailoutLabel and continueLabel.
- }
- else
- {
- // For explicit/unconditional bailout use label which is not a helper, otherwise we would get a helper in main code path
- // which breaks helper label consistency (you can only get to helper from a conditional branch in main code), see DbCheckPostLower.
- bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- }
- this->GenerateBailOut(instr, nullptr, bailOutLabel);
- return prevInstr;
- }
- IR::Instr*
- Lowerer::LowerBailOnException(IR::Instr * instr)
- {
- Assert(instr->HasBailOutInfo());
- IR::Instr * instrPrev = instr->m_prev;
- Assert(instrPrev->m_opcode == Js::OpCode::Catch);
- this->GenerateBailOut(instr, nullptr, nullptr);
- return instrPrev;
- }
// Generate BailOut Lowerer Instruction if the value is INT_MIN.
// If it's not INT_MIN, we continue without bailout.
IR::Instr *
Lowerer::LowerBailOnIntMin(IR::Instr *instr, IR::BranchInstr *branchInstr /* = nullptr */, IR::LabelInstr *labelBailOut /* = nullptr */)
{
    Assert(instr);
    Assert(instr->GetSrc1());

    // Continuation point after the check; also the return value so the caller can
    // resume lowering from here.
    IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    instr->InsertAfter(continueLabelInstr);

    if(!instr->HasBailOutInfo())
    {
        // No bailout info attached: the check is a no-op, drop the instruction.
        instr->Remove();
    }
    else
    {
        Assert(instr->GetBailOutKind() == IR::BailOnIntMin);
        // Note: src1 must be int32 at this point.
        if (instr->GetSrc1()->IsIntConstOpnd())
        {
            // For consts we can check the value at JIT time. Note: without this check we'll have to legalize the CMP instr.
            IR::IntConstOpnd* intConst = instr->UnlinkSrc1()->AsIntConstOpnd();
            if (intConst->GetValue() == INT_MIN)
            {
                // The constant is INT_MIN, so the bailout is unconditional.
                this->GenerateBailOut(instr, branchInstr, labelBailOut);
                intConst->Free(instr->m_func);
            }
            else
            {
                // The constant can never be INT_MIN; the whole check is dead.
                instr->Remove();
            }
        }
        else
        {
            // Runtime check: branch past the bailout when src1 != INT_MIN.
            InsertCompareBranch(instr->UnlinkSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, this->m_func), Js::OpCode::BrNeq_A, continueLabelInstr, instr);
            this->GenerateBailOut(instr, branchInstr, labelBailOut);
        }
    }
    return continueLabelInstr;
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerBailOnNotString
- /// Generate BailOut Lowerer Instruction if not a String
- ///
- ///----------------------------------------------------------------------------
- void Lowerer::LowerBailOnNotString(IR::Instr *instr)
- {
- if (!instr->GetSrc1()->GetValueType().IsString())
- {
- /*Creating a MOV instruction*/
- IR::Instr * movInstr = IR::Instr::New(instr->m_opcode, instr->UnlinkDst(), instr->UnlinkSrc1(), instr->m_func);
- instr->InsertBefore(movInstr);
- IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- IR::LabelInstr *helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- instr->InsertAfter(continueLabelInstr);
- IR::RegOpnd *srcReg = movInstr->GetSrc1()->IsRegOpnd() ? movInstr->GetSrc1()->AsRegOpnd() : nullptr;
- this->GenerateStringTest(srcReg, instr, helperLabelInstr, continueLabelInstr);
- this->GenerateBailOut(instr, nullptr, helperLabelInstr);
- }
- else
- {
- instr->ClearBailOutInfo();
- }
- }
// Splits 'bailOutKindToLower' off of 'instr' and lowers it into a separate BailOut
// instruction inserted right after 'instr'. The caller is expected to generate the
// code that decides whether the new bailout is actually taken.
void Lowerer::LowerOneBailOutKind(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKindToLower,
    const bool isInHelperBlock,
    const bool preserveBailOutKindInInstr)
{
    Assert(instr);
    Assert(bailOutKindToLower);
    // Either a single kind-bit, or a plain (non-bit) kind value.
    Assert(!(bailOutKindToLower & IR::BailOutKindBits) || !(bailOutKindToLower & bailOutKindToLower - 1u));

    Func *const func = instr->m_func;

    // Split bailouts other than the one being lowered here
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKindToLower & IR::BailOutKindBits
        ? bailOutKind & bailOutKindToLower
        : (bailOutKind & ~IR::BailOutKindBits) == bailOutKindToLower);
    if(!preserveBailOutKindInInstr)
    {
        bailOutKind -= bailOutKindToLower;
    }
    if(bailOutKind)
    {
        // 'instr' still has other bailout kinds to handle.
        if(bailOutInfo->bailOutInstr == instr)
        {
            // Create a shared bailout point for the split bailout checks
            IR::Instr *const sharedBail = instr->ShareBailOut();
            Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
            GenerateBailOut(sharedBail);
        }
        instr->SetBailOutKind(bailOutKind);
    }
    else
    {
        // Nothing left on 'instr'; detach the bailout info (ownership moves to the
        // new BailOut instruction created below).
        instr->UnlinkBailOutInfo();
        if(bailOutInfo->bailOutInstr == instr)
        {
            bailOutInfo->bailOutInstr = nullptr;
        }
    }

    IR::Instr *const insertBeforeInstr = instr->m_next;

    // (Bail out with the requested bail out kind)
    IR::BailOutInstr *const bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOut, bailOutKindToLower, bailOutInfo, func);
    bailOutInstr->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOutInstr);
    GenerateBailOut(bailOutInstr);

    // The caller is expected to generate code to decide whether to bail out
}
// Splits a combined BailOnNotArray instruction into its two component checks:
// the array-type check (returned through 'bailOnNotArrayRef', which stays as
// 'instr') and an optional missing-value check (returned through
// 'bailOnMissingValueRef', nullptr when the kind has no missing-value bit).
void Lowerer::SplitBailOnNotArray(
    IR::Instr *const instr,
    IR::Instr * *const bailOnNotArrayRef,
    IR::Instr * *const bailOnMissingValueRef)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());
    Assert(bailOnNotArrayRef);
    Assert(bailOnMissingValueRef);

    IR::Instr *&bailOnNotArray = *bailOnNotArrayRef;
    IR::Instr *&bailOnMissingValue = *bailOnMissingValueRef;
    bailOnNotArray = instr;
    bailOnMissingValue = nullptr;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    if(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray)
    {
        // Only the array check is present; nothing to split.
        return;
    }

    // Split array checks
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    if(bailOutInfo->bailOutInstr == instr)
    {
        // Create a shared bailout point for the split bailout checks
        IR::Instr *const sharedBail = instr->ShareBailOut();
        Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
        LowerBailTarget(sharedBail);
    }
    // Remove the missing-value bit from 'instr'; only the array-type kind remains.
    bailOutKind -= IR::BailOutOnMissingValue;
    Assert(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray);
    instr->SetBailOutKind(bailOutKind);

    Func *const func = bailOutInfo->bailOutFunc;
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // Split missing value checks
    bailOnMissingValue = IR::BailOutInstr::New(Js::OpCode::BailOnNotArray, IR::BailOutOnMissingValue, bailOutInfo, func);
    bailOnMissingValue->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOnMissingValue);
}
- IR::RegOpnd *Lowerer::LowerBailOnNotArray(IR::Instr *const instr)
- {
- Assert(instr);
- Assert(!instr->GetDst());
- Assert(instr->GetSrc1());
- Assert(instr->GetSrc1()->IsRegOpnd());
- Assert(!instr->GetSrc2());
- Func *const func = instr->m_func;
- // Label to jump to (or fall through to) when bailing out
- const auto bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
- instr->InsertBefore(bailOutLabel);
- // Label to jump to when not bailing out
- const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- instr->InsertAfter(skipBailOutLabel);
- // Do the array tests and jump to bailOutLabel if it's not an array. Fall through if it is an array.
- IR::RegOpnd *const arrayOpnd =
- GenerateArrayTest(instr->UnlinkSrc1()->AsRegOpnd(), bailOutLabel, bailOutLabel, bailOutLabel, true);
- // Skip bail-out when it is an array
- InsertBranch(Js::OpCode::Br, skipBailOutLabel, bailOutLabel);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- GenerateBailOut(instr);
- return arrayOpnd;
- }
- void Lowerer::LowerBailOnMissingValue(IR::Instr *const instr, IR::RegOpnd *const arrayOpnd)
- {
- Assert(instr);
- Assert(!instr->GetDst());
- Assert(!instr->GetSrc1());
- Assert(!instr->GetSrc2());
- Assert(arrayOpnd);
- Assert(arrayOpnd->GetValueType().IsArrayOrObjectWithArray());
- Func *const func = instr->m_func;
- // Label to jump to when not bailing out
- const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- instr->InsertAfter(skipBailOutLabel);
- // Skip bail-out when the array has no missing values
- //
- // test [array + offsetOf(objectArrayOrFlags)], Js::DynamicObjectFlags::HasNoMissingValues
- // jnz $skipBailOut
- const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func);
- CompileAssert(
- static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
- Js::DynamicObjectFlags::HasNoMissingValues);
- InsertTestBranch(
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
- IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
- Js::OpCode::BrNeq_A,
- skipBailOutLabel,
- instr);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- GenerateBailOut(instr);
- }
// Lowers BailOutOnInvalidatedArrayHeadSegment for a store-element that may call a
// helper: records the head segment (and its length) before the helper call, then
// after the call asks a runtime helper whether either changed, bailing out if so.
void Lowerer::LowerBailOnInvalidatedArrayHeadSegment(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the head segment or the head segment length changed during the helper call
    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym()))
    {
        // Record the array head segment before the helper call
        headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
    }
    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array head segment length before the helper call
        if(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym())
        {
            mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
        }
        else
        {
            headSegmentLengthBeforeHelperCall =
                Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
        }
    }
    helperCall:
        (Helper call and other bailout checks)
    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail
    // out
    invalidatedHeadSegment =
        JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
            headSegmentBeforeHelperCall,
            headSegmentLengthBeforeHelperCall,
            base)
    test invalidatedHeadSegment, invalidatedHeadSegment
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());
    const bool isArrayOrObjectWithArray = baseValueType.IsArrayOrObjectWithArray();
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *headSegmentBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        // The head segment is already tracked in a sym; reuse it.
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentSym(), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array head segment before the helper call
        // headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
        callInstr->SetDst(headSegmentBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentForArrayOrObjectWithArray);
    }

    IR::RegOpnd *headSegmentLengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        // The head segment length is already tracked in a sym; reuse it.
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentLengthSym(), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
    }
    else
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
        if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
        {
            // Record the array head segment length before the helper call
            // mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            InsertMove(
                headSegmentLengthBeforeHelperCallOpnd,
                IR::IndirOpnd::New(
                    headSegmentBeforeHelperCallOpnd,
                    Js::SparseArraySegmentBase::GetOffsetOfLength(),
                    TyUint32,
                    func),
                instr);
        }
        else
        {
            // Record the array head segment length before the helper call
            // headSegmentLengthBeforeHelperCall =
            //     Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            m_lowererMD.LoadHelperArgument(instr, headSegmentBeforeHelperCallOpnd);
            IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
            callInstr->SetDst(headSegmentLengthBeforeHelperCallOpnd);
            instr->InsertBefore(callInstr);
            m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentLength);
        }
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split off this bailout kind into its own bailout point after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayHeadSegment, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail out
    // invalidatedHeadSegment =
    //     JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
    //         headSegmentBeforeHelperCall,
    //         headSegmentLengthBeforeHelperCall,
    //         base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentLengthBeforeHelperCallOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedHeadSegmentOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedHeadSegmentOpnd(invalidatedHeadSegmentOpnd, func);
    callInstr->SetDst(invalidatedHeadSegmentOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayHeadSegment);

    // test invalidatedHeadSegment, invalidatedHeadSegment
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedHeadSegmentOpnd,
        invalidatedHeadSegmentOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    // $skipBailOut:
}
// Lowers BailOutOnInvalidatedArrayLength: records the array length before the
// helper call, then after the call asks a runtime helper whether the length
// changed, bailing out if so.
void Lowerer::LowerBailOnInvalidatedArrayLength(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the length changed during the helper call
    if(!(arrayOpnd && arrayOpnd.LengthSym() && arrayOpnd.LengthSym() != arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array length before the helper call
        lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
    }
    helperCall:
        (Helper call and other bailout checks)
    // If the array has a different length after the helper call, then this store needs to bail out
    invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    test invalidatedLength, invalidatedLength
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayLength)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArray());
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *lengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseLengthBeforeHelperCallOpnd;
    if(arrayOpnd && arrayOpnd->LengthSym() && arrayOpnd->LengthSym() != arrayOpnd->HeadSegmentLengthSym())
    {
        // The length is already tracked in a dedicated sym; reuse it.
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->LengthSym(), arrayOpnd->LengthSym()->GetType(), func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array length before the helper call
        // lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(TyUint32, func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
        callInstr->SetDst(lengthBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayLength);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split off this bailout kind into its own bailout point after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayLength, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different length after the helper call, then this store needs to bail out
    // invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, lengthBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedLengthOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedLengthOpnd(invalidatedLengthOpnd, func);
    callInstr->SetDst(invalidatedLengthOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayLength);

    // test invalidatedLength, invalidatedLength
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedLengthOpnd,
        invalidatedLengthOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnInvalidatedArrayLength)
    // $skipBailOut:
}
// Lowers BailOutOnMissingValue for a store that may call a helper: captures the
// array flags before the helper call (or uses the statically-known value), then
// after the call asks a runtime helper whether this operation created the first
// missing value, bailing out if so.
void Lowerer::LowerBailOnCreatedMissingValue(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the first missing value was created during the helper call
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
    }
    helperCall:
        (Helper call and other bailout checks)
    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
        (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
    createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    test createdFirstMissingValue, createdFirstMissingValue
    jz $skipBailOut
    (Bail out with IR::BailOutOnMissingValue)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());

    IR::Opnd *arrayFlagsBeforeHelperCallOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayFlagsBeforeHelperCallOpnd;
    // Array flags are pointer-sized; pick the matching IR type for this target.
    const IRType arrayFlagsType = sizeof(uintptr_t) == sizeof(uint32) ? TyUint32 : TyUint64;
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        // arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        arrayFlagsBeforeHelperCallOpnd = IR::RegOpnd::New(arrayFlagsType, func);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
        callInstr->SetDst(arrayFlagsBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayFlagsForArrayOrObjectWithArray);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split off this bailout kind into its own bailout point after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnMissingValue, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
    {
        // (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        // The pre-call flags are statically known; materialize them as a constant.
        Assert(!arrayFlagsBeforeHelperCallOpnd);
        arrayFlagsBeforeHelperCallOpnd =
            arrayFlagsType == TyUint32
                ? static_cast<IR::Opnd *>(
                    IR::IntConstOpnd::New(
                        static_cast<uintptr_t>(Js::DynamicObjectFlags::HasNoMissingValues),
                        arrayFlagsType,
                        func,
                        true))
                : IR::AddrOpnd::New(
                    reinterpret_cast<void *>(Js::DynamicObjectFlags::HasNoMissingValues),
                    IR::AddrOpndKindConstantVar,
                    func,
                    true);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
    }
    else
    {
        Assert(arrayFlagsBeforeHelperCallOpnd);
    }

    // createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayFlagsBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const createdFirstMissingValueOpnd = IR::RegOpnd::New(TyUint8, func);
    IR::AutoReuseOpnd autoReuseCreatedFirstMissingValueOpnd(createdFirstMissingValueOpnd, func);
    callInstr->SetDst(createdFirstMissingValueOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationCreatedFirstMissingValue);

    // test createdFirstMissingValue, createdFirstMissingValue
    // jz $skipBailOut
    InsertCompareBranch(
        createdFirstMissingValueOpnd,
        IR::IntConstOpnd::New(0, createdFirstMissingValueOpnd->GetType(), func, true),
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnMissingValue)
    // $skipBailOut:
}
// Lowers a BoundCheck/UnsignedBoundCheck instruction, which verifies
//     left <= right + offset   (src1 <= src2 + dst)
// and bails out when the check fails. The operands are simplified/folded where
// possible before emitting the final compare-and-branch.
void Lowerer::LowerBoundCheck(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BoundCheck || instr->m_opcode == Js::OpCode::UnsignedBoundCheck);

#if DBG
    if(instr->m_opcode == Js::OpCode::UnsignedBoundCheck)
    {
        // UnsignedBoundCheck is currently only supported for the pattern:
        //     UnsignedBoundCheck s1 <= s2 + c, where c == 0 || c == -1
        Assert(instr->GetSrc1()->IsRegOpnd());
        Assert(instr->GetSrc1()->IsInt32());
        Assert(instr->GetSrc2());
        Assert(!instr->GetSrc2()->IsIntConstOpnd());
        if(instr->GetDst())
        {
            const int32 c = instr->GetDst()->AsIntConstOpnd()->AsInt32();
            Assert(c == 0 || c == -1);
        }
    }
#endif

    const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKind == IR::BailOutOnArrayAccessHelperCall ||
        bailOutKind == IR::BailOutOnInvalidatedArrayHeadSegment ||
        bailOutKind == IR::BailOutOnFailedHoistedBoundCheck ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);

    // The success path branches to this label; the bailout code falls through.
    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
    LowerOneBailOutKind(instr, bailOutKind, false);
    Assert(!instr->HasBailOutInfo());
    IR::Instr *insertBeforeInstr = instr->m_next;

#if DBG
    const auto VerifyLeftOrRightOpnd = [&](IR::Opnd *const opnd, const bool isRightOpnd)
    {
        if(!opnd)
        {
            // Only the right operand may be absent.
            Assert(isRightOpnd);
            return;
        }
        if(opnd->IsIntConstOpnd())
        {
            Assert(!isRightOpnd || opnd->AsIntConstOpnd()->GetValue() != 0);
            return;
        }
        Assert(opnd->GetType() == TyInt32 || opnd->GetType() == TyUint32);
    };
#endif

    // left <= right + offset (src1 <= src2 + dst)
    IR::Opnd *leftOpnd = instr->UnlinkSrc1();
    DebugOnly(VerifyLeftOrRightOpnd(leftOpnd, false));
    IR::Opnd *rightOpnd = instr->UnlinkSrc2();
    DebugOnly(VerifyLeftOrRightOpnd(rightOpnd, true));
    Assert(!leftOpnd->IsIntConstOpnd() || rightOpnd && !rightOpnd->IsIntConstOpnd());
    IR::IntConstOpnd *offsetOpnd = instr->GetDst() ? instr->UnlinkDst()->AsIntConstOpnd() : nullptr;
    Assert(!offsetOpnd || offsetOpnd->GetValue() != 0);
    const bool doUnsignedCompare = instr->m_opcode == Js::OpCode::UnsignedBoundCheck;
    instr->Remove();

    Func *const func = insertBeforeInstr->m_func;

    IntConstType offset = offsetOpnd ? offsetOpnd->GetValue() : 0;
    Js::OpCode compareOpCode = Js::OpCode::BrLe_A;
    // The IntConstMin guard keeps 'offset = -offset' below from overflowing.
    if(leftOpnd->IsIntConstOpnd() && rightOpnd->IsRegOpnd() && offset != IntConstMin)
    {
        // Put the constants together: swap the operands, negate the offset, and invert the branch
        IR::Opnd *const tempOpnd = leftOpnd;
        leftOpnd = rightOpnd;
        rightOpnd = tempOpnd;
        offset = -offset;
        compareOpCode = Js::OpCode::BrGe_A;
    }

    if(rightOpnd->IsIntConstOpnd())
    {
        // Try to aggregate right + offset into a constant offset
        // NOTE(review): presumably IntConstMath::Add returns true on overflow, so
        // the fold happens only when the sum is representable — confirm against IntConstMath.
        IntConstType newOffset;
        if(!IntConstMath::Add(offset, rightOpnd->AsIntConstOpnd()->GetValue(), &newOffset))
        {
            offset = newOffset;
            rightOpnd = nullptr;
            offsetOpnd = nullptr;
        }
    }

    // Determine if the Add for (right + offset) is necessary, and the op code that will be used for the comparison
    IR::AutoReuseOpnd autoReuseAddResultOpnd;
    if(offset == -1 && compareOpCode == Js::OpCode::BrLe_A)
    {
        // left <= right - 1  is  left < right
        offset = 0;
        compareOpCode = Js::OpCode::BrLt_A;
    }
    else if(offset == 1 && compareOpCode == Js::OpCode::BrGe_A)
    {
        // left >= right + 1  is  left > right
        offset = 0;
        compareOpCode = Js::OpCode::BrGt_A;
    }
    else if(offset != 0 && rightOpnd)
    {
        // Need to Add (right + offset). If it overflows, bail out.
        IR::LabelInstr *const bailOutLabel = insertBeforeInstr->m_prev->GetOrCreateContinueLabel(true);
        insertBeforeInstr = bailOutLabel;

        //     mov  temp, right
        //     add  temp, offset
        //     jo   $bailOut
        // $bailOut: (insertBeforeInstr)
        Assert(!offsetOpnd || offsetOpnd->GetValue() == offset);
        IR::RegOpnd *const addResultOpnd = IR::RegOpnd::New(TyMachReg, func);
        autoReuseAddResultOpnd.Initialize(addResultOpnd, func);
        InsertAdd(
            true,
            addResultOpnd,
            rightOpnd,
            offsetOpnd ? offsetOpnd : IR::IntConstOpnd::New(offset, TyMachReg, func, true),
            insertBeforeInstr);
        InsertBranch(LowererMD::MDOverflowBranchOpcode, bailOutLabel, insertBeforeInstr);

        rightOpnd = addResultOpnd;
    }

    //     cmp  left, right
    //     jl[e] $skipBailOut
    // $bailOut:
    if(!rightOpnd)
    {
        // Everything folded into a constant; compare against it directly.
        rightOpnd = IR::IntConstOpnd::New(offset, TyInt32, func, true);
    }
    InsertCompareBranch(leftOpnd, rightOpnd, compareOpCode, doUnsignedCompare, skipBailOutLabel, insertBeforeInstr);
}
- IR::Instr *
- Lowerer::LowerBailTarget(IR::Instr * instr)
- {
- // this is just a bailout target, just skip over it and generate a label before so other bailout can jump here.
- IR::Instr * prevInstr = instr->m_prev;
- IR::LabelInstr * continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- instr->InsertAfter(continueLabelInstr);
- IR::BranchInstr * skipInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueLabelInstr, this->m_func);
- instr->InsertBefore(skipInstr);
- this->GenerateBailOut(instr);
- return prevInstr;
- }
// Splits 'instr' into the real operation plus a BailOnNotEqual that compares the
// implicit-call flags against ImplicitCall_None after the operation runs.
// 'instr' (by-reference) is updated to point at the new non-bailing copy of the
// operation; the returned instruction is the one now carrying the bailout.
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr *& instr)
{
    Assert(instr->IsPlainInstr() || instr->IsProfiledInstr());

    const auto bailOutKind = instr->GetBailOutKind();
    Assert(
        BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) ||
        bailOutKind == IR::BailOutOnLossyToInt32ImplicitCalls ||
        bailOutKind == IR::BailOutExpectingObject);

    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, instr);

    IR::Instr *disableImplicitCallsInstr = nullptr, *enableImplicitCallsInstr = nullptr;
    if(bailOutKind == IR::BailOutOnLossyToInt32ImplicitCalls ||
        bailOutKind == IR::BailOutOnImplicitCallsPreOp)
    {
        // Address of the thread context's disable-implicit-call flags byte.
        const auto disableImplicitCallAddress =
            m_lowererMD.GenerateMemRef(
                instr->m_func->GetScriptContext()->GetThreadContext()->GetAddressOfDisableImplicitFlags(),
                TyInt8,
                instr);

        // Disable implicit calls since they will be called after bailing out
        disableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(
                    // LossyToInt32 is a special case where we need to disable exceptions because the helper can throw where the interpreter wouldn't
                    bailOutKind == IR::BailOutOnLossyToInt32ImplicitCalls ? DisableImplicitCallAndExceptionFlag : DisableImplicitCallFlag,
                    TyInt8, instr->m_func, true),
                instr->m_func);
        instr->InsertBefore(disableImplicitCallsInstr);

        // Create instruction for re-enabling implicit calls
        enableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, instr->m_func, true),
                instr->m_func);
    }

    // Move the operation onto a fresh instruction inserted before the original,
    // which is repurposed below as the bailout check.
    IR::Instr * bailOutInstr = instr;
    instr = IR::Instr::New(instr->m_opcode, instr->m_func);
    bailOutInstr->TransferTo(instr);
    bailOutInstr->InsertBefore(instr);

    if(disableImplicitCallsInstr)
    {
        // Re-enable implicit calls
        Assert(enableImplicitCallsInstr);
        bailOutInstr->InsertBefore(enableImplicitCallsInstr);

        // Lower both instructions. Lowering an instruction may free the instruction's original operands, so do that last.
        LowererMD::ChangeToAssign(disableImplicitCallsInstr);
        LowererMD::ChangeToAssign(enableImplicitCallsInstr);
    }

    // Repurpose the original instruction: bail out if the flags are no longer None.
    bailOutInstr->m_opcode = Js::OpCode::BailOnNotEqual;
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    return bailOutInstr;
}
- IR::Instr *
- Lowerer::SplitBailOnImplicitCall(IR::Instr * instr, IR::Instr * helperCall, IR::Instr * insertBeforeInstr)
- {
- IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
- const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
- IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
- const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);
- // Reset the implicit call flag on every helper call
- LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, helperCall->m_prev);
- BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
- if (bailOutInfo->bailOutInstr == instr)
- {
- bailOutInfo->bailOutInstr = nullptr;
- }
- IR::Instr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, IR::BailOutOnImplicitCalls, bailOutInfo, bailOutInfo->bailOutFunc);
- bailOutInstr->SetSrc1(implicitCallFlags);
- bailOutInstr->SetSrc2(noImplicitCall);
- insertBeforeInstr->InsertBefore(bailOutInstr);
- instr->ClearBailOutInfo();
- return bailOutInstr;
- }
// Split out bailout for debugger into separate bailout instr out of real instr which has bailout for debugger.
// Returns the instr which needs to lower next, which would normally be last of splitted instr.
// IR on input:
// - Real instr with BailOutInfo but it's opcode is not BailForDebugger.
//   - debugger bailout is not shared. In this case we'll have debugger bailout in instr->GetBailOutKind().
//   - debugger bailout is shared. In this case we'll have debugger bailout in instr->GetAuxBailOutKind().
// IR on output:
// - Either of:
//   - real instr, then debuggerBailout -- in case we only had debugger bailout.
//   - real instr with BailOutInfo w/o debugger bailout, then debuggerBailout, then sharedBailout -- in case bailout for debugger was shared w/some other b.o.
IR::Instr* Lowerer::SplitBailForDebugger(IR::Instr* instr)
{
    Assert(m_func->IsJitInDebugMode() && instr->m_opcode != Js::OpCode::BailForDebugger);

    IR::BailOutKind debuggerBailOutKind; // Used for splitted instr.
    BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
    IR::Instr* sharedBailoutInstr = nullptr;

    if (instr->GetBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is not shared.
        Assert(!instr->HasAuxBailOut());
        AssertMsg(!(instr->GetBailOutKind() & ~IR::BailOutForDebuggerBits), "There should only be debugger bailout bits in the instr.");

        debuggerBailOutKind = instr->GetBailOutKind() & IR::BailOutForDebuggerBits;

        // There is no non-debugger bailout in the instr, still can't clear bailout info, as we use it for the splitted instr,
        // but we need to mark the bailout as hasn't been generated yet.
        if (bailOutInfo->bailOutInstr == instr)
        {
            // null will be picked up by following BailOutInstr::New which will change it to new bailout instr.
            bailOutInfo->bailOutInstr = nullptr;
        }
        // Remove bailout info from the original instr which from now on becomes just regular instr, w/o deallocating bailout info.
        instr->ClearBailOutInfo();
    }
    else if (instr->IsBranchInstr() && instr->HasBailOutInfo() && instr->HasAuxBailOut())
    {
        // Branches with shared bailout are lowered in LowerCondBranchCheckBailOut,
        // can't do here because we need to use BranchBailOutRecord but don't know which BrTrue/BrFalse to use for it.
        debuggerBailOutKind = IR::BailOutInvalid;
    }
    else if (instr->HasAuxBailOut() && instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is shared.
        AssertMsg(!(instr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");

        debuggerBailOutKind = instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;

        // This will insert SharedBail instr after current instr and set bailOutInfo->bailOutInstr to the shared one.
        sharedBailoutInstr = instr->ShareBailOut();

        // As we extracted aux bail out, invalidate all tracks of it in the instr.
        instr->ResetAuxBailOut();
    }
    else
    {
        AssertMsg(FALSE, "shouldn't get here");
        debuggerBailOutKind = IR::BailOutInvalid;
    }

    if (debuggerBailOutKind != IR::BailOutInvalid)
    {
        // Emit the dedicated debugger bailout right after the (now plain) instr.
        IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
            Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
        instr->InsertAfter(debuggerBailoutInstr);

        // Since we go backwards, we need to process extracted out bailout for debugger first.
        instr = sharedBailoutInstr ? sharedBailoutInstr : debuggerBailoutInstr;
    }

    return instr;
}
- IR::Instr *
- Lowerer::SplitBailOnResultCondition(IR::Instr *const instr) const
- {
- Assert(instr);
- Assert(!instr->IsLowered());
- Assert(
- instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
- instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
- const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
- instr->TransferTo(nonBailOutInstr);
- instr->InsertBefore(nonBailOutInstr);
- return nonBailOutInstr;
- }
- void
- Lowerer::LowerBailOnResultCondition(
- IR::Instr *const instr,
- IR::LabelInstr * *const bailOutLabel,
- IR::LabelInstr * *const skipBailOutLabel)
- {
- Assert(instr);
- Assert(
- instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
- instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
- Assert(bailOutLabel);
- Assert(skipBailOutLabel);
- // Label to jump to (or fall through to) when bailing out. The actual bailout label
- // (bailOutInfo->bailOutInstr->AsLabelInstr()) may be shared, and code may be added to restore values before the jump to the
- // actual bailout label in the cloned bailout case, so always create a new bailout label for this particular path.
- *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /* isOpHelper */);
- instr->InsertBefore(*bailOutLabel);
- // Label to jump to when not bailing out
- *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
- instr->InsertAfter(*skipBailOutLabel);
- // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
- // ordering instructions anymore.
- GenerateBailOut(instr);
- }
- void
- Lowerer::PreserveSourcesForBailOnResultCondition(IR::Instr *const instr, IR::LabelInstr *const skipBailOutLabel) const
- {
- Assert(instr);
- Assert(!instr->IsLowered());
- Assert(!instr->HasBailOutInfo());
- // Since this instruction may bail out, writing to the destination cannot overwrite one of the sources, or we may lose one
- // of the sources needed to redo the equivalent byte code instruction. Determine if the sources need to be preserved.
- const auto dst = instr->GetDst();
- Assert(dst);
- const auto dstStackSym = dst->GetStackSym();
- if(!dstStackSym || !dstStackSym->HasByteCodeRegSlot())
- {
- // We only need to ensure that a byte-code source is not being overwritten
- return;
- }
- switch(instr->m_opcode)
- {
- // The sources of these instructions don't need restoring, or will be restored in the bailout path
- case Js::OpCode::Neg_I4:
- // In case of overflow or zero, the result is the same as the operand
- case Js::OpCode::Add_I4:
- case Js::OpCode::Sub_I4:
- // In case of overflow, there is always enough information to restore the operands
- return;
- }
- Assert(instr->GetSrc1());
- if(!dst->IsEqual(instr->GetSrc1()) && !(instr->GetSrc2() && dst->IsEqual(instr->GetSrc2())))
- {
- // The destination is different from the sources
- return;
- }
- // The destination is the same as one of the sources and the original sources cannot be restored after the instruction, so
- // use a temporary destination for the result and move it back to the original destination after deciding not to bail out
- LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
- }
// Dispatches the machine-dependent lowering of an int arithmetic instruction whose
// result condition (encoded in bailOutKind) triggers a bailout. The bailout label
// must immediately follow 'instr'; the skip label marks the non-bailing resume point.
void
Lowerer::LowerInstrWithBailOnResultCondition(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    Assert(instr);
    Assert(!instr->IsLowered());
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    // Preserve sources that are overwritten by the instruction if needed
    PreserveSourcesForBailOnResultCondition(instr, skipBailOutLabel);

    // Lower the instruction
    switch(instr->m_opcode)
    {
        case Js::OpCode::Neg_I4:
            LowererMD::LowerInt4NegWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Add_I4:
            LowererMD::LowerInt4AddWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Sub_I4:
            LowererMD::LowerInt4SubWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Mul_I4:
            LowererMD::LowerInt4MulWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Rem_I4:
            // Unlike the cases above, Rem lowering goes through the m_lowererMD
            // instance (non-static) — presumably it needs per-lowerer state; confirm.
            m_lowererMD.LowerInt4RemWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        default:
            Assert(false); // not implemented
            __assume(false);
    }
}
- void
- Lowerer::GenerateObjectTestAndTypeLoad(IR::Instr *instrLdSt, IR::RegOpnd *opndBase, IR::RegOpnd *opndType, IR::LabelInstr *labelHelper)
- {
- IR::IndirOpnd *opndIndir;
- if (!opndBase->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(opndBase, instrLdSt, labelHelper);
- }
- opndIndir = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- m_lowererMD.CreateAssign(opndType, opndIndir, instrLdSt);
- }
// Expands 'instr' (which carries BailOutInfo) into the actual bailout sequence:
// either a jump to an already-generated/cloned/shared bailout target, or a fresh
// bailout record plus a CALL into the save-all-registers bailout helper.
// 'branchInstr' is non-null for branch bailouts (the record then captures the
// condition and both byte-code targets). Returns the label that begins the
// bailout code for this instruction.
IR::LabelInstr *
Lowerer::GenerateBailOut(IR::Instr * instr, IR::BranchInstr * branchInstr, IR::LabelInstr *bailOutLabel)
{
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
    IR::LabelInstr *collectRuntimeStatsLabel = nullptr;
    if (instr->IsCloned())
    {
        Assert(bailOutInstr != instr);

        // jump to the cloned bail out label
        IR::LabelInstr * bailOutLabelInstr = bailOutInstr->AsLabelInstr();
        IR::BranchInstr * bailOutBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutLabelInstr, this->m_func);
        instr->InsertBefore(bailOutBranch);
        instr->Remove();
        return bailOutLabel;
    }

    if (bailOutInstr != instr)
    {
        // this bailOutInfo is shared, just jump to the bailout target

        // Add helper label to trigger layout.
        collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr->InsertBefore(collectRuntimeStatsLabel);

        // Record this instruction's bailout kind into the shared record before
        // jumping, since the shared target serves several bailout kinds.
        IR::MemRefOpnd *pIndexOpndForBailOutKind =
            IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfBailOutKind(), TyUint32, this->m_func, IR::AddrOpndKindDynamicBailOutKindRef);
        m_lowererMD.CreateAssign(
            pIndexOpndForBailOutKind, IR::IntConstOpnd::New(instr->GetBailOutKind(), pIndexOpndForBailOutKind->GetType(), this->m_func), instr);

        // No point in doing this for BailOutFailedEquivalentTypeCheck or BailOutFailedEquivalentFixedFieldTypeCheck,
        // because the respective inline cache is already polymorphic, anyway.
        if (instr->GetBailOutKind() == IR::BailOutFailedTypeCheck || instr->GetBailOutKind() == IR::BailOutFailedFixedFieldTypeCheck)
        {
            // We have a type check bailout that shares a bailout record with other instructions.
            // Generate code to write the cache index into the bailout record before we jump to the call site.
            Assert(bailOutInfo->polymorphicCacheIndex != (uint)-1);
            Assert(bailOutInfo->bailOutRecord);
            IR::MemRefOpnd *pIndexOpnd =
                IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfPolymorphicCacheIndex(), TyUint32, this->m_func);
            m_lowererMD.CreateAssign(
                pIndexOpnd, IR::IntConstOpnd::New(bailOutInfo->polymorphicCacheIndex, TyUint32, this->m_func), instr);
        }

        // GenerateBailOut should have replaced this as a label as we should have already lowered
        // the main bailOutInstr.
        IR::LabelInstr * bailOutTargetLabel = bailOutInstr->AsLabelInstr();
#if DBG
        if (bailOutTargetLabel->m_noHelperAssert)
        {
            collectRuntimeStatsLabel->m_noHelperAssert = true;
        }
#endif
        Assert(bailOutLabel == nullptr || bailOutLabel == bailOutTargetLabel);

        IR::BranchInstr * branchInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutTargetLabel, this->m_func);
        instr->InsertAfter(branchInstr);
        instr->Remove();
        return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
    }

    // The bailout hasn't be generated yet.
    Assert(!bailOutInstr->IsLabelInstr());

    // Add helper label to trigger layout.
    collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr->InsertBefore(collectRuntimeStatsLabel);

    // capture the condition for this bailout
    if (bailOutLabel == nullptr)
    {
        // Create a label and place it in the bailout info so that shared bailout point can jump to this one
        if (instr->m_prev->IsLabelInstr())
        {
            // Reuse the label that already precedes the bailout.
            bailOutLabel = instr->m_prev->AsLabelInstr();
            Assert(bailOutLabel->isOpHelper);
        }
        else
        {
            bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr->InsertBefore(bailOutLabel);
        }
    }
    else
    {
        instr->InsertBefore(bailOutLabel);
    }

#if DBG
    if (bailOutInstr->m_opcode == Js::OpCode::BailOnNoSimdTypeSpec || bailOutInstr->m_opcode == Js::OpCode::BailOnNoProfile || bailOutInstr->m_opcode == Js::OpCode::BailOnException || bailOutInstr->m_opcode == Js::OpCode::Yield)
    {
        bailOutLabel->m_noHelperAssert = true;
    }
#endif

    // Later bailouts sharing this info will branch to this label instead of
    // generating a second record (see the bailOutInstr != instr path above).
    bailOutInfo->bailOutInstr = bailOutLabel;
    bailOutLabel->m_hasNonBranchRef = true;

    // Create the bail out record
    Assert(bailOutInfo->bailOutRecord == nullptr);
    BailOutRecord * bailOutRecord;
    IR::JnHelperMethod helperMethod;
    if (branchInstr != nullptr)
    {
        Assert(branchInstr->GetSrc2() == nullptr);
        Assert(branchInstr->GetDst() == nullptr);

        IR::LabelInstr * targetLabel = branchInstr->GetTarget();
        Assert(targetLabel->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);

        uint32 trueOffset;
        uint32 falseOffset;
        IR::Opnd *condOpnd = branchInstr->GetSrc1();
        bool invertTarget = (branchInstr->m_opcode == Js::OpCode::BrFalse_A);

        if (bailOutInfo->isInvertedBranch)
        {
            // Flip the condition
            IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, condOpnd, condOpnd, IR::IntConstOpnd::New(1, TyInt32, instr->m_func), instr->m_func);
            instr->InsertBefore(subInstr);
            this->m_lowererMD.EmitInt4Instr(subInstr);
            // We should really do a DEC/NEG for a full 2's complement flip from 0/1 to 1/0,
            // but DEC is sufficient to flip from 0/1 to -1/0, which is false/true to true/false...
            //instr->InsertBefore(IR::Instr::New(Js::OpCode::Neg_I4, condOpnd, condOpnd, instr->m_func));
            invertTarget = invertTarget ? false : true;
        }

        // Map the machine branch back to the byte code's true/false resume offsets.
        if (!invertTarget)
        {
            trueOffset = targetLabel->GetByteCodeOffset();
            falseOffset = bailOutInfo->bailOutOffset;
        }
        else
        {
            falseOffset = targetLabel->GetByteCodeOffset();
            trueOffset = bailOutInfo->bailOutOffset;
        }

        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BranchBailOutRecord, trueOffset, falseOffset, branchInstr->GetByteCodeReg(), instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBranchBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBranchBailOut;
        }
#endif

        // Save the condition. The register allocator will generate arguments.
        bailOutInfo->branchConditionOpnd = branchInstr->GetSrc1()->Copy(branchInstr->m_func);
    }
    else
    {
        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBailOut;
        }
#endif
    }

    // Save the bailout record. The register allocator will generate arguments.
    bailOutInfo->bailOutRecord = bailOutRecord;
#if ENABLE_DEBUG_CONFIG_OPTIONS
    bailOutRecord->bailOutOpcode = bailOutInfo->bailOutOpcode;
#endif

    // Call the bail out wrapper
    instr->m_opcode = Js::OpCode::Call;

    if(instr->GetDst())
    {
        // To facilitate register allocation, don't assign a destination. The result will anyway go into the return register,
        // but the register allocator does not need to kill that register for the call.
        instr->FreeDst();
    }
    instr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
    m_lowererMD.LowerCall(instr, 0);

    if (bailOutInstr->GetBailOutKind() != IR::BailOutForGeneratorYield)
    {
        // Defer introducing the JMP to epilog until LowerPrologEpilog phase for Yield bailouts so
        // that Yield does not appear to have flow out of its containing block for the RegAlloc phase.
        // Yield is an unconditional bailout but we want to simulate the flow as if the Yield were
        // just like a call.
        GenerateJumpToEpilogForBailOut(bailOutInfo, instr);
    }

    return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
}
- void
- Lowerer::GenerateJumpToEpilogForBailOut(BailOutInfo * bailOutInfo, IR::Instr *instr)
- {
- IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
- // JMP to the epilog
- IR::LabelInstr * exitTargetInstr;
- if (exitPrevInstr->IsLabelInstr())
- {
- exitTargetInstr = exitPrevInstr->AsLabelInstr();
- }
- else
- {
- exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- exitPrevInstr->InsertAfter(exitTargetInstr);
- }
- exitTargetInstr = m_lowererMD.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
- IR::Instr * instrAfter = instr->m_next;
- IR::BranchInstr * exitInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, this->m_func);
- instrAfter->InsertBefore(exitInstr);
- }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastCondBranch
///
///----------------------------------------------------------------------------
bool
Lowerer::GenerateFastCondBranch(IR::BranchInstr * instrBranch, bool *pIsHelper)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints
    //
    // Given:
    //
    //      Brxx_A $L, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    //      Jxx $L, src1, src2
    //      JMP $fallthru
    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:
    //
    // Returns true when the caller still needs to generate the helper call;
    // false when the branch was fully handled inline. *pIsHelper is set to true
    // when code from here on is on the helper path.

    IR::LabelInstr *    labelHelper = nullptr;
    IR::LabelInstr *    labelFallThru;
    IR::BranchInstr *   instr;
    IR::Opnd *          opndSrc1;
    IR::Opnd *          opndSrc2;

    opndSrc1 = instrBranch->GetSrc1();
    opndSrc2 = instrBranch->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "BrC expects 2 src operands");

    // Not tagged ints? No fast path; leave the branch for the helper sequence.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints?
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        // Not provably tagged ints: emit the runtime tagged-int pair test.
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        this->m_lowererMD.GenerateSmIntPairTest(instrBranch, opndSrc1, opndSrc2, labelHelper);
    }

    //      Jxx $L, src1, src2
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    instr = IR::BranchInstr::New(instrBranch->m_opcode, instrBranch->GetTarget(), opndSrc1, opndSrc2, this->m_func);
    instrBranch->InsertBefore(instr);
    this->m_lowererMD.LowerCondBranch(instr);

    if (isTaggedInts)
    {
        instrBranch->Remove();

        // Skip lowering call to helper
        return false;
    }

    //      JMP $fallthru
    IR::Instr *instrNext = instrBranch->GetNextRealInstrOrLabel();
    if (instrNext->IsLabelInstr())
    {
        labelFallThru = instrNext->AsLabelInstr();
    }
    else
    {
        // NOTE(review): the commented-out *pIsHelper suggests the fall-through
        // label's helper flag may have been intended to inherit *pIsHelper;
        // it is hard-coded to FALSE here — confirm intent before changing.
        labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /**pIsHelper*/FALSE);
        instrBranch->InsertAfter(labelFallThru);
    }
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallThru, this->m_func);
    instrBranch->InsertBefore(instr);

    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:

    AssertMsg(labelHelper, "Should not be NULL");
    instrBranch->InsertBefore(labelHelper);

    *pIsHelper = true;
    return true;
}
// Lowers InlineeStart: rewrites the inlinee's ArgOut chain into plain assigns and
// materializes the inlinee frame's meta args (function object, argc, ...) into
// their stack slots. If the arguments stack was optimized (m_hasInlineArgsOpt),
// the instruction is kept (operand-free) for later phases; otherwise it is removed.
void
Lowerer::LowerInlineeStart(IR::Instr * inlineeStartInstr)
{
    IR::Opnd *linkOpnd = inlineeStartInstr->GetSrc2();
    if (!linkOpnd)
    {
        // No ArgOut chain: only legal when inline-args optimization removed it.
        Assert(inlineeStartInstr->m_func->m_hasInlineArgsOpt);
        return;
    }

    AssertMsg(inlineeStartInstr->m_func->firstActualStackOffset != -1, "This should have been already done in backward pass");

    // NOTE(review): 'startCall' is assigned on every iteration but never read
    // afterwards — looks like a leftover; confirm before removing.
    IR::Instr *startCall;

    // Free the argOut links and lower them to MOVs
    inlineeStartInstr->IterateArgInstrs([&](IR::Instr* argInstr){
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        startCall = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        argInstr->FreeSrc2();
#pragma prefast(suppress:6235, "Non-Zero Constant in Condition")
        if (!PHASE_ON(Js::EliminateArgoutForInlineePhase, this->m_func) || inlineeStartInstr->m_func->GetJnFunction()->GetHasOrParentHasArguments())
        {
            m_lowererMD.ChangeToAssign(argInstr);
        }
        else
        {
            // The arg store can be elided; retag so later phases treat it as a built-in inline arg.
            argInstr->m_opcode = Js::OpCode::ArgOut_A_InlineBuiltIn;
        }

        return false;
    });

    IR::Instr *argInsertInstr = inlineeStartInstr;
    uint i = 0;
    inlineeStartInstr->IterateMetaArgs( [&] (IR::Instr* metaArg)
    {
        if(i == 0)
        {
            // Null out the next (nested) inlinee frame's argc slot first —
            // presumably so a stack walk doesn't see a stale count; confirm.
            LowererMD::CreateAssign(metaArg->m_func->GetNextInlineeFrameArgCountSlotOpnd(),
                IR::AddrOpnd::NewNull(metaArg->m_func),
                argInsertInstr);
        }

        if (i == Js::Constants::InlineeMetaArgIndex_FunctionObject)
        {
            metaArg->SetSrc1(inlineeStartInstr->GetSrc1());
        }
        metaArg->Unlink();
        argInsertInstr->InsertBefore(metaArg);
        IR::Instr* prev = metaArg->m_prev;
        m_lowererMD.ChangeToAssign(metaArg);
        if (i == Js::Constants::InlineeMetaArgIndex_Argc)
        {
            // ChangeToAssign may have split the assign (ARM emits an LDIMM first);
            // find the instruction that actually carries the argc immediate.
#if defined(_M_IX86) || defined(_M_X64)
            Assert(metaArg == prev->m_next);
#else //defined(_M_ARM)
            Assert(prev->m_next->m_opcode == Js::OpCode::LDIMM);
#endif
            metaArg = prev->m_next;
            Assert(metaArg->GetSrc1()->AsAddrOpnd()->m_dontEncode == true);
            metaArg->isInlineeEntryInstr = true;
            LowererMD::Legalize(metaArg);
        }
        argInsertInstr = metaArg;
        i++;
        return false;
    });

    if (inlineeStartInstr->m_func->m_hasInlineArgsOpt)
    {
        // Keep the instruction for later phases, but strip its operands.
        inlineeStartInstr->FreeSrc1();
        inlineeStartInstr->FreeSrc2();
        inlineeStartInstr->FreeDst();
    }
    else
    {
        inlineeStartInstr->Remove();
    }
}
- void
- Lowerer::LowerInlineeEnd(IR::Instr *instr)
- {
- Assert(instr->m_func->IsInlinee());
- Assert(m_func->IsTopFunc());
- // No need to emit code if the function wasn't marked as having implicit calls or bailout. Dead-Store should have removed inline overhead.
- if (instr->m_func->GetHasImplicitCalls() || PHASE_OFF(Js::DeadStorePhase, this->m_func))
- {
- LowererMD::CreateAssign(instr->m_func->GetInlineeArgCountSlotOpnd(),
- IR::AddrOpnd::New(0, IR::AddrOpndKindConstantVar, instr->m_func),
- instr);
- }
- // Keep InlineeEnd around as it is used by register allocator, if we have optimized the arguments stack
- if (instr->m_func->m_hasInlineArgsOpt)
- {
- instr->FreeSrc1();
- }
- else
- {
- instr->Remove();
- }
- }
- IR::Instr *
- Lowerer::LoadFloatFromNonReg(IR::Opnd * opndSrc, IR::Opnd * opndDst, IR::Instr * instrInsert)
- {
- double value;
- if (opndSrc->IsAddrOpnd())
- {
- Js::Var var = opndSrc->AsAddrOpnd()->m_address;
- if (Js::TaggedInt::Is(var))
- {
- value = Js::TaggedInt::ToDouble(var);
- }
- else
- {
- value = Js::JavascriptNumber::GetValue(var);
- }
- }
- else if (opndSrc->IsIntConstOpnd())
- {
- if (opndSrc->IsUInt32())
- {
- value = (double)(uint32)opndSrc->AsIntConstOpnd()->GetValue();
- }
- else
- {
- value = (double)opndSrc->AsIntConstOpnd()->GetValue();
- }
- }
- else if (opndSrc->IsFloatConstOpnd())
- {
- value = (double)opndSrc->AsFloatConstOpnd()->m_value;
- }
- else
- {
- AssertMsg(0, "Unexpected opnd type");
- value = 0;
- }
- return LowererMD::LoadFloatValue(opndDst, value, instrInsert);
- }
// Emit code to extract an int32 value from an untagged (heap) var via a helper
// call. On return, execution falls through to instrLoad when the helper yields
// zero (i.e. the var did not hold a nonzero int32); the caller generates the
// slow-path code there.
void
Lowerer::LoadInt32FromUntaggedVar(IR::Instr *const instrLoad)
{
    Assert(instrLoad);
    Assert(instrLoad->GetDst());
    Assert(instrLoad->GetDst()->IsRegOpnd());
    Assert(instrLoad->GetDst()->IsInt32());
    Assert(instrLoad->GetSrc1());
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->IsVar());
    Assert(!instrLoad->GetSrc2());

    //     push src
    //     int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    //     test int32Value, int32Value
    //     jne $done
    //     (fall through to 'instrLoad'; caller will generate code here)
    // $done:
    //     (rest of program)

    Func *const func = instrLoad->m_func;
    IR::LabelInstr *const doneLabel = instrLoad->GetOrCreateContinueLabel();

    // push src
    // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    // The destination sym doubles as the helper's return value register.
    StackSym *const int32ValueSym = instrLoad->GetDst()->AsRegOpnd()->m_sym;
    IR::Instr *const instr =
        IR::Instr::New(
            Js::OpCode::Call,
            IR::RegOpnd::New(int32ValueSym, TyInt32, func),
            instrLoad->GetSrc1()->AsRegOpnd(),
            func);
    instrLoad->InsertBefore(instr);
    LowerUnaryHelper(instr, IR::HelperGetNonzeroInt32Value_NoTaggedIntCheck);

    // test int32Value, int32Value
    // jne $done
    // Helper returns 0 when the var is not a nonzero int32.
    InsertCompareBranch(
        IR::RegOpnd::New(int32ValueSym, TyInt32, func),
        IR::IntConstOpnd::New(0, TyInt32, func, true),
        Js::OpCode::BrNeq_A,
        doneLabel,
        instrLoad);
}
- bool
- Lowerer::GetValueFromIndirOpnd(IR::IndirOpnd *indirOpnd, IR::Opnd **pValueOpnd, IntConstType *pValue)
- {
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- IR::Opnd* valueOpnd = nullptr;
- IntConstType value = 0;
- if (!indexOpnd)
- {
- value = (IntConstType)indirOpnd->GetOffset();
- if (value < 0)
- {
- // Can't do fast path for negative index
- return false;
- }
- valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
- }
- else if (indexOpnd->m_sym->IsIntConst())
- {
- value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
- if (value < 0)
- {
- // Can't do fast path for negative index
- return false;
- }
- valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
- }
- *pValueOpnd = valueOpnd;
- *pValue = value;
- return true;
- }
// Lower BrOnObject_A inline: branch to the target when the operand is a
// non-primitive object (type id above the last primitive type id); fall
// through otherwise. Tagged values fall through via the object test.
void
Lowerer::GenerateFastBrOnObject(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::BrOnObject_A);

    IR::RegOpnd *object = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::LabelInstr *done = instr->GetOrCreateContinueLabel();
    IR::LabelInstr *target = instr->AsBranchInstr()->GetTarget();
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    IR::IntConstOpnd *typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, instr->m_func);

    // Non-register sources are first copied into a register so they can be
    // tagged-tested and dereferenced.
    if (!object)
    {
        object = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(object, instr->GetSrc1(), instr);
    }

    // TEST object, 1
    // JNE $done
    // MOV typeRegOpnd, [object + offset(Type)]
    // CMP [typeRegOpnd + offset(TypeId)], TypeIds_LastJavascriptPrimitiveType
    // JGT $target
    // $done:

    m_lowererMD.GenerateObjectTest(object, instr, done);

    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               instr);

    InsertCompareBranch(
        IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
        typeIdOpnd, Js::OpCode::BrGt_A, target, instr);

    // The original branch instruction is fully replaced by the sequence above.
    instr->Remove();
}
// Branch to 'target' when the object's type handler stores inline slots in the
// object header (header-inlined layout), in which case there is no separate
// object array to load.
void Lowerer::GenerateObjectHeaderInliningTest(IR::RegOpnd *baseOpnd, IR::LabelInstr * target,IR::Instr *insertBeforeInstr)
{
    Assert(baseOpnd);
    Assert(target);
    AssertMsg(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray,
        "Why are we here, when the object is already known not to have an ObjArray");
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // mov type, [base + offsetOf(type)]
    // 'opnd' is reused for each step of the pointer chase: type, then handler.
    IR::RegOpnd *const opnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowererMD.CreateAssign(
        opnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfType(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    // mov typeHandler, [type + offsetOf(typeHandler)]
    m_lowererMD.CreateAssign(
        opnd,
        IR::IndirOpnd::New(
            opnd,
            Js::DynamicType::GetOffsetOfTypeHandler(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    // The offset-of-inline-slots field is a 16-bit value on the type handler.
    IR::IndirOpnd * offsetOfInlineSlotOpnd = IR::IndirOpnd::New(opnd,Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyInt16, func);
    IR::IntConstOpnd * objHeaderInlinedSlotOffset = IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyInt16, func);

    // CMP [typeHandler + offsetOf(offsetOfInlineSlots)], objHeaderInlinedSlotOffset
    InsertCompareBranch(
        offsetOfInlineSlotOpnd,
        objHeaderInlinedSlotOffset,
        Js::OpCode::BrEq_A,
        target,
        insertBeforeInstr);
}
- void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper)
- {
- Assert(srcReg);
- if (!srcReg->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(srcReg, instrInsert, labelHelper);
- }
- // CMP [srcReg], Js::DynamicObject::`vtable'
- // JNE $helper
- InsertCompareBranch(
- IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
- LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
- Js::OpCode::BrNeq_A,
- labelHelper,
- instrInsert);
- }
// Per-ObjectType vtable used for array type checks. Entries must stay in the
// same order as the ObjectType enum; non-array object types get VtableInvalid.
// Mixed typed-array types map to the non-virtual vtable (see
// GetArrayVtableAddress for the virtual-flavor lookup).
const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ VTableValue::VtableInvalid,
    /* ObjectType::Object */ VTableValue::VtableInvalid,
    /* ObjectType::RegExp */ VTableValue::VtableInvalid,
    /* ObjectType::ObjectWithArray */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Array */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Int8Array */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8Array */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedArray */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16Array */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16Array */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32Array */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32Array */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32Array */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64Array */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int8VirtualArray */ VTableValue::VtableInt8VirtualArray,
    /* ObjectType::Uint8VirtualArray */ VTableValue::VtableUint8VirtualArray,
    /* ObjectType::Uint8ClampedVirtualArray */ VTableValue::VtableUint8ClampedVirtualArray,
    /* ObjectType::Int16VirtualArray */ VTableValue::VtableInt16VirtualArray,
    /* ObjectType::Uint16VirtualArray */ VTableValue::VtableUint16VirtualArray,
    /* ObjectType::Int32VirtualArray */ VTableValue::VtableInt32VirtualArray,
    /* ObjectType::Uint32VirtualArray */ VTableValue::VtableUint32VirtualArray,
    /* ObjectType::Float32VirtualArray */ VTableValue::VtableFloat32VirtualArray,
    /* ObjectType::Float64VirtualArray */ VTableValue::VtableFloat64VirtualArray,
    /* ObjectType::Int8MixedArray */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8MixedArray */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedMixedArray */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16MixedArray */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16MixedArray */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32MixedArray */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32MixedArray */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32MixedArray */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64MixedArray */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int64Array */ VTableValue::VtableInt64Array,
    /* ObjectType::Uint64Array */ VTableValue::VtableUint64Array,
    /* ObjectType::BoolArray */ VTableValue::VtableBoolArray,
    /* ObjectType::CharArray */ VTableValue::VtableCharArray
};
// Per-ObjectType byte offset of the head segment (JavascriptArray) or data
// buffer (typed arrays) within the object. Entries must stay in ObjectType
// enum order; -1 marks object types with no array storage.
const uint32 Lowerer::OffsetsOfHeadSegment[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<uint32>(-1),
    /* ObjectType::Object */ static_cast<uint32>(-1),
    /* ObjectType::RegExp */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Array */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Int8Array */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8Array */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedArray */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16Array */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16Array */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32Array */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32Array */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32Array */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64Array */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int8VirtualArray */ Js::Int8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8VirtualArray */ Js::Uint8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedVirtualArray */ Js::Uint8ClampedVirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16VirtualArray */ Js::Int16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint16VirtualArray */ Js::Uint16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int32VirtualArray */ Js::Int32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint32VirtualArray */ Js::Uint32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float32VirtualArray */ Js::Float32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float64VirtualArray */ Js::Float64VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int8MixedArray */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8MixedArray */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedMixedArray */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16MixedArray */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16MixedArray */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32MixedArray */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32MixedArray */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32MixedArray */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64MixedArray */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int64Array */ Js::Int64Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint64Array */ Js::Uint64Array::GetOffsetOfBuffer(),
    /* ObjectType::BoolArray */ Js::BoolArray::GetOffsetOfBuffer(),
    /* ObjectType::CharArray */ Js::CharArray::GetOffsetOfBuffer()
};
// Per-ObjectType byte offset of the length field within the object. Entries
// must stay in ObjectType enum order; -1 marks object types with no length.
const uint32 Lowerer::OffsetsOfLength[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<uint32>(-1),
    /* ObjectType::Object */ static_cast<uint32>(-1),
    /* ObjectType::RegExp */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Array */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Int8Array */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8Array */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedArray */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16Array */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16Array */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32Array */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32Array */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32Array */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64Array */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int8VirtualArray */ Js::Int8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8VirtualArray */ Js::Uint8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedVirtualArray */ Js::Uint8ClampedVirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int16VirtualArray */ Js::Int16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint16VirtualArray */ Js::Uint16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int32VirtualArray */ Js::Int32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint32VirtualArray */ Js::Uint32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float32VirtualArray */ Js::Float32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float64VirtualArray */ Js::Float64VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int8MixedArray */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8MixedArray */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedMixedArray */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16MixedArray */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16MixedArray */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32MixedArray */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32MixedArray */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32MixedArray */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64MixedArray */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int64Array */ Js::Int64Array::GetOffsetOfLength(),
    /* ObjectType::Uint64Array */ Js::Uint64Array::GetOffsetOfLength(),
    /* ObjectType::BoolArray */ Js::BoolArray::GetOffsetOfLength(),
    /* ObjectType::CharArray */ Js::CharArray::GetOffsetOfLength()
};
// Per-ObjectType IR element type used when indexing into the array's data.
// Entries must stay in ObjectType enum order; TyIllegal marks non-array types.
// (Fixed entry comment: Uint16vArray -> Uint16VirtualArray.)
const IRType Lowerer::IndirTypes[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ TyIllegal,
    /* ObjectType::Object */ TyIllegal,
    /* ObjectType::RegExp */ TyIllegal,
    /* ObjectType::ObjectWithArray */ TyVar,
    /* ObjectType::Array */ TyVar,
    /* ObjectType::Int8Array */ TyInt8,
    /* ObjectType::Uint8Array */ TyUint8,
    /* ObjectType::Uint8ClampedArray */ TyUint8,
    /* ObjectType::Int16Array */ TyInt16,
    /* ObjectType::Uint16Array */ TyUint16,
    /* ObjectType::Int32Array */ TyInt32,
    /* ObjectType::Uint32Array */ TyUint32,
    /* ObjectType::Float32Array */ TyFloat32,
    /* ObjectType::Float64Array */ TyFloat64,
    /* ObjectType::Int8VirtualArray */ TyInt8,
    /* ObjectType::Uint8VirtualArray */ TyUint8,
    /* ObjectType::Uint8ClampedVirtualArray */ TyUint8,
    /* ObjectType::Int16VirtualArray */ TyInt16,
    /* ObjectType::Uint16VirtualArray */ TyUint16,
    /* ObjectType::Int32VirtualArray */ TyInt32,
    /* ObjectType::Uint32VirtualArray */ TyUint32,
    /* ObjectType::Float32VirtualArray */ TyFloat32,
    /* ObjectType::Float64VirtualArray */ TyFloat64,
    /* ObjectType::Int8MixedArray */ TyInt8,
    /* ObjectType::Uint8MixedArray */ TyUint8,
    /* ObjectType::Uint8ClampedMixedArray */ TyUint8,
    /* ObjectType::Int16MixedArray */ TyInt16,
    /* ObjectType::Uint16MixedArray */ TyUint16,
    /* ObjectType::Int32MixedArray */ TyInt32,
    /* ObjectType::Uint32MixedArray */ TyUint32,
    /* ObjectType::Float32MixedArray */ TyFloat32,
    /* ObjectType::Float64MixedArray */ TyFloat64,
    /* ObjectType::Int64Array */ TyInt64,
    /* ObjectType::Uint64Array */ TyUint64,
    /* ObjectType::BoolArray */ TyUint8,
    /* ObjectType::CharArray */ TyUint16
};
// Per-ObjectType indir scale (log2 of the element size) used when computing
// element addresses. Entries must stay in ObjectType enum order; -1 marks
// non-array object types.
const BYTE Lowerer::IndirScales[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<BYTE>(-1),
    /* ObjectType::Object */ static_cast<BYTE>(-1),
    /* ObjectType::RegExp */ static_cast<BYTE>(-1),
    /* ObjectType::ObjectWithArray */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Array */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Int8Array */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8Array */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16Array */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16Array */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32Array */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32Array */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32Array */ 2, // log2(sizeof(float))
    /* ObjectType::Float64Array */ 3, // log2(sizeof(double))
    /* ObjectType::Int8VirtualArray */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8VirtualArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedVirtualArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16VirtualArray */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16VirtualArray */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32VirtualArray */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32VirtualArray */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32VirtualArray */ 2, // log2(sizeof(float))
    /* ObjectType::Float64VirtualArray */ 3, // log2(sizeof(double))
    /* ObjectType::Int8MixedArray */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8MixedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedMixedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16MixedArray */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16MixedArray */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32MixedArray */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32MixedArray */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32MixedArray */ 2, // log2(sizeof(float))
    /* ObjectType::Float64MixedArray */ 3, // log2(sizeof(double))
    /* ObjectType::Int64Array */ 3, // log2(sizeof(int64))
    /* ObjectType::Uint64Array */ 3, // log2(sizeof(uint64))
    /* ObjectType::BoolArray */ 0, // log2(sizeof(bool))
    /* ObjectType::CharArray */ 1 // log2(sizeof(wchar_t))
};
- VTableValue Lowerer::GetArrayVtableAddress(const ValueType valueType, bool getVirtual)
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- if(valueType.IsLikelyArrayOrObjectWithArray())
- {
- if(valueType.HasIntElements())
- {
- return VTableValue::VtableNativeIntArray;
- }
- else if(valueType.HasFloatElements())
- {
- return VTableValue::VtableNativeFloatArray;
- }
- }
- if (getVirtual && valueType.IsLikelyMixedTypedArrayType())
- {
- return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetMixedToVirtualTypedArrayObjectType())];
- }
- return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
// Byte offset of the head segment / data buffer for the given array value type.
uint32 Lowerer::GetArrayOffsetOfHeadSegment(const ValueType valueType)
{
    Assert(valueType.IsLikelyAnyOptimizedArray());
    return OffsetsOfHeadSegment[static_cast<ValueType::TSize>(valueType.GetObjectType())];
}
// Byte offset of the length field for the given array value type.
uint32 Lowerer::GetArrayOffsetOfLength(const ValueType valueType)
{
    Assert(valueType.IsLikelyAnyOptimizedArray());
    return OffsetsOfLength[static_cast<ValueType::TSize>(valueType.GetObjectType())];
}
- IRType Lowerer::GetArrayIndirType(const ValueType valueType)
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- if(valueType.IsLikelyArrayOrObjectWithArray())
- {
- if(valueType.HasIntElements())
- {
- return TyInt32;
- }
- else if(valueType.HasFloatElements())
- {
- return TyFloat64;
- }
- }
- return IndirTypes[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
- BYTE Lowerer::GetArrayIndirScale(const ValueType valueType) const
- {
- Assert(valueType.IsLikelyAnyOptimizedArray());
- if(valueType.IsLikelyArrayOrObjectWithArray())
- {
- if(valueType.HasIntElements())
- {
- return 2; // log2(sizeof(int32))
- }
- else if(valueType.HasFloatElements())
- {
- return 3; // log2(sizeof(double))
- }
- }
- return IndirScales[static_cast<ValueType::TSize>(valueType.GetObjectType())];
- }
- bool Lowerer::ShouldGenerateArrayFastPath(
- const IR::Opnd *const arrayOpnd,
- const bool supportsObjectsWithArrays,
- const bool supportsTypedArrays,
- const bool requiresSse2ForFloatArrays) const
- {
- Assert(arrayOpnd);
- const ValueType arrayValueType(arrayOpnd->GetValueType());
- if(arrayValueType.IsUninitialized())
- {
- // Don't have info about the value type, better to generate the fast path anyway
- return true;
- }
- if (!arrayValueType.IsLikelyObject())
- {
- if (!arrayValueType.HasBeenObject() || arrayValueType.IsLikelyString())
- {
- return false;
- }
- //We have seen at least once there is an object in the code path. Generate fastpath hoping it to be array.
- //Its nice if we can get all the attributes set but valueType is only 16 bits. Consider expanding the same.
- return true;
- }
- if( !supportsObjectsWithArrays && arrayValueType.GetObjectType() == ObjectType::ObjectWithArray ||
- !supportsTypedArrays && arrayValueType.IsLikelyTypedArray())
- {
- // The fast path likely would not hit
- return false;
- }
- if(arrayValueType.GetObjectType() == ObjectType::UninitializedObject)
- {
- // Don't have info about the object type, better to generate the fast path anyway
- return true;
- }
- #ifdef _M_IX86
- if(requiresSse2ForFloatArrays &&
- (
- arrayValueType.GetObjectType() == ObjectType::Float32Array ||
- arrayValueType.GetObjectType() == ObjectType::Float64Array
- ) &&
- !AutoSystemInfo::Data.SSE2Available())
- {
- // Fast paths for float arrays rely on SSE2
- return false;
- }
- #endif
- return !arrayValueType.IsLikelyAnyUnOptimizedArray();
- }
// Load the object-array pointer out of an ObjectWithArray-typed base object
// into a fresh register operand. The returned operand carries the base's
// value type converted to an array value type.
IR::RegOpnd *Lowerer::LoadObjectArray(IR::RegOpnd *const baseOpnd, IR::Instr *const insertBeforeInstr)
{
    Assert(baseOpnd);
    Assert(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // mov array, [base + offsetOf(objectArrayOrFlags)]
    // Copy the base operand (preserving array-reg info if present) but give it
    // a fresh stack sym so the load doesn't clobber the base.
    IR::RegOpnd *const arrayOpnd =
        baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd()->CopyAsRegOpnd(func) : baseOpnd->Copy(func)->AsRegOpnd();
    arrayOpnd->m_sym = StackSym::New(TyVar, func);
    arrayOpnd->SetValueType(arrayOpnd->GetValueType().ToArray());
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func, false /* autoDelete */);
    m_lowererMD.CreateAssign(
        arrayOpnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfObjectArray(),
            arrayOpnd->GetType(),
            func),
        insertBeforeInstr);

    return arrayOpnd;
}
// Branch to isDisabledLabel when the array set-element fast path has been
// disabled, which is signaled by the optimization-override vtable being set
// to VtableInvalid.
void
Lowerer::GenerateIsEnabledArraySetElementFastPathCheck(
    IR::LabelInstr * isDisabledLabel,
    IR::Instr * const insertBeforeInstr)
{
    InsertCompareBranch(
        this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable),
        LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableInvalid),
        Js::OpCode::BrEq_A,
        isDisabledLabel,
        insertBeforeInstr);
}
// Emit the inline check that the base operand is the expected (native/typed)
// array, branching to isNotObjectLabel when it isn't an object at all and to
// isNotArrayLabel when it isn't the expected array kind. Returns a register
// operand holding the verified array, with its value type marked definite.
//
// forceFloat: when expecting a native float array, convert a native int array
//             on the spot (then bail via isNotArrayLabel).
// isStore:    use the set-element fast-path vtables from the optimization
//             overrides instead of the plain vtables, so disabling the fast
//             path also fails this check.
IR::RegOpnd *Lowerer::GenerateArrayTest(
    IR::RegOpnd *const baseOpnd,
    IR::LabelInstr *const isNotObjectLabel,
    IR::LabelInstr *const isNotArrayLabel,
    IR::Instr *const insertBeforeInstr,
    const bool forceFloat,
    const bool isStore,
    const bool allowDefiniteArray)
{
    Assert(baseOpnd);

    const ValueType baseValueType(baseOpnd->GetValueType());

    // Shouldn't request to do an array test when it's already known to be an array, or if it's unlikely to be an array
    Assert(!baseValueType.IsAnyOptimizedArray() || allowDefiniteArray || baseValueType.IsNativeArray());
    Assert(baseValueType.IsUninitialized() || baseValueType.HasBeenObject());
    Assert(isNotObjectLabel);
    Assert(isNotArrayLabel);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;
    IR::RegOpnd *arrayOpnd;
    IR::AutoReuseOpnd autoReuseArrayOpnd;
    if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
    {
        // Only DynamicObject is allowed (DynamicObject vtable is ensured) because some object types have special handling for
        // index properties - arguments object, string object, external object, etc.
        GenerateObjectTypeTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        // Header-inlined objects have no separate object array.
        GenerateObjectHeaderInliningTest(baseOpnd, isNotArrayLabel, insertBeforeInstr);
        arrayOpnd = LoadObjectArray(baseOpnd, insertBeforeInstr);
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);

        // The objectArrayOrFlags field may be null or hold flags (tagged)
        // instead of an array pointer; both cases mean "no array".
        //     test array, array
        //     je $isNotArrayLabel
        //     test array, 1
        //     jne $isNotArrayLabel
        InsertTestBranch(
            arrayOpnd,
            arrayOpnd,
            Js::OpCode::BrEq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        InsertTestBranch(
            arrayOpnd,
            IR::IntConstOpnd::New(1, TyUint8, func, true),
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
    }
    else
    {
        if(!baseOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        }
        arrayOpnd = baseOpnd->Copy(func)->AsRegOpnd();
        if(!baseValueType.IsLikelyAnyOptimizedArray())
        {
            // No profile info: assume a plain JavascriptArray, possibly with
            // missing values.
            arrayOpnd->SetValueType(
                ValueType::GetObject(ObjectType::Array)
                    .ToLikely()
                    .SetHasNoMissingValues(false)
                    .SetArrayTypeId(Js::TypeIds_Array));
        }
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
    }

    VTableValue vtableAddress = baseValueType.IsLikelyAnyOptimizedArray()
        ? GetArrayVtableAddress(baseValueType)
        : VTableValue::VtableJavascriptArray;
    // Mixed typed-array types may match either the regular or the virtual
    // vtable; compute the virtual flavor up front.
    VTableValue virtualVtableAddress = VTableValue::VtableInvalid;
    if (baseValueType.IsLikelyMixedTypedArrayType())
    {
        virtualVtableAddress = GetArrayVtableAddress(baseValueType, true);
    }

    IR::Opnd * vtableOpnd;
    IR::Opnd * vtableVirtualOpnd = nullptr;
    if (isStore &&
        (vtableAddress == VTableValue::VtableJavascriptArray ||
         baseValueType.IsLikelyNativeArray()))
    {
        // For stores, compare against the optimization-override vtable so the
        // check also fails when the set-element fast path is disabled.
        vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
        if (baseValueType.IsLikelyNativeArray())
        {
            if (baseValueType.HasIntElements())
            {
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable), insertBeforeInstr);
            }
            else
            {
                Assert(baseValueType.HasFloatElements());
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable), insertBeforeInstr);
            }
        }
        else
        {
            InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable), insertBeforeInstr);
        }
    }
    else
    {
        vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, vtableAddress);
    }

    //     cmp [array], vtableAddress
    //     jne $isNotArrayLabel
    if (forceFloat && baseValueType.IsLikelyNativeFloatArray())
    {
        // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
        const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrEq_A,
            goodArrayLabel,
            insertBeforeInstr);

        IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        insertBeforeInstr->InsertBefore(notFloatArrayLabel);

        // Second chance: check for a native int array vtable.
        if (isStore)
        {
            vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
            InsertMove(vtableOpnd, IR::MemRefOpnd::New(
                func->GetScriptContext()->optimizationOverrides.GetAddressOfIntArraySetElementFastPathVtable(),
                TyMachPtr, func), insertBeforeInstr);
        }
        else
        {
            vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
        }
        InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);

        // Convert the int array to a float array via a helper call.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
        IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
        insertBeforeInstr->InsertBefore(helperInstr);
        m_lowererMD.ChangeToHelperCall(helperInstr, IR::HelperIntArr_ToNativeFloatArray);

        // Branch to the (bailout) label, because converting the array may have made our array checks unsafe.
        InsertBranch(Js::OpCode::Br, isNotArrayLabel, insertBeforeInstr);

        insertBeforeInstr->InsertBefore(goodArrayLabel);
    }
    else
    {
        IR::LabelInstr* goodArrayLabel = nullptr;
        if (baseValueType.IsLikelyMixedTypedArrayType())
        {
            // Mixed typed arrays match either the regular or the virtual vtable.
            goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrEq_A,
                goodArrayLabel,
                insertBeforeInstr);
            Assert(virtualVtableAddress);
            vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
            Assert(vtableVirtualOpnd);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableVirtualOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
            insertBeforeInstr->InsertBefore(goodArrayLabel);
        }
        else
        {
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
        }
    }

    // Past the checks the array's type is definite; missing values may still
    // exist for likely (unproven) JS arrays.
    ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsLikelyArrayOrObjectWithArray() && !arrayValueType.IsObject())
    {
        arrayValueType = arrayValueType.SetHasNoMissingValues(false);
    }
    arrayValueType = arrayValueType.ToDefiniteObject();
    arrayOpnd->SetValueType(arrayValueType);
    return arrayOpnd;
}
- IR::LabelInstr *Lowerer::InsertLabel(const bool isHelper, IR::Instr *const insertBeforeInstr)
- {
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::LabelInstr *const instr = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
- insertBeforeInstr->InsertBefore(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertMoveWithBarrier(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
- {
- return Lowerer::InsertMove(dst, src, insertBeforeInstr, true);
- }
// Insert a lowered move from src to dst before insertBeforeInstr. Constant
// sources moving into float destinations are materialized via
// LoadFloatFromNonReg; sources wider than the destination are truncated to the
// destination type. The move is legalized as a write-barrier assign when
// generateWriteBarrier is set.
IR::Instr *Lowerer::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr, bool generateWriteBarrier)
{
    Assert(dst);
    Assert(src);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // Float destination with a constant source: load the constant as a double.
    if(dst->IsFloat() && src->IsConstOpnd())
    {
        return LoadFloatFromNonReg(src, dst, insertBeforeInstr);
    }

    // Narrow the source to the destination's type when it is wider.
    if(TySize[dst->GetType()] < TySize[src->GetType()])
    {
        src = src->UseWithNewType(dst->GetType(), func);
    }

    IR::Instr *const instr = IR::Instr::New(Js::OpCode::Ld_A, dst, src, func);
    insertBeforeInstr->InsertBefore(instr);
    if (generateWriteBarrier)
    {
        LowererMD::ChangeToWriteBarrierAssign(instr);
    }
    else
    {
        LowererMD::ChangeToAssign(instr);
    }

    return instr;
}
- IR::BranchInstr *Lowerer::InsertBranch(
- const Js::OpCode opCode,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr)
- {
- return InsertBranch(opCode, false /* isUnsigned */, target, insertBeforeInstr);
- }
// Insert a branch to 'target' before insertBeforeInstr, lowering the opcode
// to the machine-dependent branch form (unconditional, unsigned-conditional,
// or signed-conditional) when it isn't already a lowered opcode.
IR::BranchInstr *Lowerer::InsertBranch(
    const Js::OpCode opCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr)
{
    Assert(target);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    IR::BranchInstr *const instr = IR::BranchInstr::New(opCode, target, func);
    if(!instr->IsLowered())
    {
        if(opCode == Js::OpCode::Br)
        {
            // Unconditional branch.
            instr->m_opcode = LowererMD::MDUncondBranchOpcode;
        }
        else if(isUnsigned)
        {
            instr->m_opcode = LowererMD::MDUnsignedBranchOpcode(opCode);
        }
        else
        {
            instr->m_opcode = LowererMD::MDBranchOpcode(opCode);
        }
    }
    insertBeforeInstr->InsertBefore(instr);
    return instr;
}
// Insert a CMP of src1 against src2 before insertBeforeInstr and legalize it
// for the target. Float64 comparisons are not supported here (see
// InsertCompareBranch for the float path).
IR::Instr *Lowerer::InsertCompare(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
{
    Assert(src1);
    Assert(!src1->IsFloat64()); // not implemented
    Assert(src2);
    Assert(!src2->IsFloat64()); // not implemented
    Assert(!src1->IsEqual(src2));
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    IR::Instr *const instr = IR::Instr::New(Js::OpCode::CMP, func);
    instr->SetSrc1(src1);
    instr->SetSrc2(src2);
    insertBeforeInstr->InsertBefore(instr);
    // Legalize fixes operand forms the target ISA can't encode directly.
    LowererMD::Legalize(instr);
    return instr;
}
- IR::BranchInstr *Lowerer::InsertCompareBranch(
- IR::Opnd *const compareSrc1,
- IR::Opnd *const compareSrc2,
- Js::OpCode branchOpCode,
- IR::LabelInstr *const target,
- IR::Instr *const insertBeforeInstr,
- const bool ignoreNaN)
- {
- return InsertCompareBranch(compareSrc1, compareSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr, ignoreNaN);
- }
// Insert a compare-and-branch sequence before insertBeforeInstr. Float64
// sources take a dedicated float-branch path. For integer/pointer sources the
// function canonicalizes the operand order (constant on the right, swapping
// the branch condition accordingly) and prefers TEST over CMP when comparing
// a register against zero.
IR::BranchInstr *Lowerer::InsertCompareBranch(
    IR::Opnd *compareSrc1,
    IR::Opnd *compareSrc2,
    Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr,
    const bool ignoreNaN)
{
    Assert(compareSrc1);
    Assert(compareSrc2);

    Func *const func = insertBeforeInstr->m_func;

    // Float compares are lowered through the MD float-branch path, which
    // also handles NaN semantics (per ignoreNaN).
    if(compareSrc1->IsFloat64())
    {
        Assert(compareSrc2->IsFloat64());
        Assert(!isUnsigned);
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        return LowererMD::LowerFloatCondBranch(instr, ignoreNaN);
    }

    // For each relational opcode, the opcode to use if the two sources are
    // swapped (Eq/Neq are symmetric; Ge<->Le, Gt<->Lt).
    Js::OpCode swapSrcsBranchOpCode;
    switch(branchOpCode)
    {
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
            swapSrcsBranchOpCode = branchOpCode;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrGe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLe_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrGt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLt_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrLe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGe_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrLt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGt_A;
            // fall through

        Common_BrEqNeqGeGtLeLt:
            // Check if src1 is a constant and src2 is not, and facilitate folding the constant into the Cmp instruction
            if( (
                    compareSrc1->IsIntConstOpnd() ||
                    (
                        compareSrc1->IsAddrOpnd() &&
                        Math::FitsInDWord(reinterpret_cast<size_t>(compareSrc1->AsAddrOpnd()->m_address))
                    )
                ) &&
                !compareSrc2->IsIntConstOpnd() &&
                !compareSrc2->IsAddrOpnd())
            {
                // Swap the sources and branch
                IR::Opnd *const tempSrc = compareSrc1;
                compareSrc1 = compareSrc2;
                compareSrc2 = tempSrc;
                branchOpCode = swapSrcsBranchOpCode;
            }

            // Check for compare with zero, to prefer using Test instead of Cmp
            // (Gt/Le on zero can't be expressed via TEST's flags, so they fall
            // through to the CMP path.)
            if( !compareSrc1->IsRegOpnd() ||
                !(
                    compareSrc2->IsIntConstOpnd() && compareSrc2->AsIntConstOpnd()->GetValue() == 0 ||
                    compareSrc2->IsAddrOpnd() && !compareSrc2->AsAddrOpnd()->m_address
                ) ||
                branchOpCode == Js::OpCode::BrGt_A || branchOpCode == Js::OpCode::BrLe_A)
            {
                goto Default;
            }
            if(branchOpCode == Js::OpCode::BrGe_A || branchOpCode == Js::OpCode::BrLt_A)
            {
                // Unsigned >= 0 / < 0 are trivially true/false; don't convert.
                if(isUnsigned)
                {
                    goto Default;
                }
                // Map sign-based zero compares to the TEST-compatible opcode.
                branchOpCode = LowererMD::MDCompareWithZeroBranchOpcode(branchOpCode);
            }
            // The zero constant operand is dropped; free it if nothing uses it.
            if(!compareSrc2->IsInUse())
            {
                compareSrc2->Free(func);
            }
            InsertTest(compareSrc1, compareSrc1, insertBeforeInstr);
            break;

        default:
        Default:
            InsertCompare(compareSrc1, compareSrc2, insertBeforeInstr);
            break;
    }

    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
- IR::Instr *Lowerer::InsertTest(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
- {
- Assert(src1);
- Assert(!src1->IsFloat64()); // not implemented
- Assert(src2);
- Assert(!src2->IsFloat64()); // not implemented
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDTestOpcode, func);
- instr->SetSrc1(src1);
- instr->SetSrc2(src2);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
// Convenience overload of InsertTestBranch for signed branch conditions:
// forwards with isUnsigned == false.
IR::BranchInstr *Lowerer::InsertTestBranch(
    IR::Opnd *const testSrc1,
    IR::Opnd *const testSrc2,
    const Js::OpCode branchOpCode,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr)
{
    return InsertTestBranch(testSrc1, testSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
}
// Inserts (TEST testSrc1, testSrc2) followed by a conditional branch to
// target, both before insertBeforeInstr. Returns the branch instruction.
IR::BranchInstr *Lowerer::InsertTestBranch(
    IR::Opnd *const testSrc1,
    IR::Opnd *const testSrc2,
    const Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr)
{
    InsertTest(testSrc1, testSrc2, insertBeforeInstr);
    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
- IR::Instr *Lowerer::InsertAdd(
- const bool needFlags,
- IR::Opnd *const dst,
- IR::Opnd *src1,
- IR::Opnd *src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- if(src2->IsIntConstOpnd())
- {
- IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
- const IntConstType value = intConstOpnd->GetValue();
- if(value < 0 && value != IntConstMin)
- {
- // Change (s1 = s1 + -5) into (s1 = s1 - 5)
- IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
- newSrc2->SetValue(-value);
- return InsertSub(needFlags, dst, src1, newSrc2, insertBeforeInstr);
- }
- }
- else if(src1->IsIntConstOpnd())
- {
- IR::IntConstOpnd *const intConstOpnd = src1->AsIntConstOpnd();
- const IntConstType value = intConstOpnd->GetValue();
- if(value < 0 && value != IntConstMin)
- {
- // Change (s1 = -5 + s1) into (s1 = s1 - 5)
- IR::Opnd *const newSrc1 = src2;
- IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
- newSrc2->SetValue(-value);
- return InsertSub(needFlags, dst, newSrc1, newSrc2, insertBeforeInstr);
- }
- }
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::Add_A, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::ChangeToAdd(instr, needFlags);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertSub(
- const bool needFlags,
- IR::Opnd *const dst,
- IR::Opnd *src1,
- IR::Opnd *src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- if(src2->IsIntConstOpnd())
- {
- IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
- const IntConstType value = intConstOpnd->GetValue();
- if(value < 0 && value != IntConstMin)
- {
- // Change (s1 = s1 - -5) into (s1 = s1 + 5)
- IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
- newSrc2->SetValue(-value);
- return InsertAdd(needFlags, dst, src1, newSrc2, insertBeforeInstr);
- }
- }
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::Sub_A, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::ChangeToSub(instr, needFlags);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertLea(IR::RegOpnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src);
- Assert(src->IsIndirOpnd() || src->IsSymOpnd());
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::LEA, dst, src, func);
- insertBeforeInstr->InsertBefore(instr);
- return LowererMD::ChangeToLea(instr);
- }
- IR::Instr *Lowerer::InsertAnd(
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertOr(
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(src1);
- Assert(src2);
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDOrOpcode, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertShift(
- const Js::OpCode opCode,
- const bool needFlags,
- IR::Opnd *const dst,
- IR::Opnd *const src1,
- IR::Opnd *const src2,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(!dst->IsFloat64()); // not implemented
- Assert(src1);
- Assert(!src1->IsFloat64()); // not implemented
- Assert(src2);
- Assert(!src2->IsFloat64()); // not implemented
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(opCode, dst, src1, src2, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::ChangeToShift(instr, needFlags);
- LowererMD::Legalize(instr);
- return instr;
- }
// Convenience overload of InsertShiftBranch for signed branch conditions:
// forwards with isUnsigned == false.
IR::Instr *Lowerer::InsertShiftBranch(
    const Js::OpCode shiftOpCode,
    IR::Opnd *const dst,
    IR::Opnd *const src1,
    IR::Opnd *const src2,
    const Js::OpCode branchOpCode,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr)
{
    return InsertShiftBranch(shiftOpCode, dst, src1, src2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
}
// Inserts a flag-setting shift (dst = src1 <shiftOpCode> src2) followed by a
// conditional branch to target, both before insertBeforeInstr. The shift is
// emitted with needFlags == true because the branch consumes the flags it
// sets. Returns the branch instruction.
IR::Instr *Lowerer::InsertShiftBranch(
    const Js::OpCode shiftOpCode,
    IR::Opnd *const dst,
    IR::Opnd *const src1,
    IR::Opnd *const src2,
    const Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr)
{
    InsertShift(shiftOpCode, true /* needFlags */, dst, src1, src2, insertBeforeInstr);
    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
- IR::Instr *Lowerer::InsertConvertFloat32ToFloat64(
- IR::Opnd *const dst,
- IR::Opnd *const src,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(dst->IsFloat64());
- Assert(src);
- Assert(src->IsFloat32());
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, src, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
- IR::Instr *Lowerer::InsertConvertFloat64ToFloat32(
- IR::Opnd *const dst,
- IR::Opnd *const src,
- IR::Instr *const insertBeforeInstr)
- {
- Assert(dst);
- Assert(dst->IsFloat32());
- Assert(src);
- Assert(src->IsFloat64());
- Assert(insertBeforeInstr);
- Func *const func = insertBeforeInstr->m_func;
- IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src, func);
- insertBeforeInstr->InsertBefore(instr);
- LowererMD::Legalize(instr);
- return instr;
- }
// Emits an increment of a uint8 value (dst = src + 1) that prevents the
// counter from overflowing/wrapping; delegates entirely to the
// machine-dependent implementation. onOverflowInsertBeforeInstrRef, when
// non-null, presumably receives an insertion point for the overflow path —
// see LowererMD::InsertIncUInt8PreventOverflow for the exact contract.
void Lowerer::InsertIncUInt8PreventOverflow(
    IR::Opnd *const dst,
    IR::Opnd *const src,
    IR::Instr *const insertBeforeInstr,
    IR::Instr * *const onOverflowInsertBeforeInstrRef)
{
    LowererMD::InsertIncUInt8PreventOverflow(dst, src, insertBeforeInstr, onOverflowInsertBeforeInstrRef);
}
// Emits a decrement of a uint8 value (dst = src - 1) that prevents
// underflow/wrap-around; delegates entirely to the machine-dependent
// implementation. onOverflowInsertBeforeInstrRef, when non-null, presumably
// receives an insertion point for the overflow path — see
// LowererMD::InsertDecUInt8PreventOverflow for the exact contract.
void Lowerer::InsertDecUInt8PreventOverflow(
    IR::Opnd *const dst,
    IR::Opnd *const src,
    IR::Instr *const insertBeforeInstr,
    IR::Instr * *const onOverflowInsertBeforeInstrRef)
{
    LowererMD::InsertDecUInt8PreventOverflow(dst, src, insertBeforeInstr, onOverflowInsertBeforeInstrRef);
}
// Compares the float64 src against +0.0 and branches to target either when
// (src == 0 or src is NaN) — branchOnZeroOrNan — or when (src != 0 and src
// is not NaN) — !branchOnZeroOrNan. fallthroughLabel, when provided, is the
// label control reaches when the branch is not taken; on ARM it is used as
// the target of the extra unordered (BVS) check so NaN takes the correct
// side of the branch.
void Lowerer::InsertFloatCheckForZeroOrNanBranch(
    IR::Opnd *const src,
    const bool branchOnZeroOrNan,
    IR::LabelInstr *const target,
    IR::LabelInstr *const fallthroughLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(src);
    Assert(src->IsFloat64());
    Assert(target);
    Assert(!fallthroughLabel || fallthroughLabel != target);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // Compare src against the shared +0.0 constant; NaN is deliberately
    // ignored here and handled per-platform below.
    IR::BranchInstr *const branchOnEqualOrNotEqual =
        InsertCompareBranch(
            src,
            IR::MemRefOpnd::New((double*)&(Js::JavascriptNumber::k_Zero), TyFloat64, func),
            branchOnZeroOrNan ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
            target,
            insertBeforeInstr,
            true /* ignoreNaN */);

    // x86/x64
    // When NaN is ignored, on x86 and x64, JE branches when equal or unordered since an unordered result sets the zero
    // flag, and JNE branches when not equal and not unordered. By comparing with zero, JE will branch when src is zero or
    // NaN, and JNE will branch when src is not zero and not NaN.
    //
    // ARM
    // When NaN is ignored, BEQ branches when equal and not unordered, and BNE branches when not equal or unordered. So,
    // when comparing src with zero, an unordered check needs to be added before the BEQ/BNE.
    branchOnEqualOrNotEqual; // satisfy the compiler (unused on non-ARM builds)
#ifdef _M_ARM
    // Branch-on-overflow-set (unordered) routes the NaN case: to target when
    // NaN should count as "zero or NaN", otherwise past the branch.
    InsertBranch(
        Js::OpCode::BVS,
        branchOnZeroOrNan
            ? target
            : fallthroughLabel ? fallthroughLabel : insertBeforeInstr->m_prev->GetOrCreateContinueLabel(),
        branchOnEqualOrNotEqual);
#endif
}
- IR::IndirOpnd *
- Lowerer::GenerateFastElemICommon(
- IR::Instr * ldElem,
- bool isStore,
- IR::IndirOpnd * indirOpnd,
- IR::LabelInstr * labelHelper,
- IR::LabelInstr * labelCantUseArray,
- IR::LabelInstr *labelFallthrough,
- bool * pIsTypedArrayElement,
- bool * pIsStringIndex,
- bool *emitBailoutRef,
- IR::LabelInstr **pLabelSegmentLengthIncreased /*= nullptr*/,
- bool checkArrayLengthOverflow /*= true*/,
- bool forceGenerateFastPath /* = false */,
- bool returnLength,
- IR::LabelInstr *bailOutLabelInstr /* = nullptr*/)
- {
- *pIsTypedArrayElement = false;
- *pIsStringIndex = false;
- if(pLabelSegmentLengthIncreased)
- {
- *pLabelSegmentLengthIncreased = nullptr;
- }
- IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
- AssertMsg(baseOpnd, "This shouldn't be NULL");
- // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
- // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
- // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
- // bailouts.
- if (baseOpnd->IsTaggedInt())
- {
- return NULL;
- }
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- if (indexOpnd)
- {
- if (indexOpnd->GetValueType().IsString())
- {
- if (!baseOpnd->GetValueType().IsLikelyOptimizedTypedArray())
- {
- // If profile data says that it's a typed array - do not generate the property string fast path as the src. could be a temp and that would cause a bug.
- *pIsTypedArrayElement = false;
- *pIsStringIndex = true;
- return m_lowererMD.GenerateFastElemIStringIndexCommon(ldElem, isStore, indirOpnd, labelHelper);
- }
- else
- {
- // There's no point in generating the int index fast path if we know the index has a string value.
- return nullptr;
- }
- }
- }
- return
- GenerateFastElemIIntIndexCommon(
- ldElem,
- isStore,
- indirOpnd,
- labelHelper,
- labelCantUseArray,
- labelFallthrough,
- pIsTypedArrayElement,
- emitBailoutRef,
- pLabelSegmentLengthIncreased,
- checkArrayLengthOverflow,
- false,
- returnLength,
- bailOutLabelInstr);
- }
- IR::IndirOpnd *
- Lowerer::GenerateFastElemIIntIndexCommon(
- IR::Instr * ldElem,
- bool isStore,
- IR::IndirOpnd * indirOpnd,
- IR::LabelInstr * labelHelper,
- IR::LabelInstr * labelCantUseArray,
- IR::LabelInstr *labelFallthrough,
- bool * pIsTypedArrayElement,
- bool *emitBailoutRef,
- IR::LabelInstr **pLabelSegmentLengthIncreased,
- bool checkArrayLengthOverflow /*= true*/,
- bool forceGenerateFastPath /* = false */,
- bool returnLength,
- IR::LabelInstr *bailOutLabelInstr /* = nullptr*/)
- {
- IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
- IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
- Assert(!baseOpnd->IsTaggedInt() || (indexOpnd && indexOpnd->IsNotInt()));
- BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
- IRType indirType = TyVar;
- const ValueType baseValueType(baseOpnd->GetValueType());
- // TEST base, AtomTag -- check base not tagged int
- // JNE $helper
- // if (base.GetValueType() != Array) {
- // CMP [base], JavascriptArray::`vtable'
- // JNE $helper
- // }
- // TEST index, 1 -- index tagged int
- // JEQ $helper
- // if (inputIndex is not int const) {
- // MOV index, inputIndex
- // SAR index, Js::VarTag_Shift -- remote atom tag
- // JS $helper -- exclude negative index
- // }
- // MOV headSegment, [base + offset(head)]
- // CMP [headSegment + offset(length)], index -- bounds check
- // if (opcode == StElemI_A) {
- // JA $done (for typedarray, JA $toNumberHelper)
- // CMP [headSegment + offset(size)], index -- chunk has room?
- // JBE $helper
- // if (index is not int const) {
- // LEA newLength, [index + 1]
- // } else {
- // newLength = index + 1
- // }
- // MOV [headSegment + offset(length)], newLength -- update length on chunk
- // CMP [base + offset(length)], newLength
- // JAE $done
- // MOV [base + offset(length)], newLength -- update length on array
- // if(length to be returned){
- // SHL newLength, AtomTag
- // INC newLength
- // MOV dst, newLength
- // }
- // JMP $done
- //
- // $toNumberHelper: Call HelperOp_ConvNumber_Full
- // JMP $done
- // $done
- // } else {la
- // JBE $helper
- // }
- // return [headSegment + offset(elements) + index]
- // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
- // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
- // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
- // bailouts.
- bool isIndexNotInt = false;
- IntConstType value = 0;
- IR::Opnd * indexValueOpnd = nullptr;
- bool invertBoundCheckComparison = false;
- if (indirOpnd->TryGetIntConstIndexValue(true, &value, &isIndexNotInt))
- {
- if (value >= 0)
- {
- indexValueOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
- invertBoundCheckComparison = true; // facilitate folding the constant index into the compare instruction
- }
- else
- {
- // If the index is a negative int constant we go directly to helper.
- Assert(!forceGenerateFastPath);
- return nullptr;
- }
- }
- else if (isIndexNotInt)
- {
- // If we know the index is not an int we go directly to helper.
- Assert(!forceGenerateFastPath);
- return nullptr;
- }
- //At this point indexValueOpnd is either NULL or contains the valueOpnd
- if(!forceGenerateFastPath && !ShouldGenerateArrayFastPath(baseOpnd, true, true, true))
- {
- return nullptr;
- }
- if(baseValueType.IsLikelyAnyOptimizedArray())
- {
- indirScale = GetArrayIndirScale(baseValueType);
- indirType = GetArrayIndirType(baseValueType);
- }
- IRType elementType = TyIllegal;
- IR::Opnd * element = nullptr;
- if(ldElem->m_opcode == Js::OpCode::InlineArrayPush)
- {
- element = ldElem->GetSrc2();
- elementType = element->GetType();
- }
- else if(isStore && ldElem->GetSrc1())
- {
- element = ldElem->GetSrc1();
- elementType = element->GetType();
- }
- Assert(isStore || (element == nullptr && elementType == TyIllegal));
- if (isStore && baseValueType.IsLikelyNativeArray() && indirType != elementType)
- {
- // We're trying to write a value of the wrong type, which should force a conversion of the array.
- // Go to the helper for that.
- return nullptr;
- }
- IR::RegOpnd *arrayOpnd = baseOpnd;
- IR::RegOpnd *headSegmentOpnd = nullptr;
- IR::Opnd *headSegmentLengthOpnd = nullptr;
- IR::AutoReuseOpnd autoReuseHeadSegmentOpnd, autoReuseHeadSegmentLengthOpnd;
- bool indexIsNonnegative = indexValueOpnd || indexOpnd->GetType() == TyUint32 || !checkArrayLengthOverflow;
- bool indexIsLessThanHeadSegmentLength = false;
- if(!baseValueType.IsAnyOptimizedArray())
- {
- arrayOpnd = GenerateArrayTest(baseOpnd, labelCantUseArray, labelCantUseArray, ldElem, true, isStore);
- }
- else
- {
- if(arrayOpnd->IsArrayRegOpnd())
- {
- IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
- if(arrayRegOpnd->HeadSegmentSym())
- {
- headSegmentOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentSym(), TyMachPtr, m_func);
- DebugOnly(headSegmentOpnd->FreezeSymValue());
- autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
- }
- if(arrayRegOpnd->HeadSegmentLengthSym())
- {
- headSegmentLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentLengthSym(), TyUint32, m_func);
- DebugOnly(headSegmentLengthOpnd->AsRegOpnd()->FreezeSymValue());
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- if (arrayRegOpnd->EliminatedLowerBoundCheck())
- {
- indexIsNonnegative = true;
- }
- if(arrayRegOpnd->EliminatedUpperBoundCheck())
- {
- indexIsLessThanHeadSegmentLength = true;
- }
- }
- }
- IR::AutoReuseOpnd autoReuseArrayOpnd;
- if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
- {
- autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
- }
- const auto EnsureObjectArrayLoaded = [&]()
- {
- if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
- {
- return;
- }
- arrayOpnd = LoadObjectArray(arrayOpnd, ldElem);
- autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
- };
- const bool doUpperBoundCheck = checkArrayLengthOverflow && !indexIsLessThanHeadSegmentLength;
- if(!indexValueOpnd)
- {
- indexValueOpnd =
- m_lowererMD.LoadNonnegativeIndex(
- indexOpnd,
- (
- indexIsNonnegative
- #if !INT32VAR
- ||
- // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
- doUpperBoundCheck
- #endif
- ),
- labelCantUseArray,
- labelHelper,
- ldElem);
- }
- const IR::AutoReuseOpnd autoReuseIndexValueOpnd(indexValueOpnd, m_func);
- if (baseValueType.IsLikelyTypedArray())
- {
- *pIsTypedArrayElement = true;
- if(doUpperBoundCheck)
- {
- if(!headSegmentLengthOpnd)
- {
- // (headSegmentLength = [base + offset(length)])
- int lengthOffset;
- lengthOffset = Js::Float64Array::GetOffsetOfLength();
- headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- // CMP index, headSegmentLength -- upper bound check
- if(!invertBoundCheckComparison)
- {
- InsertCompare(indexValueOpnd, headSegmentLengthOpnd, ldElem);
- }
- else
- {
- InsertCompare(headSegmentLengthOpnd, indexValueOpnd, ldElem);
- }
- }
- }
- else
- {
- *pIsTypedArrayElement = false;
- if (isStore &&
- baseValueType.IsLikelyNativeIntArray() &&
- (!element->IsIntConstOpnd() || Js::SparseArraySegment<int32>::GetMissingItem() == element->AsIntConstOpnd()->AsInt32()))
- {
- Assert(ldElem->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr);
- // Check for a write of the MissingItem value.
- InsertCompareBranch(
- element,
- GetMissingItemOpnd(elementType, m_func),
- Js::OpCode::BrEq_A,
- ldElem->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray,
- ldElem,
- true);
- }
- if(!headSegmentOpnd)
- {
- EnsureObjectArrayLoaded();
- // MOV headSegment, [base + offset(head)]
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
- headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
- InsertMove(headSegmentOpnd, indirOpnd, ldElem);
- }
- if(doUpperBoundCheck)
- {
- if(!headSegmentLengthOpnd)
- {
- // (headSegmentLength = [headSegment + offset(length)])
- headSegmentLengthOpnd =
- IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- // CMP index, headSegmentLength -- upper bound check
- if(!invertBoundCheckComparison)
- {
- InsertCompare(indexValueOpnd, headSegmentLengthOpnd, ldElem);
- }
- else
- {
- InsertCompare(headSegmentLengthOpnd, indexValueOpnd, ldElem);
- }
- }
- }
- const IR::BailOutKind bailOutKind = ldElem->HasBailOutInfo() ? ldElem->GetBailOutKind() : IR::BailOutInvalid;
- if(indexIsLessThanHeadSegmentLength ||
- bailOutKind & (IR::BailOutOnArrayAccessHelperCall | IR::BailOutOnInvalidatedArrayHeadSegment))
- {
- if(bailOutKind & (IR::BailOutOnArrayAccessHelperCall | IR::BailOutOnInvalidatedArrayHeadSegment))
- {
- // The bailout must be pre-op because it will not have completed the operation
- Assert(ldElem->GetBailOutInfo()->bailOutOffset == ldElem->GetByteCodeOffset());
- // Verify other bailouts these can be combined with
- Assert(
- !(
- bailOutKind &
- IR::BailOutKindBits &
- ~(
- IR::BailOutOnArrayAccessHelperCall |
- IR::BailOutOnInvalidatedArrayHeadSegment |
- IR::BailOutOnInvalidatedArrayLength |
- IR::BailOutConventionalNativeArrayAccessOnly |
- (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
- )
- ));
- if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
- {
- // Omit the helper call and generate a bailout instead
- Assert(emitBailoutRef);
- *emitBailoutRef = true;
- }
- }
- if(indexIsLessThanHeadSegmentLength)
- {
- Assert(!(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment));
- }
- else
- {
- IR::LabelInstr *bailOutLabel;
- if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
- {
- Assert(isStore);
- // Lower a separate (but shared) bailout for this case, and preserve the bailout kind in the instruction if the
- // helper call is going to be generated, because the bailout kind needs to be lowered again and differently in the
- // helper call path.
- //
- // Generate:
- // (LdElem)
- // jmp $continue
- // $bailOut:
- // Bail out with IR::BailOutOnInvalidatedArrayHeadSegment
- // $continue:
- LowerOneBailOutKind(
- ldElem,
- IR::BailOutOnInvalidatedArrayHeadSegment,
- false,
- !(bailOutKind & IR::BailOutOnArrayAccessHelperCall));
- bailOutLabel = ldElem->GetOrCreateContinueLabel(true);
- InsertBranch(Js::OpCode::Br, labelFallthrough, bailOutLabel);
- }
- else
- {
- Assert(bailOutKind & IR::BailOutOnArrayAccessHelperCall);
- bailOutLabel = labelHelper;
- }
- // Bail out if the index is outside the head segment bounds
- // jae $bailOut
- Assert(checkArrayLengthOverflow);
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
- true /* isUnsigned */,
- bailOutLabel,
- ldElem);
- }
- }
- else if (isStore && !baseValueType.IsLikelyTypedArray()) // #if (opcode == StElemI_A)
- {
- IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- IR::LabelInstr *labelSegmentLengthIncreased = nullptr;
- const bool isPush = ldElem->m_opcode != Js::OpCode::StElemI_A && ldElem->m_opcode != Js::OpCode::StElemI_A_Strict;
- // Put the head segment size check and length updates in a helper block since they're not the common path for StElem.
- // For push, that is the common path so keep it in a non-helper block.
- const bool isInHelperBlock = !isPush;
- if(checkArrayLengthOverflow)
- {
- if(pLabelSegmentLengthIncreased &&
- !(
- baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues() ||
- (ldElem->m_opcode == Js::OpCode::StElemI_A || ldElem->m_opcode == Js::OpCode::StElemI_A_Strict) &&
- ldElem->IsProfiledInstr() && !ldElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue()
- ))
- {
- // For arrays that are not guaranteed to have no missing values, before storing to an element where
- // (index < length), the element value needs to be checked to see if it's a missing value, and if so, fall back
- // to the helper. This is done to keep the missing value tracking precise in arrays. So, create a separate label
- // for the case where the length was increased (index >= length), and pass it back to GenerateFastStElemI, which
- // will fill in the rest.
- labelSegmentLengthIncreased = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelperBlock);
- *pLabelSegmentLengthIncreased = labelSegmentLengthIncreased;
- }
- else
- {
- labelSegmentLengthIncreased = labelDone;
- }
- // JB $done
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
- true /* isUnsigned */,
- labelDone,
- ldElem);
- }
- if(isInHelperBlock)
- {
- InsertLabel(true /* isHelper */, ldElem);
- }
- EnsureObjectArrayLoaded();
- do // while(false);
- {
- if(checkArrayLengthOverflow)
- {
- if(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnMissingValue)
- {
- // Need to bail out if this store would create a missing value. The store would cause a missing value to be
- // created if (index > length && index < size). If (index >= size) we would go to helper anyway, and the bailout
- // handling for this is done after the helper call, so just go to helper if (index > length).
- //
- // jne $helper // branch for (cmp index, headSegmentLength)
- InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldElem);
- }
- else
- {
- // If (index < size) we will not call the helper, so the array flags must be updated to reflect that it no
- // longer has no missing values.
- //
- // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // jmp indexLessThanSize
- // indexGreaterThanLength:
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
- // indexLessThanSize:
- IR::LabelInstr *const indexGreaterThanLengthLabel = InsertLabel(true /* isHelper */, ldElem);
- IR::LabelInstr *const indexLessThanSizeLabel = InsertLabel(isInHelperBlock, ldElem);
- // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // jmp indexLessThanSize
- // indexGreaterThanLength:
- InsertBranch(Js::OpCode::BrNeq_A, indexGreaterThanLengthLabel, indexGreaterThanLengthLabel);
- InsertCompareBranch(
- indexValueOpnd,
- IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
- Js::OpCode::BrGe_A,
- true /* isUnsigned */,
- labelHelper,
- indexGreaterThanLengthLabel);
- InsertBranch(Js::OpCode::Br, indexLessThanSizeLabel, indexGreaterThanLengthLabel);
- // indexGreaterThanLength:
- // cmp index, [headSegment + offset(size)]
- // jae $helper
- // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
- // indexLessThanSize:
- InsertCompareBranch(
- indexValueOpnd,
- IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
- Js::OpCode::BrGe_A,
- true /* isUnsigned */,
- labelHelper,
- indexLessThanSizeLabel);
- CompileAssert(
- static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
- Js::DynamicObjectFlags::HasNoMissingValues);
- InsertAnd(
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IntConstOpnd::New(
- static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
- TyUint8,
- m_func,
- true),
- indexLessThanSizeLabel);
- // indexLessThanSize:
- break;
- }
- }
- // CMP index, [headSegment + offset(size)]
- // JAE $helper
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, this->m_func);
- InsertCompareBranch(indexValueOpnd, indirOpnd, Js::OpCode::BrGe_A, true /* isUnsigned */, labelHelper, ldElem);
- } while(false);
- if(isPush)
- {
- IR::LabelInstr *const updateLengthLabel = InsertLabel(isInHelperBlock, ldElem);
- if(!doUpperBoundCheck && !headSegmentLengthOpnd)
- {
- // (headSegmentLength = [headSegment + offset(length)])
- headSegmentLengthOpnd =
- IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
- autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
- }
- // For push, it is guaranteed that (index >= length). We already know that (index < size), but we need to check if
- // (index > length) because in that case a missing value will be created and the missing value tracking in the array
- // needs to be updated.
- //
- // cmp index, headSegmentLength
- // je $updateLength
- // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
- // updateLength:
- InsertCompareBranch(
- indexValueOpnd,
- headSegmentLengthOpnd,
- Js::OpCode::BrEq_A,
- updateLengthLabel,
- updateLengthLabel);
- CompileAssert(
- static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
- Js::DynamicObjectFlags::HasNoMissingValues);
- InsertAnd(
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
- IR::IntConstOpnd::New(
- static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
- TyUint8,
- m_func,
- true),
- updateLengthLabel);
- }
- if (baseValueType.IsArrayOrObjectWithArray())
- {
- // We didn't emit an array check, but if we are going to grow the array
- // We need to go to helper if there is an ES5 array/objectarray used as prototype
- GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, ldElem);
- }
- IR::Opnd *newLengthOpnd;
- IR::AutoReuseOpnd autoReuseNewLengthOpnd;
- if (indexValueOpnd->IsRegOpnd())
- {
- // LEA newLength, [index + 1]
- newLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- InsertAdd(false /* needFlags */, newLengthOpnd, indexValueOpnd, IR::IntConstOpnd::New(1, TyUint32, m_func), ldElem);
- }
- else
- {
- newLengthOpnd = IR::IntConstOpnd::New(value + 1, TyUint32, this->m_func);
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- }
- // MOV [headSegment + offset(length)], newLength
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
- InsertMove(indirOpnd, newLengthOpnd, ldElem);
- if (checkArrayLengthOverflow)
- {
- // CMP newLength, [base + offset(length)]
- // JBE $segmentLengthIncreased
- Assert(labelSegmentLengthIncreased);
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
- InsertCompareBranch(
- newLengthOpnd,
- indirOpnd,
- Js::OpCode::BrLe_A,
- true /* isUnsigned */,
- labelSegmentLengthIncreased,
- ldElem);
- if(!isInHelperBlock)
- {
- InsertLabel(true /* isHelper */, ldElem);
- }
- }
- // MOV [base + offset(length)], newLength
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
- InsertMove(indirOpnd, newLengthOpnd, ldElem);
- if(returnLength)
- {
- if(newLengthOpnd->GetSize() != MachPtr)
- {
- newLengthOpnd = newLengthOpnd->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
- }
- // SHL newLength, AtomTag
- // INC newLength
- this->m_lowererMD.GenerateInt32ToVarConversion(newLengthOpnd, ldElem);
- // MOV dst, newLength
- InsertMove(ldElem->GetDst(), newLengthOpnd, ldElem);
- }
- if(labelSegmentLengthIncreased && labelSegmentLengthIncreased != labelDone)
- {
- // labelSegmentLengthIncreased:
- ldElem->InsertBefore(labelSegmentLengthIncreased);
- }
- // $done
- ldElem->InsertBefore(labelDone);
- }
- else // #else
- {
- if (checkArrayLengthOverflow)
- {
- if (*pIsTypedArrayElement && isStore)
- {
- IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- //For positive index beyond length or negative index its essentially nop for typed array store
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
- true /* isUnsigned */,
- labelInlineSet,
- ldElem);
- // For typed array, call ToNumber before we fallThrough.
- if (ldElem->GetSrc1()->GetType() == TyVar && !ldElem->GetSrc1()->GetValueType().IsPrimitive())
- {
- IR::Instr *toNumberInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
- toNumberInstr->SetSrc1(ldElem->GetSrc1());
- ldElem->InsertBefore(toNumberInstr);
- LowerUnaryHelperMem(toNumberInstr, IR::HelperOp_ConvNumber_Full);
- }
- InsertBranch(Js::OpCode::Br, labelFallthrough, ldElem); //Jump to fallThrough
- ldElem->InsertBefore(labelInlineSet);
- }
- else
- {
- // JAE $helper
- InsertBranch(
- !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
- true /* isUnsigned */,
- labelHelper,
- ldElem);
- }
- }
- EnsureObjectArrayLoaded();
- if (ldElem->m_opcode == Js::OpCode::InlineArrayPop)
- {
- Assert(!baseValueType.IsLikelyTypedArray());
- Assert(bailOutLabelInstr);
- if (indexValueOpnd->IsIntConstOpnd())
- {
- // indirOpnd = [headSegment + index + offset(elements)]
- IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
- // TODO: Assert(Math::FitsInDWord(offset));
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
- }
- else
- {
- // indirOpnd = [headSegment + offset(elements) + (index << scale)]
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
- indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
- }
- IR::Opnd * tmpDst = nullptr;
- IR::Opnd * dst = ldElem->GetDst();
- //Pop might not have a dst, if not don't worry about returning the last element. But we still have to
- // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
- if (dst || !baseValueType.HasNoMissingValues())
- {
- if (!dst)
- {
- dst = IR::RegOpnd::New(indirType, this->m_func);
- }
- else if (dst->AsRegOpnd()->m_sym == arrayOpnd->m_sym)
- {
- tmpDst = IR::RegOpnd::New(TyVar, this->m_func);
- dst = tmpDst;
- }
- // MOV dst, [head + offset]
- InsertMove(dst, indirOpnd, ldElem);
- //If the array has missing values, check for one
- if (!baseValueType.HasNoMissingValues())
- {
- InsertCompareBranch(
- dst,
- GetMissingItemOpnd(indirType, m_func),
- Js::OpCode::BrEq_A,
- bailOutLabelInstr,
- ldElem,
- true);
- }
- }
- // MOV [head + offset], missing
- InsertMove(indirOpnd, GetMissingItemOpnd(indirType, m_func), ldElem);
- IR::Opnd *newLengthOpnd;
- IR::AutoReuseOpnd autoReuseNewLengthOpnd;
- if (indexValueOpnd->IsRegOpnd())
- {
- // LEA newLength, [index]
- newLengthOpnd = indexValueOpnd;
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- }
- else
- {
- newLengthOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
- autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
- }
- //update segment length and array length
- // MOV [headSegment + offset(length)], newLength
- IR::IndirOpnd *lengthIndirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
- InsertMove(lengthIndirOpnd, newLengthOpnd, ldElem);
- // MOV [base + offset(length)], newLength
- lengthIndirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
- InsertMove(lengthIndirOpnd, newLengthOpnd, ldElem);
- if (tmpDst)
- {
- // The array opnd and the destination is the same, need to move the value in the tmp dst
- // to the actual dst
- InsertMove(ldElem->GetDst(), tmpDst, ldElem);
- }
- return indirOpnd;
- }
- } // #endif
- if (baseValueType.IsLikelyTypedArray())
- {
- if(!headSegmentOpnd)
- {
- // MOV headSegment, [base + offset(arrayBuffer)]
- int bufferOffset;
- bufferOffset = Js::Float64Array::GetOffsetOfBuffer();
- indirOpnd = IR::IndirOpnd::New(arrayOpnd, bufferOffset, TyMachPtr, this->m_func);
- headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
- InsertMove(headSegmentOpnd, indirOpnd, ldElem);
- }
- // indirOpnd = [headSegment + index]
- if (indexValueOpnd->IsIntConstOpnd())
- {
- IntConstType offset = (value << indirScale);
- // TODO: Assert(Math::FitsInDWord(offset));
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
- }
- else
- {
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
- }
- }
- else if (indexValueOpnd->IsIntConstOpnd())
- {
- // indirOpnd = [headSegment + index + offset(elements)]
- IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
- // TODO: Assert(Math::FitsInDWord(offset));
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
- }
- else
- {
- // indirOpnd = [headSegment + offset(elements) + (index << scale)]
- indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
- indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
- }
- return indirOpnd;
- }
- void
- Lowerer::GenerateTypeIdCheck(Js::TypeId typeId, IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateObjectCheck)
- {
- if (generateObjectCheck && !opnd->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(opnd, insertBeforeInstr, labelFail);
- }
- // MOV r1, [opnd + offset(type)]
- IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
- const IR::AutoReuseOpnd autoReuseR1(r1, m_func);
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(opnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- InsertMove(r1, indirOpnd, insertBeforeInstr);
- // CMP [r1 + offset(typeId)], typeid -- check src isString
- // JNE $fail
- indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
- InsertCompareBranch(
- indirOpnd,
- IR::IntConstOpnd::New(typeId, TyInt32, this->m_func),
- Js::OpCode::BrNeq_A,
- labelFail,
- insertBeforeInstr);
- }
- IR::RegOpnd *
- Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
- {
- if (!opnd->IsVar())
- {
- AssertMsg(opnd->GetSize() == 4, "This should be 32-bit wide");
- return opnd;
- }
- return m_lowererMD.GenerateUntagVar(opnd, labelFail, insertBeforeInstr, generateTagCheck && !opnd->IsTaggedInt());
- }
// Branch to isZeroLabel when opndSrc is zero:
//     TEST opndSrc, opndSrc
//     JEQ  $isZeroLabel
// (Despite the name, the emitted branch is taken on the *zero* case; the
// fall-through path is the not-zero case.)
void
Lowerer::GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * isZeroLabel, IR::Instr * insertBeforeInstr)
{
    InsertTestBranch(opndSrc, opndSrc, Js::OpCode::BrEq_A, isZeroLabel, insertBeforeInstr);
}
// Try to emit an inline fast path for loading a single character from a
// string (str[index]) through the one-character string cache.
//
// Returns false without emitting anything when the base is not likely a
// string or the index is not likely an int (caller falls back to the generic
// path). Returns true after emitting the fast path; every runtime check in
// the emitted code branches to labelHelper on failure, and success jumps to
// labelFallThru with the cached single-character string in ldElem's dst.
bool
Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr * labelFallThru)
{
    IR::IndirOpnd * indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

    // Don't generate the fast path if the instance is not likely string
    if (!baseOpnd->GetValueType().IsLikelyString())
    {
        return false;
    }
    Assert(!baseOpnd->IsTaggedInt());

    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // Don't generate the fast path if the index operand is not likely int
    if (indexOpnd && !indexOpnd->GetValueType().IsLikelyInt())
    {
        return false;
    }

    // Make sure the instance is a string (branch to $helper otherwise)
    Assert(!indexOpnd || !indexOpnd->IsNotInt());
    GenerateStringTest(baseOpnd, ldElem, labelHelper);

    IR::Opnd * index32CmpOpnd;
    IR::RegOpnd * bufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseBufferOpnd(bufferOpnd, m_func);
    IR::IndirOpnd * charIndirOpnd;
    if (indexOpnd)
    {
        // Untag the var and generate the indir into the string buffer
        // (scale 1: the buffer holds 16-bit chars and the indir is TyUint16)
        IR::RegOpnd * index32Opnd = GenerateUntagVar(indexOpnd, labelHelper, ldElem);
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, index32Opnd, 1, TyUint16, this->m_func);
        index32CmpOpnd = index32Opnd;
    }
    else
    {
        // Just use the offset to indirect into the string buffer
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, indirOpnd->GetOffset() * sizeof(wchar_t), TyUint16, this->m_func);
        index32CmpOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyUint32, this->m_func);
    }

    // Check if the index is in range of the string length
    //     CMP [baseOpnd + offset(length)], indexOpnd  -- string length
    //     JBE $helper                                 -- unsigned compare, and string lengths are at most INT_MAX - 1
    //                                                 -- so that even if we have a negative index, this will fail
    InsertCompareBranch(IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyInt32, this->m_func)
        , index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);

    // Load the string buffer and make sure it is not null
    //     MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
    //     TEST bufferOpnd, bufferOpnd
    //     JEQ $labelHelper
    indirOpnd = IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(bufferOpnd, indirOpnd, ldElem);
    GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);

    // Load the character and check that it is within the char string cache
    // (7-bit ASCII range, for which a per-character string is cached)
    //     MOV charOpnd, [bufferOpnd + index32Opnd]
    //     CMP charOpnd, CharStringCacheSize
    //     JAE $helper
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
    InsertMove(charOpnd, charIndirOpnd, ldElem);
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
        Js::OpCode::BrGe_A, true, labelHelper, ldElem);

    // Load the string from the cache
    //     MOV cacheOpnd, <charStringCache address>
    //     MOV stringOpnd, [cacheOpnd + charOpnd * scale]
    IR::RegOpnd * cacheOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseCacheOpnd(cacheOpnd, m_func);
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheOpnd, this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueCharStringCache), ldElem);

    // Check if we have created the string or not (cache slot may still be null)
    //     TEST stringOpnd, stringOpnd
    //     JE $helper
    IR::RegOpnd * stringOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseStringOpnd(stringOpnd, m_func);
    InsertMove(stringOpnd, IR::IndirOpnd::New(cacheOpnd, charOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), ldElem);
    GenerateNotZeroTest(stringOpnd, labelHelper, ldElem);

    // Success: store the cached string into dst and jump past the helper.
    InsertMove(ldElem->GetDst(), stringOpnd, ldElem);
    InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
    return true;
}
- bool
- Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
- {
- Assert(instrIsInHelperBlockRef);
- bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
- instrIsInHelperBlock = false;
- IR::LabelInstr * labelHelper;
- IR::LabelInstr * labelFallThru;
- IR::LabelInstr * labelBailOut = nullptr;
- IR::LabelInstr * labelMissingNative = nullptr;
- IR::Opnd *src1 = ldElem->GetSrc1();
- AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on LdElementI");
- IR::IndirOpnd * indirOpnd = src1->AsIndirOpnd();
- // From FastElemICommon:
- // TEST base, AtomTag -- check base not tagged int
- // JNE $helper
- // MOV r1, [base + offset(type)] -- check base isArray
- // CMP [r1 + offset(typeId)], TypeIds_Array
- // JNE $helper
- // TEST index, 1 -- index tagged int
- // JEQ $helper
- // MOV r2, index
- // SAR r2, Js::VarTag_Shift -- remote atom tag
- // JS $helper -- exclude negative index
- // MOV r4, [base + offset(head)]
- // CMP r2, [r4 + offset(length)] -- bounds check
- // JAE $helper
- // MOV r3, [r4 + offset(elements)]
- // Generated here:
- // MOV dst, [r3 + r2]
- // TEST dst, dst
- // JNE $fallthrough
- if(ldElem->m_opcode == Js::OpCode::LdMethodElem && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray())
- {
- // Typed arrays don't return objects, so it's not worth generating a fast path for LdMethodElem. Calling the helper also
- // generates a better error message. Skip the fast path and just generate a helper call.
- return true;
- }
- labelFallThru = ldElem->GetOrCreateContinueLabel();
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- // If we know for sure (based on flow graph) we're loading from the arguments object, then ignore the (path-based) profile info.
- bool isNativeArrayLoad = !ldElem->DoStackArgsOpt(this->m_func) && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
- bool needMissingValueCheck = true;
- bool emittedFastPath = false;
- bool emitBailout = false;
- if (ldElem->DoStackArgsOpt(this->m_func))
- {
- emittedFastPath = GenerateFastArgumentsLdElemI(ldElem, labelHelper, labelFallThru);
- }
- else if (GenerateFastStringLdElem(ldElem, labelHelper, labelFallThru))
- {
- emittedFastPath = true;
- }
- else
- {
- IR::LabelInstr * labelCantUseArray = labelHelper;
- if (isNativeArrayLoad)
- {
- if (ldElem->GetDst()->GetType() == TyVar)
- {
- // Skip the fast path and just generate a helper call
- return true;
- }
- // Specialized native array lowering for LdElem requires that it is profiled. When not profiled, GlobOpt should not
- // have specialized it.
- Assert(ldElem->IsProfiledInstr());
- labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- labelCantUseArray = labelBailOut;
- }
- bool isTypedArrayElement, isStringIndex;
- indirOpnd =
- GenerateFastElemICommon(
- ldElem,
- false,
- src1->AsIndirOpnd(),
- labelHelper,
- labelCantUseArray,
- labelFallThru,
- &isTypedArrayElement,
- &isStringIndex,
- &emitBailout);
- IR::Opnd *dst = ldElem->GetDst();
- IRType dstType = dst->AsRegOpnd()->GetType();
- // The index is negative or not int.
- if (indirOpnd == nullptr)
- {
- Assert(!(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall));
- // The global optimizer should never type specialize a LdElem for which the index is not int or an integer constant
- // with a negative value. This would force an unconditional bail out on the main code path.
- if (dst->IsVar())
- {
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n",
- this->m_func->GetJnFunction()->GetDisplayName(),
- this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode));
- Output::Flush();
- }
- // We must be dealing with some unconventional index value. Don't emit fast path, but go directly to helper.
- emittedFastPath = false;
- return true;
- }
- else
- {
- AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
- Assert(dst->IsRegOpnd());
- // If global optimizer failed to notice the unconventional index and type specialized the dst,
- // there is nothing to do but bail out. This could happen if global optimizer's information based
- // on value tracking fails to recognize a non-integer index or a constant int index that is negative.
- // The bailout below ensures that we behave correctly in retail builds even under
- // these (unlikely) conditions. To satisfy the downstream code we must populate the type specialized operand
- // with some made up values, even though we will unconditionally bail out here and the values will never be
- // used.
- IR::IntConstOpnd *constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
- InsertMove(dst, constOpnd, ldElem);
- ldElem->UnlinkSrc1();
- ldElem->UnlinkDst();
- GenerateBailOut(ldElem, nullptr, nullptr);
- emittedFastPath = true;
- return false;
- }
- }
- const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
- const ValueType baseValueType(src1->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
- if (ldElem->HasBailOutInfo() &&
- ldElem->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
- ldElem->GetBailOutInfo()->bailOutOffset <= ldElem->GetByteCodeOffset() &&
- dst->IsEqual(src1->AsIndirOpnd()->GetBaseOpnd()) ||
- (src1->AsIndirOpnd()->GetIndexOpnd() && dst->IsEqual(src1->AsIndirOpnd()->GetIndexOpnd())))
- {
- // This is a pre-op bailout where the dst is the same as one of the srcs. The dst may be trashed before bailing out,
- // but since the operation will be processed again in the interpreter, src values need to be kept intact. Use a
- // temporary dst until after the operation is complete.
- IR::Instr *instrSink = ldElem->SinkDst(Js::OpCode::Ld_A);
- // The sink instruction needs to be on the fall-through path
- instrSink->Unlink();
- labelFallThru->InsertAfter(instrSink);
- LowererMD::ChangeToAssign(instrSink);
- dst = ldElem->GetDst();
- }
- if (isTypedArrayElement)
- {
- // For typedArrays, convert the loaded element to the appropriate type
- IR::RegOpnd *reg;
- IR::AutoReuseOpnd autoReuseReg;
- Assert(dst->IsRegOpnd());
- if(indirOpnd->IsFloat())
- {
- AssertMsg((dstType == TyFloat64) || (dstType == TyVar), "For Float32Array LdElemI's dst should be specialized to TyFloat64 or not at all.");
- if(indirOpnd->IsFloat32())
- {
- // MOVSS reg32.f32, indirOpnd.f32
- IR::RegOpnd *reg32 = IR::RegOpnd::New(TyFloat32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg32(reg32, m_func);
- InsertMove(reg32, indirOpnd, ldElem);
- // CVTPS2PD dst/reg.f64, reg32.f64
- reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
- autoReuseReg.Initialize(reg, m_func);
- InsertConvertFloat32ToFloat64(reg, reg32, ldElem);
- }
- else
- {
- Assert(indirOpnd->IsFloat64());
- // MOVSD dst/reg.f64, indirOpnd.f64
- reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
- autoReuseReg.Initialize(reg, m_func);
- InsertMove(reg, indirOpnd, ldElem);
- }
- if (dstType != TyFloat64)
- {
- // Convert reg.f64 to var
- m_lowererMD.SaveDoubleToVar(dst->AsRegOpnd(), reg, ldElem, ldElem);
- }
- #if FLOATVAR
- // For NaNs, go to the helper to guarantee we don't have an illegal NaN
- // UCOMISD reg, reg
- {
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::UCOMISD, this->m_func);
- instr->SetSrc1(reg);
- instr->SetSrc2(reg);
- ldElem->InsertBefore(instr);
- }
- // JP $helper
- {
- IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::JP, labelHelper, this->m_func);
- ldElem->InsertBefore(instr);
- }
- #endif
- if(dstType == TyFloat64)
- {
- emitBailout = true;
- }
- }
- else
- {
- AssertMsg((dstType == TyInt32) || (dstType == TyVar), "For Int/UintArray LdElemI's dst should be specialized to TyInt32 or not at all.");
- reg = dstType == TyInt32 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyInt32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- // Int32 and Uint32 arrays could overflow an int31, but the others can't
- if (indirOpnd->GetType() != TyUint32
- #if !INT32VAR
- && indirOpnd->GetType() != TyInt32
- #endif
- )
- {
- reg->SetValueType(ValueType::GetTaggedInt()); // Fits as a tagged-int
- }
- // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
- IR::Instr *const instr = InsertMove(reg, indirOpnd, ldElem);
- if (dstType == TyInt32)
- {
- instr->dstIsTempNumber = ldElem->dstIsTempNumber;
- instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
- if (indirOpnd->GetType() == TyUint32)
- {
- // TEST dst, dst
- // JSB $helper (bailout)
- InsertCompareBranch(
- reg,
- IR::IntConstOpnd::New(0, TyUint32, this->m_func, /* dontEncode = */ true),
- Js::OpCode::BrLt_A,
- labelHelper,
- ldElem);
- }
- emitBailout = true;
- }
- else
- {
- // MOV dst, reg
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::ToVar, dst, reg, this->m_func);
- instr->dstIsTempNumber = ldElem->dstIsTempNumber;
- instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
- ldElem->InsertBefore(instr);
- // Convert dst to var
- m_lowererMD.EmitLoadVar(instr, /* isFromUint32 = */ (indirOpnd->GetType() == TyUint32));
- }
- }
- // JMP $fallthrough
- InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
- emittedFastPath = true;
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s.",
- this->m_func->GetJnFunction()->GetDisplayName(),
- this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode),
- baseValueTypeStr,
- (!dst->IsVar() ? L"specialized" : L"not specialized"));
- Output::Print(L"\n");
- Output::Flush();
- }
- }
- else
- {
- // MOV dst, indirOpnd
- InsertMove(dst, indirOpnd, ldElem);
- // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
- // we don't need to do any further checks in that case
- // For LdMethodElem, if the loaded value is a tagged number, the error message generated by the helper call is
- // better than if we were to just try to call the number. Also, the call arguments need to be evaluated before
- // throwing the error, so just test whether it's an object and jump to helper if it's not.
- const bool needObjectTest = !isStringIndex && !isNativeArrayLoad && ldElem->m_opcode == Js::OpCode::LdMethodElem;
- needMissingValueCheck =
- !isStringIndex && !(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues());
- if(needMissingValueCheck)
- {
- // TEST dst, dst
- // JEQ $helper | JNE $fallthrough
- InsertCompareBranch(
- dst,
- GetMissingItemOpnd(dst->GetType(), m_func),
- needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
- needObjectTest ? labelHelper : labelFallThru,
- ldElem,
- true);
- if (isNativeArrayLoad)
- {
- Assert(!needObjectTest);
- Assert(labelHelper != labelBailOut);
- if(ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined())
- {
- // We're going to bail out trying to load "missing value" into a type-spec'd opnd.
- // Branch to a point where we'll convert the array so that we don't keep bailing here.
- // (Gappy arrays are not well-suited to nativeness.)
- labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- InsertBranch(Js::OpCode::Br, labelMissingNative, ldElem);
- }
- else
- {
- // If the value has not been profiled to be undefined at some point, jump directly to bail out
- InsertBranch(Js::OpCode::Br, labelBailOut, ldElem);
- }
- }
- }
- if(needObjectTest)
- {
- // GenerateObjectTest(dst)
- // JIsObject $fallthrough
- m_lowererMD.GenerateObjectTest(dst, ldElem, labelFallThru, true);
- }
- else if(!needMissingValueCheck)
- {
- // JMP $fallthrough
- InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
- }
- emittedFastPath = true;
- }
- }
- // $helper:
- // bailout or caller generated helper call
- // $fallthru:
- if (!emittedFastPath)
- {
- labelHelper->isOpHelper = false;
- }
- ldElem->InsertBefore(labelHelper);
- instrIsInHelperBlock = true;
- if (isNativeArrayLoad)
- {
- Assert(ldElem->HasBailOutInfo());
- Assert(labelHelper != labelBailOut);
- // Transform the original instr:
- //
- // $helper:
- // dst = LdElemI_A src (BailOut)
- // $fallthrough:
- //
- // to:
- //
- // b $fallthru <--- we get here if we loaded a valid element directly
- // $helper:
- // dst = LdElemI_A src
- // cmp dst, MissingItem
- // bne $fallthrough
- // $bailout:
- // BailOut
- // $fallthrough:
- LowerOneBailOutKind(ldElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
- IR::Instr *const insertBeforeInstr = ldElem->m_next;
- // Do missing value check on value returned from helper so that we don't have to check the index against
- // array length. (We already checked it above against the segment length.)
- bool hasBeenUndefined = ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined();
- if (hasBeenUndefined)
- {
- if(!emitBailout)
- {
- if (labelMissingNative == nullptr)
- {
- labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- }
- InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr, true);
- }
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- if(labelMissingNative)
- {
- // We're going to bail out on a load from a gap, but convert the array to Var first, so we don't just
- // bail here over and over. Gappy arrays are not well suited to nativeness.
- // NOTE: only emit this call if the profile tells us that this has happened before ("hasBeenUndefined").
- // Emitting this in Navier-Stokes brutalizes the score.
- insertBeforeInstr->InsertBefore(labelMissingNative);
- IR::JnHelperMethod helperMethod;
- indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
- if (indirOpnd->GetBaseOpnd()->GetValueType().HasIntElements())
- {
- helperMethod = IR::HelperIntArr_ToVarArray;
- }
- else
- {
- Assert(indirOpnd->GetBaseOpnd()->GetValueType().HasFloatElements());
- helperMethod = IR::HelperFloatArr_ToVarArray;
- }
- m_lowererMD.LoadHelperArgument(insertBeforeInstr, indirOpnd->GetBaseOpnd());
- IR::Instr *instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
- instrHelper->SetSrc1(IR::HelperCallOpnd::New(helperMethod, m_func));
- insertBeforeInstr->InsertBefore(instrHelper);
- m_lowererMD.LowerCall(instrHelper, 0);
- }
- }
- else
- {
- if(!emitBailout)
- {
- InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr, true);
- }
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- }
- insertBeforeInstr->InsertBefore(labelBailOut);
- }
- if (emitBailout)
- {
- ldElem->UnlinkSrc1();
- ldElem->UnlinkDst();
- GenerateBailOut(ldElem, nullptr, nullptr);
- }
- return !emitBailout;
- }
- IR::Opnd *
- Lowerer::GetMissingItemOpnd(IRType type, Func *func)
- {
- if (type == TyVar)
- {
- return IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, func, true);
- }
- if (type == TyInt32)
- {
- return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true);
- }
- Assert(type == TyFloat64);
- return IR::MemRefOpnd::New((BYTE*)&Js::JavascriptNativeFloatArray::MissingItem, TyFloat64, func);
- }
- bool
- Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
- {
- Assert(instrIsInHelperBlockRef);
- bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
- instrIsInHelperBlock = false;
- IR::LabelInstr * labelHelper;
- IR::LabelInstr * labelSegmentLengthIncreased;
- IR::LabelInstr * labelFallThru;
- IR::LabelInstr * labelBailOut = nullptr;
- IR::Opnd *dst = stElem->GetDst();
- IR::IndirOpnd * indirOpnd = dst->AsIndirOpnd();
- AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
- // From FastElemICommon:
- // TEST base, AtomTag -- check base not tagged int
- // JNE $helper
- // MOV r1, [base + offset(type)] -- check base isArray
- // CMP [r1 + offset(typeId)], TypeIds_Array
- // JNE $helper
- // TEST index, 1 -- index tagged int
- // JEQ $helper
- // MOV r2, index
- // SAR r2, Js::VarTag_Shift -- remote atom tag
- // JS $helper -- exclude negative index
- // MOV r4, [base + offset(head)]
- // CMP r2, [r4 + offset(length)] -- bounds check
- // JB $done
- // CMP r2, [r4 + offset(size)] -- chunk has room?
- // JAE $helper
- // LEA r5, [r2 + 1]
- // MOV [r4 + offset(length)], r5 -- update length on chunk
- // CMP r5, [base + offset(length)]
- // JBE $done
- // MOV [base + offset(length)], r5 -- update length on array
- // $done
- // LEA r3, [r4 + offset(elements)]
- // Generated here.
- // MOV [r3 + r2], src
- labelFallThru = stElem->GetOrCreateContinueLabel();
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- bool emitBailout = false;
- bool isNativeArrayStore = indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
- IR::LabelInstr * labelCantUseArray = labelHelper;
- if (isNativeArrayStore)
- {
- if (stElem->GetSrc1()->GetType() != GetArrayIndirType(indirOpnd->GetBaseOpnd()->GetValueType()))
- {
- // Skip the fast path and just generate a helper call
- return true;
- }
- if(stElem->HasBailOutInfo())
- {
- const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
- if (bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
- {
- labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- labelCantUseArray = labelBailOut;
- }
- }
- }
- bool isTypedArrayElement, isStringIndex;
- indirOpnd =
- GenerateFastElemICommon(
- stElem,
- true,
- indirOpnd,
- labelHelper,
- labelCantUseArray,
- labelFallThru,
- &isTypedArrayElement,
- &isStringIndex,
- &emitBailout,
- &labelSegmentLengthIncreased);
- IR::Opnd *src = stElem->GetSrc1();
- const IR::AutoReuseOpnd autoReuseSrc(src, m_func);
- // The index is negative or not int.
- if (indirOpnd == nullptr)
- {
- Assert(!(stElem->HasBailOutInfo() && stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall));
- // The global optimizer should never type specialize a StElem for which we know the index is not int or is a negative
- // int constant. This would result in an unconditional bailout on the main code path.
- if (src->IsVar())
- {
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n",
- this->m_func->GetJnFunction()->GetDisplayName(),
- this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode));
- Output::Flush();
- }
- // We must be dealing with some atypical index value. Don't emit fast path, but go directly to helper.
- return true;
- }
- else
- {
- // If global optimizer failed to notice the unconventional index and type specialized the src,
- // there is nothing to do but bail out. We should never hit this code path, unless the global optimizer's conditions
- // for not specializing the instruction don't match the lowerer's conditions for not emitting the array checks (see above).
- // This could happen if global optimizer's information based on value tracking fails to recognize a non-integer index or
- // a constant int index that is negative. The bailout below ensures that we behave correctly in retail builds even under
- // these (unlikely) conditions.
- AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
- stElem->UnlinkSrc1();
- stElem->UnlinkDst();
- GenerateBailOut(stElem, nullptr, nullptr);
- return false;
- }
- }
- const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
- const ValueType baseValueType(dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
- if (isTypedArrayElement)
- {
- if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
- {
- char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
- baseValueType.ToString(baseValueTypeStr);
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- Output::Print(L"Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s.",
- this->m_func->GetJnFunction()->GetDisplayName(),
- this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode),
- baseValueTypeStr,
- (!src->IsVar() ? L"specialized" : L"not specialized"));
- Output::Print(L"\n");
- Output::Flush();
- }
- ObjectType objectType = baseValueType.GetObjectType();
- if(indirOpnd->IsFloat())
- {
- if (src->GetType() == TyFloat64)
- {
- IR::RegOpnd *const regSrc = src->AsRegOpnd();
- if (indirOpnd->IsFloat32())
- {
- // CVTSD2SS reg.f32, regSrc.f64 -- Convert regSrc from f64 to f32
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyFloat32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- InsertConvertFloat64ToFloat32(reg, regSrc, stElem);
- // MOVSS indirOpnd, reg
- InsertMove(indirOpnd, reg, stElem);
- }
- else
- {
- // MOVSD indirOpnd, regSrc
- InsertMove(indirOpnd, regSrc, stElem);
- }
- emitBailout = true;
- }
- else
- {
- Assert(src->GetType() == TyVar);
- // MOV reg, src
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyVar, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- InsertMove(reg, src, stElem);
- // Convert to float, and assign to indirOpnd
- if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
- {
- IR::RegOpnd* dstReg = IR::RegOpnd::New(indirOpnd->GetType(), this->m_func);
- m_lowererMD.EmitLoadFloat(dstReg, reg, stElem);
- InsertMove(indirOpnd, dstReg, stElem);
- }
- else
- {
- m_lowererMD.EmitLoadFloat(indirOpnd, reg, stElem);
- }
- }
- }
- else if (objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray)
- {
- Assert(indirOpnd->GetType() == TyUint8);
- IR::RegOpnd *regSrc;
- IR::AutoReuseOpnd autoReuseRegSrc;
- if(src->IsRegOpnd())
- {
- regSrc = src->AsRegOpnd();
- }
- else
- {
- regSrc = IR::RegOpnd::New(StackSym::New(src->GetType(), m_func), src->GetType(), m_func);
- autoReuseRegSrc.Initialize(regSrc, m_func);
- InsertMove(regSrc, src, stElem);
- }
- IR::Opnd *bitMaskOpnd;
- IRType srcType = regSrc->GetType();
- if ((srcType == TyFloat64) || (srcType == TyInt32))
- {
- // if (srcType == TyInt32) {
- // TEST regSrc, ~255
- // JE $storeValue
- // JSB $handleNegative
- // MOV indirOpnd, 255
- // JMP $fallThru
- // $handleNegative [isHelper = false]
- // MOV indirOpnd, 0
- // JMP $fallThru
- // $storeValue
- // MOV indirOpnd, regSrc
- // }
- // else {
- // MOVSD regTmp, regSrc
- // ADDSD regTmp, 0.5
- // CVTTSD2SI regOpnd, regTmp
- // TEST regOpnd, ~255
- // JE $storeValue
- // $handleOutOfBounds [isHelper = true]
- // COMISD regSrc, [&FloatZero]
- // JB $handleNegative
- // MOV regOpnd, 255
- // JMP $storeValue
- // $handleNegative [isHelper = true]
- // MOV regOpnd, 0
- // $storeValue
- // MOV indirOpnd, regOpnd
- // }
- // $fallThru
- IR::RegOpnd *regOpnd;
- IR::AutoReuseOpnd autoReuseRegOpnd;
- if (srcType == TyInt32)
- {
- // When srcType == TyInt32 we will never call the helper and we will never
- // modify the regOpnd. Therefore, it's okay to use regSrc directly, and it
- // reduces register pressure.
- regOpnd = regSrc;
- }
- else
- {
- #ifdef _M_IX86
- AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GlobOpt shouldn't have specialized Uint8ClampedArray StElem to float64 if SSE2 is unavailable.");
- #endif
- regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- autoReuseRegOpnd.Initialize(regOpnd, m_func);
- Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray);
- // Uint8ClampedArray follows IEEE 754 rounding rules for ties which round up
- // odd integers and round down even integers. Both ties result in the nearest
- // even integer value.
- //
- // CVTSD2SI regOpnd, regSrc
- LowererMD::InsertConvertFloat64ToInt32(RoundModeHalfToEven, regOpnd, regSrc, stElem);
- }
- IR::LabelInstr *labelStoreValue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
- #ifndef _M_ARM
- // TEST regOpnd, ~255
- // JE $storeValue
- bitMaskOpnd = IR::IntConstOpnd::New(~255, TyInt32, this->m_func, true);
- InsertTestBranch(regOpnd, bitMaskOpnd, Js::OpCode::BrEq_A, labelStoreValue, stElem);
- #else // ARM
- // Special case for ARM, a shift may be better
- //
- // ASRS tempReg, src, 8
- // BEQ $inlineSet
- InsertShiftBranch(
- Js::OpCode::Shr_A,
- IR::RegOpnd::New(TyInt32, this->m_func),
- regOpnd,
- IR::IntConstOpnd::New(8, TyInt8, this->m_func),
- Js::OpCode::BrEq_A,
- labelStoreValue,
- stElem);
- #endif
- IR::LabelInstr *labelHandleNegative = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, srcType == TyFloat64);
- if (srcType == TyInt32)
- {
- // JSB $handleNegativeOrOverflow
- InsertBranch(
- LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A),
- labelHandleNegative,
- stElem);
- // MOV IndirOpnd.u8, 255
- InsertMove(indirOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
- // JMP $fallThru
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- // $handleNegative [isHelper = false]
- stElem->InsertBefore(labelHandleNegative);
- // MOV IndirOpnd.u8, 0
- InsertMove(indirOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
- // JMP $fallThru
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- }
- else
- {
- Assert(regOpnd != regSrc);
- // This label is just to ensure the following code is moved to the helper block.
- // $handleOutOfBounds [isHelper = true]
- IR::LabelInstr *labelHandleOutOfBounds = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- stElem->InsertBefore(labelHandleOutOfBounds);
- // COMISD regSrc, FloatZero
- // JB labelHandleNegative
- IR::MemRefOpnd * zeroOpnd = IR::MemRefOpnd::New((double*)&(Js::JavascriptNumber::k_Zero), TyMachDouble, this->m_func);
- InsertCompareBranch(regSrc, zeroOpnd, Js::OpCode::BrNotGe_A, labelHandleNegative, stElem);
- // MOV regOpnd, 255
- InsertMove(regOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
- // JMP $storeValue
- InsertBranch(Js::OpCode::Br, labelStoreValue, stElem);
- // $handleNegative [isHelper = true]
- stElem->InsertBefore(labelHandleNegative);
- // MOV regOpnd, 0
- InsertMove(regOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
- }
- // $storeValue
- stElem->InsertBefore(labelStoreValue);
- // MOV IndirOpnd.u8, regOpnd.u8
- InsertMove(indirOpnd, regOpnd, stElem);
- emitBailout = true;
- }
- else
- {
- Assert(srcType == TyVar);
- #if INT32VAR
- bitMaskOpnd = IR::AddrOpnd::New((Js::Var)~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), IR::AddrOpndKindConstantVar, this->m_func, true);
- #else
- bitMaskOpnd = IR::IntConstOpnd::New(~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), TyMachReg, this->m_func, true);
- #endif
- // Note: We are assuming that if no bits other than ~(TaggedInt(255)) are 1, that we have a tagged
- // int value between 0 - 255.
- // #if INT32VAR
- // This works for pointers because tagged int bit can't be on, and first 64k are not valid addresses
- // This works for floats because a valid float would have one of the upper 13 bits on.
- // #else
- // Any pointer is larger than 512 because first 64k memory is reserved by the OS
- // #endif
- IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- #ifndef _M_ARM
- // TEST src, ~(TaggedInt(255)) -- Check for tagged int >= 255 and <= 0
- // JEQ $inlineSet
- InsertTestBranch(regSrc, bitMaskOpnd, Js::OpCode::BrEq_A, labelInlineSet, stElem);
- #else // ARM
- // Special case for ARM, a shift may be better
- //
- // ASRS tempReg, src, 8
- // BEQ $inlineSet
- InsertShiftBranch(
- Js::OpCode::Shr_A,
- IR::RegOpnd::New(TyInt32, this->m_func),
- regSrc,
- IR::IntConstOpnd::New(8, TyInt8, this->m_func),
- Js::OpCode::BrEq_A,
- labelInlineSet,
- stElem);
- #endif
- // Uint8ClampedArray::DirectSetItem(array, index, value);
- m_lowererMD.LoadHelperArgument(stElem, regSrc);
- IR::Opnd *indexOpnd = indirOpnd->GetIndexOpnd();
- if (indexOpnd == nullptr)
- {
- indexOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyInt32, this->m_func);
- }
- else
- {
- Assert(indirOpnd->GetOffset() == 0);
- }
- m_lowererMD.LoadHelperArgument(stElem, indexOpnd);
- m_lowererMD.LoadHelperArgument(stElem, stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd());
- IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
- Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedMixedArray || objectType == ObjectType::Uint8ClampedVirtualArray);
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperUint8ClampedArraySetItem, this->m_func));
- stElem->InsertBefore(instr);
- m_lowererMD.LowerCall(instr, 0);
- // JMP $fallThrough
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- //$inlineSet
- stElem->InsertBefore(labelInlineSet);
- IR::RegOpnd *regOpnd;
- IR::AutoReuseOpnd autoReuseRegOpnd;
- #if INT32VAR
- regOpnd = regSrc;
- #else
- // MOV r1, src
- // SAR r1, 1
- regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- autoReuseRegOpnd.Initialize(regOpnd, m_func);
- InsertShift(
- Js::OpCode::Shr_A,
- false /* needFlags */,
- regOpnd,
- regSrc,
- IR::IntConstOpnd::New(1, TyInt8, this->m_func),
- stElem);
- #endif
- // MOV IndirOpnd.u8, reg.u8
- InsertMove(indirOpnd, regOpnd, stElem);
- }
- }
- else
- {
- if (src->IsInt32())
- {
- // MOV indirOpnd, src
- InsertMove(indirOpnd, src, stElem);
- emitBailout = true;
- }
- else if (src->IsFloat64())
- {
- AssertMsg(indirOpnd->GetType() == TyUint32, "Only StElemI to Uint32Array could be specialized to float64.");
- #ifdef _M_IX86
- AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GloOpt shouldn't have specialized Uint32Array StElemI to float64 if SSE2 is unavailable.");
- #endif
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- m_lowererMD.EmitFloatToInt(reg, src, stElem);
- // MOV indirOpnd, reg
- InsertMove(indirOpnd, reg, stElem);
- emitBailout = true;
- }
- else
- {
- Assert(src->IsVar());
- if(src->IsAddrOpnd())
- {
- IR::AddrOpnd *const addrSrc = src->AsAddrOpnd();
- Assert(addrSrc->IsVar());
- Assert(Js::TaggedInt::Is(addrSrc->m_address));
- // MOV indirOpnd, intValue
- InsertMove(
- indirOpnd,
- IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(addrSrc->m_address), TyInt32, m_func),
- stElem);
- }
- else
- {
- IR::RegOpnd *const regSrc = src->AsRegOpnd();
- // FromVar reg, Src
- IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
- const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
- IR::Instr *const instr = IR::Instr::New(Js::OpCode::FromVar, reg, regSrc, stElem->m_func);
- stElem->InsertBefore(instr);
- // Convert reg to int32
- // Note: ToUint32 is implemented as (uint32)ToInt32()
- m_lowererMD.EmitLoadInt32(instr);
- // MOV indirOpnd, reg
- InsertMove(indirOpnd, reg, stElem);
- }
- }
- }
- }
- else
- {
- if(labelSegmentLengthIncreased)
- {
- IR::Instr *const insertBeforeInstr = labelSegmentLengthIncreased->m_next;
- // labelSegmentLengthIncreased:
- // mov [segment + index], src
- // jmp $fallThru
- InsertMove(indirOpnd, src, insertBeforeInstr);
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- }
- if (!(isStringIndex || baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
- {
- if(!stElem->IsProfiledInstr() || stElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
- {
- // Check whether the store is filling a missing value. If so, fall back to the helper so that it can check whether
- // this store is filling the last missing value in the array. This is necessary to keep the missing value tracking
- // in arrays precise. The check is omitted when profile data says that the store is likely to create missing values.
- //
- // cmp [segment + index], Js::SparseArraySegment::MissingValue
- // je $helper
- InsertCompareBranch(
- indirOpnd,
- GetMissingItemOpnd(src->GetType(), m_func),
- Js::OpCode::BrEq_A,
- labelHelper,
- stElem,
- true);
- }
- else
- {
- GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, stElem);
- }
- }
- // MOV [r3 + r2], src
- InsertMoveWithBarrier(indirOpnd, src, stElem);
- }
- // JMP $fallThru
- InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
- // $helper:
- // bailout or caller generated helper call
- // $fallThru:
- stElem->InsertBefore(labelHelper);
- instrIsInHelperBlock = true;
- if (isNativeArrayStore && !isStringIndex)
- {
- Assert(stElem->HasBailOutInfo());
- Assert(labelHelper != labelBailOut);
- // Transform the original instr:
- //
- // $helper:
- // dst = LdElemI_A src (BailOut)
- // $fallthrough:
- //
- // to:
- //
- // $helper:
- // dst = LdElemI_A src
- // b $fallthrough
- // $bailout:
- // BailOut
- // $fallthrough:
- LowerOneBailOutKind(stElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
- IR::Instr *const insertBeforeInstr = stElem->m_next;
- InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(labelBailOut);
- }
- if (emitBailout)
- {
- stElem->UnlinkSrc1();
- stElem->UnlinkDst();
- GenerateBailOut(stElem, nullptr, nullptr);
- }
- return !emitBailout;
- }
// Generates the inline fast path for LdLen ("length" property load) on arrays,
// typed arrays, strings, and the stack-arguments object.
//
// Params:
//   ldLen                  - the LdLen instruction being lowered; its src1 must be a RegOpnd.
//   instrIsInHelperBlockRef- out: set to true once code insertion has crossed into the
//                            helper block (affects how the caller emits the helper call).
// Returns:
//   true  - a fast path was emitted (or skipped) and the caller must still emit the
//           helper call after the $helper label.
//   false - the instruction was fully consumed here (BailOutOnIrregularLength case);
//           the caller must not emit a helper call.
bool
Lowerer::GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    // Shape of the emitted code (array + string fast paths both enabled):
    //
    //     TEST src, AtomTag                  -- check src not tagged int
    //     JNE $helper
    //     CMP [src], JavascriptArray::`vtable' -- check base isArray
    //     JNE $string
    //     MOV length, [src + offset(length)] -- Load array length
    //     JMP $tovar
    // $string:
    //     CMP [src + offset(type)], static_string_type -- check src isString
    //     JNE $helper
    //     MOV length, [src + offset(length)] -- Load string length
    // $toVar:
    //     TEST length, 0xC0000000            -- test for overflow of SHL, or negative
    //     JNE $helper
    //     SHL length, Js::VarTag_Shift       -- restore the var tag on the result
    //     INC length
    //     MOV dst, length
    //     JMP $fallthru
    // $helper:
    //     CALL GetProperty(src, length_property_id, scriptContext)
    // $fallthru:

    IR::RegOpnd * opnd = ldLen->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * dst = ldLen->GetDst()->AsRegOpnd();
    IR::RegOpnd * src = opnd->AsRegOpnd();
    const ValueType srcValueType(src->GetValueType());

    AssertMsg(src->IsRegOpnd(), "Expected regOpnd on LdLen");

    IR::LabelInstr *const labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (ldLen->DoStackArgsOpt(this->m_func))
    {
        // LdLen of the arguments object when stack-args optimization applies:
        // a dedicated fast path reads the count directly from the stack frame.
        GenerateFastArgumentsLdLen(ldLen, labelHelper, ldLen->GetOrCreateContinueLabel());
    }
    else
    {
        const bool arrayFastPath = ShouldGenerateArrayFastPath(src, false, true, false);

        // HasBeenString instead of IsLikelyString because it could be a merge between StringObject and String, and this
        // information about whether it's a StringObject or some other object is not available in the profile data
        const bool stringFastPath = srcValueType.IsUninitialized() || srcValueType.HasBeenString();

        if(!(arrayFastPath || stringFastPath))
        {
            // No profitable fast path; caller emits the helper call unconditionally.
            return true;
        }

        // Typed arrays keep their length at a type-specific offset; fall back to the
        // JavascriptArray offset when the value type doesn't pin down the array kind.
        const int32 arrayOffsetOfLength =
            srcValueType.IsLikelyAnyOptimizedArray()
                ? GetArrayOffsetOfLength(srcValueType)
                : Js::JavascriptArray::GetOffsetOfLength();
        IR::LabelInstr *labelString = nullptr;
        IR::RegOpnd *arrayOpnd = src;
        IR::RegOpnd *arrayLengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
        if(arrayFastPath)
        {
            if(!srcValueType.IsAnyOptimizedArray())
            {
                if(stringFastPath)
                {
                    // If we don't have info about the src value type or its object type, the array and string fast paths are
                    // generated
                    labelString = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                }

                // Emits the tagged-int/vtable checks; failure goes to the string path
                // (when enabled) or straight to the helper.
                arrayOpnd = GenerateArrayTest(src, labelHelper, stringFastPath ? labelString : labelHelper, ldLen, false);
            }
            else if(src->IsArrayRegOpnd())
            {
                // The globopt already tracks a length sym for this array; reuse it
                // instead of reloading the length from memory.
                IR::ArrayRegOpnd *const arrayRegOpnd = src->AsArrayRegOpnd();
                if(arrayRegOpnd->LengthSym())
                {
                    arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), TyUint32, m_func);
                    DebugOnly(arrayLengthOpnd->FreezeSymValue());
                    autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
                }
            }
        }
        const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

        IR::RegOpnd *lengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseLengthOpnd;
        // Lazily allocates the uint32 register that both the array and string paths
        // store the raw length into.
        const auto EnsureLengthOpnd = [&]()
        {
            if(lengthOpnd)
            {
                return;
            }
            lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
            autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
        };

        if(arrayFastPath)
        {
            if(arrayLengthOpnd)
            {
                lengthOpnd = arrayLengthOpnd;
                autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
                // A tracked length sym implies a definitely-array value type, which
                // excludes the string path.
                Assert(!stringFastPath);
            }
            else
            {
                // MOV length, [array + offset(length)] -- Load array length
                EnsureLengthOpnd();
                IR::IndirOpnd *const indirOpnd = IR::IndirOpnd::New(arrayOpnd, arrayOffsetOfLength, TyUint32, this->m_func);
                InsertMove(lengthOpnd, indirOpnd, ldLen);
            }
        }

        if(stringFastPath)
        {
            IR::LabelInstr *labelToVar = nullptr;
            if(arrayFastPath)
            {
                // JMP $tovar -- array path joins the common tail, skipping the string checks
                labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                InsertBranch(Js::OpCode::Br, labelToVar, ldLen);

                // $string:
                ldLen->InsertBefore(labelString);
            }

            // CMP [src + offset(type)], static_stringtype -- check src isString
            // JNE $helper
            GenerateStringTest(src, ldLen, labelHelper, nullptr, !arrayFastPath);

            // MOV length, [src + offset(length)] -- Load string length
            EnsureLengthOpnd();
            IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
            InsertMove(lengthOpnd, indirOpnd, ldLen);

            if(arrayFastPath)
            {
                // $toVar:
                ldLen->InsertBefore(labelToVar);
            }
        }

        Assert(lengthOpnd);

        if(ldLen->HasBailOutInfo() && (ldLen->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnIrregularLength)
        {
            // Int-specialized dst: no var tagging needed, but any length that doesn't
            // fit a non-negative int32 must bail out. This path consumes the
            // instruction entirely (returns false).
            Assert(ldLen->GetBailOutKind() == IR::BailOutOnIrregularLength);
            Assert(dst->IsInt32());

            // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
            //     test length, length
            //     js $helper
            //     mov dst, length
            //     jmp $fallthrough
            InsertCompareBranch(
                lengthOpnd,
                IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
                Js::OpCode::BrLt_A,
                labelHelper,
                ldLen);
            InsertMove(dst, lengthOpnd, ldLen);
            InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);

            // $helper:
            // (Bail out with IR::BailOutOnIrregularLength)
            ldLen->InsertBefore(labelHelper);
            instrIsInHelperBlock = true;
            ldLen->FreeDst();
            ldLen->FreeSrc1();
            GenerateBailOut(ldLen);
            return false;
        }

#if INT32VAR
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
        //     test length, length
        //     js $helper
        InsertCompareBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
            Js::OpCode::BrLt_A,
            labelHelper,
            ldLen);
#else
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative.
        // Additionally, verify that the signed value's width is not greater than 31 bits, since it needs to be tagged.
        //     test length, 0xC0000000
        //     jne $helper
        InsertTestBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0xC0000000, TyUint32, this->m_func, true),
            Js::OpCode::BrNeq_A,
            labelHelper,
            ldLen);
#endif

#if INT32VAR
        //
        // dst_32 = MOV length
        // dst_64 = OR dst_64, Js::AtomTag_IntPtr
        //
        Assert(dst->GetType() == TyVar);
        IR::Opnd *dst32 = dst->Copy(this->m_func);
        dst32->SetType(TyInt32);

        // This will clear the top bits.
        InsertMove(dst32, lengthOpnd, ldLen);
        m_lowererMD.GenerateInt32ToVarConversion(dst, ldLen);
#else
        // dst = SHL length, Js::VarTag_Shift -- restore the var tag on the result
        InsertShift(
            Js::OpCode::Shl_A,
            false /* needFlags */,
            dst,
            lengthOpnd,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            ldLen);

        // dst = ADD dst, AtomTag
        InsertAdd(
            false /* needFlags */,
            dst,
            dst,
            IR::IntConstOpnd::New(Js::AtomTag_Int32, TyUint32, m_func, true),
            ldLen);
#endif

        // JMP $fallthrough
        InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
    }

    // $helper:
    // (caller generates helper call)
    ldLen->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;

    return true; // fast path was generated, helper call will be in a helper block
}
// Emits the inline surrogate-pair combination for String.prototype.codePointAt.
// On entry, lowerChar already holds the char code at srcIndex (the charCodeAt
// result). This routine checks whether that code unit is a lead surrogate
// followed by a valid trail surrogate; if so it combines the pair into the
// full code point in lowerChar, otherwise it branches to $charCodeAt and
// leaves lowerChar unchanged.
//
// Params:
//   lastInstr - insertion point; all generated code goes before it.
//   func      - function to allocate IR objects from.
//   strLength - string length (uint32 or machine-reg width).
//   srcIndex  - index being read: either a tagged-int AddrOpnd constant or a TyVar reg.
//   lowerChar - in/out: char code at srcIndex, replaced by the code point on success.
//   strPtr    - pointer to the string's character buffer.
void
Lowerer::GenerateFastInlineStringCodePointAt(IR::Instr* lastInstr, Func* func, IR::Opnd *strLength, IR::Opnd *srcIndex, IR::RegOpnd *lowerChar, IR::RegOpnd *strPtr)
{
    //// Required State:
    // strLength - UInt32
    // srcIndex - TyVar if not Address
    // lowerChar - TyMachReg
    // strPtr - Addr

    //// Instructions
    // CMP [strLength], srcIndex + 1
    // JBE charCodeAt

    // CMP lowerChar 0xDC00
    // JGE charCodeAt

    // CMP lowerChar 0xD7FF
    // JLE charCodeAt

    // upperChar = MOVZX [strPtr + srcIndex + 1]

    // CMP upperChar 0xE000
    // JGE charCodeAt

    // CMP lowerChar 0xDBFF
    // JLE charCodeAt

    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    // :charCodeAt
    // :done

    // Asserts
    // Arm should change to Uint32 for the strLength
    Assert(strLength->GetType() == TyUint32 || strLength->GetType() == TyMachReg);
    Assert(srcIndex->GetType() == TyVar || srcIndex->IsAddrOpnd());
    Assert(lowerChar->GetType() == TyMachReg || lowerChar->GetType() == TyUint32);
    Assert(strPtr->IsRegOpnd());

    IR::RegOpnd *tempReg = IR::RegOpnd::New(TyMachReg, func);
    IR::LabelInstr *labelCharCodeAt = IR::LabelInstr::New(Js::OpCode::Label, func);

    IR::IndirOpnd *tempIndirOpnd;

    if (srcIndex->IsAddrOpnd())
    {
        // Constant index: fold index+1 at JIT time. If index+1 >= length there is
        // no room for a trail surrogate, so fall back to the charCodeAt result.
        // NOTE(review): the 'true' argument presumably selects the unsigned-compare
        // form of InsertCompareBranch -- confirm against its declaration.
        uint32 length = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) + 1U;
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(length, TyUint32, func), Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);

        // Operand for the trail surrogate at (index + 1), scaled by the 2-byte char size.
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, (length) * sizeof(wchar_t), TyUint16, func);
    }
    else
    {
        // Dynamic index: untag the var int, then compute index+1 and do the same
        // bounds check.
        InsertMove(tempReg, srcIndex, lastInstr);

#if INT32VAR
        // On INT32VAR targets the int payload lives in the low 32 bits; moving
        // through a 32-bit view of the register discards the tag bits.
        IR::Opnd * reg32Bit = tempReg->UseWithNewType(TyInt32, func);
        InsertMove(tempReg, reg32Bit, lastInstr);
        tempReg = reg32Bit->AsRegOpnd();
#else
        // Shift out the var tag to recover the integer value.
        InsertShift(Js::OpCode::Shr_A, false, tempReg, tempReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func), lastInstr);
#endif

        InsertAdd(false, tempReg, tempReg, IR::IntConstOpnd::New(1, TyInt32, func), lastInstr);
        InsertCompareBranch(strLength, tempReg, Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);

        // The index register must be pointer-width to participate in an indir operand.
        if(tempReg->GetSize() != MachPtr)
        {
            tempReg = tempReg->UseWithNewType(TyMachPtr, func)->AsRegOpnd();
        }

        // [strPtr + tempReg * 2] -- trail surrogate at (index + 1); scale 1 here is
        // the indir scale exponent (2^1 == sizeof(wchar_t)).
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, tempReg, 1, TyUint16, func);
    }

    // By this point, we have added instructions before labelCharCodeAt to check for extra length required for the surrogate pair
    // The branching for that is already handled, all we have to do now is to check for correct values.

    // Validate char is in range [D800, DBFF]; otherwise just get a charCodeAt
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xDC00, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xD7FF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // upperChar = MOVZX r3, [r1 + r3 * 2] -- this is the value of the upper surrogate pair char
    IR::RegOpnd *upperChar = IR::RegOpnd::New(TyInt32, func);
    InsertMove(upperChar, tempIndirOpnd, lastInstr);

    // Validate upper is in range [DC00, DFFF]; otherwise just get a charCodeAt
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xE000, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xDBFF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // Standard UTF-16 decoding: (lead - 0xD800) << 10 + trail - 0xDC00 + 0x10000.
    // The two constants are folded: 0x10000 - 0xDC00 = 0x2400.
    // (lower - 0xD800) << 10 + second - 0xDC00 + 0x10000 -- 0x10000 - 0xDC00 = 0x2400
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    InsertSub(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0xD800, TyUint32, func), lastInstr);
    InsertShift(Js::OpCode::Shl_A, false, lowerChar, lowerChar, IR::IntConstOpnd::New(10, TyUint32, func), lastInstr);
    InsertAdd(false, lowerChar, lowerChar, upperChar, lastInstr);
    InsertAdd(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0x2400, TyUint32, func), lastInstr);

    // :charCodeAt -- join point for all "not a valid surrogate pair" branches.
    lastInstr->InsertBefore(labelCharCodeAt);
}
// Emits the inline fast path for String.fromCodePoint when the argument is
// likely a number: untag the var int and probe the per-script-context single
// character string cache, falling back to the CallDirect helper otherwise.
//
// Returns true unconditionally: the CallDirect remains as the helper path
// (relocated behind $helper) whether or not the fast path was emitted.
bool
Lowerer::GenerateFastInlineStringFromCodePoint(IR::Instr* instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence this lowering walks backwards through:
    //   s8.var          = StartCall      2 (0x2).i32                           #000c
    //   arg1(s9)<0>.var = ArgOut_A       s2.var, s8.var                        #0014 //Implicit this, String object
    //   arg2(s10)<4>.var= ArgOut_A       s3.var, arg1(s9)<0>.var               #0018 //First argument to FromCodePoint
    //   arg1(s11)<0>.u32= ArgOut_A_InlineSpecialized  0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var
    //   s0[LikelyTaggedInt].var = CallDirect String_FromCodePoint.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    // The defining ArgOut_A of the first (and only) user argument.
    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyNumber())
    {
        //Trying to generate this code
        // MOV resultOpnd, dst
        // MOV fromCodePointIntArgOpnd, src1
        // SAR fromCodePointIntArgOpnd, Js::VarTag_Shift
        // JAE $Helper
        // CMP fromCodePointIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        // JAE $labelWCharStringCheck <
        //      MOV resultOpnd, GetCharStringCache[fromCodePointIntArgOpnd]
        //      TST resultOpnd, resultOpnd //Check for null
        //      JEQ $helper
        //      JMP $Done
        //
        //$labelWCharStringCheck:
        //      resultOpnd = Call HelperGetStringForCharCodePoint
        //      JMP $Done
        //$helper:

        // Use a temp result register when the dst is unusable as one: either not a
        // register, or aliased with the source (which the cache probe must not clobber).
        IR::RegOpnd * resultOpnd = nullptr;
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }

        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::RegOpnd * fromCodePointIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCodePointIntArgOpnd(fromCodePointIntArgOpnd, instr->m_func);
        InsertMove(fromCodePointIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version.
        fromCodePointIntArgOpnd = GenerateUntagVar(fromCodePointIntArgOpnd, labelHelper, instr);
        GenerateGetSingleCharString(fromCodePointIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, true);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the original CallDirect (and its ArgOuts) behind $helper so the fast
        // path skips it entirely.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
- bool
- Lowerer::GenerateFastInlineStringFromCharCode(IR::Instr* instr)
- {
- Assert(instr->m_opcode == Js::OpCode::CallDirect);
- // ArgOut sequence
- // s8.var = StartCall 2 (0x2).i32 #000c
- // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
- // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
- // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
- // s0[LikelyTaggedInt].var = CallDirect String_FromCharCode.u32, arg1(s11)<0>.u32 #001c
- IR::Opnd * linkOpnd = instr->GetSrc2();
- IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
- linkOpnd = tmpInstr->GetSrc2();
- #if DBG
- IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
- Assert(argCount == 2);
- #endif
- IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
- Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
- IR::Opnd *src1 = argInstr->GetSrc1();
- if (src1->GetValueType().IsLikelyNumber())
- {
- //Trying to generate this code
- // MOV resultOpnd, dst
- // MOV fromCharCodeIntArgOpnd, src1
- // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
- // JAE $Helper
- // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
- //
- // JAE $labelWCharStringCheck <
- // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
- // TST resultOpnd, resultOpnd //Check for null
- // JEQ $helper
- // JMP $Done
- //
- //$labelWCharStringCheck:
- // resultOpnd = Call HelperGetStringForCharW
- // JMP $Done
- //$helper:
- IR::RegOpnd * resultOpnd = nullptr;
- if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
- {
- resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
- }
- else
- {
- resultOpnd = instr->GetDst()->AsRegOpnd();
- }
- IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- IR::RegOpnd * fromCharCodeIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
- IR::AutoReuseOpnd autoReuseFromCharCodeIntArgOpnd(fromCharCodeIntArgOpnd, instr->m_func);
- InsertMove(fromCharCodeIntArgOpnd, src1, instr);
- //Check for tagged int and get the untagged version.
- fromCharCodeIntArgOpnd = GenerateUntagVar(fromCharCodeIntArgOpnd, labelHelper, instr);
- IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- GenerateGetSingleCharString(fromCharCodeIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, false);
- instr->InsertBefore(labelHelper);
- instr->InsertAfter(doneLabel);
- RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
- }
- return true;
- }
// Emits the shared single-character string cache probe used by the
// String.fromCharCode / String.fromCodePoint fast paths.
//
// Params:
//   charCodeOpnd - untagged integer char code / code point.
//   resultOpnd   - register the resulting JavascriptString is produced into;
//                  also copied to instr's dst on both emitted paths.
//   labelHelper  - target when the cached entry is null (full helper path).
//   doneLabel    - join label branched to after a successful load.
//   instr        - insertion point; generated code goes before it.
//   isCodePoint  - selects the code-point helper (true) vs the single wchar
//                  helper (false, which first truncates charCodeOpnd to uint16).
void
Lowerer::GenerateGetSingleCharString(IR::RegOpnd * charCodeOpnd, IR::Opnd * resultOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * doneLabel, IR::Instr * instr, bool isCodePoint)
{
    // MOV cacheReg, CharStringCache
    // CMP charCodeOpnd, Js::ScriptContext::CharStringCacheSize
    // JAE $labelWCharStringCheck <
    //      MOV resultOpnd, cacheReg[charCodeOpnd]
    //      TST resultOpnd, resultOpnd //Check for null
    //      JEQ $helper
    //      JMP $Done
    //
    //$labelWCharStringCheck:
    //      Arg1 = charCodeOpnd
    //      Arg0 = cacheReg
    //      resultOpnd = Call HelperGetStringForCharW/CodePoint
    //      JMP $Done
    //$helper:

    IR::LabelInstr *labelWCharStringCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    //Try to load from in CharStringCacheA
    IR::RegOpnd *cacheRegOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
    IR::AutoReuseOpnd autoReuseCacheRegOpnd(cacheRegOpnd, instr->m_func);

    // The direct-index ASCII cache must sit at the start of the cache object,
    // since ValueCharStringCache points at the cache itself.
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheRegOpnd, this->LoadLibraryValueOpnd(instr, LibraryValue::ValueCharStringCache), instr);

    // NOTE(review): the 'true' argument presumably selects the unsigned-compare
    // form of InsertCompareBranch (JAE semantics) -- confirm against its declaration.
    InsertCompareBranch(charCodeOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint32, this->m_func), Js::OpCode::BrGe_A, true, labelWCharStringCheck, instr);

    // resultOpnd = cache[charCode]; a null slot means the string hasn't been
    // created yet, so defer to the full helper.
    InsertMove(resultOpnd, IR::IndirOpnd::New(cacheRegOpnd, charCodeOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, instr->m_func), instr);
    InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrEq_A, labelHelper, instr);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);

    instr->InsertBefore(labelWCharStringCheck);

    IR::JnHelperMethod helperMethod;
    if (isCodePoint)
    {
        helperMethod = IR::HelperGetStringForCharCodePoint;
    }
    else
    {
        // The wchar helper takes a 16-bit char; truncate the int to uint16 first.
        InsertMove(charCodeOpnd, charCodeOpnd->UseWithNewType(TyUint16, instr->m_func), instr);
        helperMethod = IR::HelperGetStringForChar;
    }

    //Try to load from in CharStringCacheW or CharStringCacheCodePoint, this is a helper call.
    this->m_lowererMD.LoadHelperArgument(instr, charCodeOpnd);
    this->m_lowererMD.LoadHelperArgument(instr, cacheRegOpnd);

    IR::Instr* helperCallInstr = IR::Instr::New(Js::OpCode::Call, resultOpnd, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    instr->InsertBefore(helperCallInstr);
    this->m_lowererMD.LowerCall(helperCallInstr, 0);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
}
- bool
- Lowerer::GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr)
- {
- Assert(instr->m_opcode == Js::OpCode::CallDirect);
- // ArgOut sequence
- // s8.var = StartCall 2 (0x2).i32 #000c
- // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, global object
- // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to parseInt
- // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
- // s0[LikelyTaggedInt].var = CallDirect GlobalObject_ParseInt.u32, arg1(s11)<0>.u32 #001c
- IR::Opnd * linkOpnd = instr->GetSrc2();
- IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
- linkOpnd = tmpInstr->GetSrc2();
- #if DBG
- IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
- Assert(argCount == 2);
- #endif
- IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
- Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
- IR::Opnd *parseIntArgOpnd = argInstr->GetSrc1();
- if (parseIntArgOpnd->GetValueType().IsLikelyNumber())
- {
- //If likely int check for tagged int and set the dst
- IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- if (!parseIntArgOpnd->IsTaggedInt())
- {
- this->m_lowererMD.GenerateSmIntTest(parseIntArgOpnd, instr, labelHelper);
- }
- if (instr->GetDst())
- {
- this->m_lowererMD.CreateAssign(instr->GetDst(), parseIntArgOpnd, instr);
- }
- InsertBranch(Js::OpCode::Br, doneLabel, instr);
- instr->InsertBefore(labelHelper);
- instr->InsertAfter(doneLabel);
- RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
- }
- return true;
- }
// Lowers InlineArrayPop: emits the inline fast path (via GenerateFastPop) with
// a helper fall-through and, for likely-native arrays, a dedicated bailout
// path, then appends the shared helper-call tail.
void
Lowerer::GenerateFastInlineArrayPop(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPop);

    IR::Opnd *arrayOpnd = instr->GetSrc1();

    IR::LabelInstr *bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    bool isLikelyNativeArray = arrayOpnd->GetValueType().IsLikelyNativeArray();

    if (ShouldGenerateArrayFastPath(arrayOpnd, false, false, false))
    {
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(isLikelyNativeArray)
        {
            //We bailOut on cases like length == 0, Array Test failing cases (Runtime helper cannot handle these cases)
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We jump to helper on cases like length == 0, Array Test failing cases
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, labelHelper);
        }
        instr->InsertBefore(labelHelper);

        ///JMP to $doneLabel
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    else
    {
        //We assume here that the array will be a Var array. - Runtime Helper calls assume this.
        Assert(!isLikelyNativeArray);
    }

    instr->InsertAfter(doneLabel);
    if(isLikelyNativeArray)
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
    }

    // Emit the runtime-helper tail that the fast path branches around.
    GenerateHelperToArrayPopFastPath(instr, doneLabel, bailOutLabelHelper);
}
- bool
- Lowerer::ShouldGenerateStringReplaceFastPath(IR::Instr * callInstr, IntConstType argCount)
- {
- // a.replace(b,c)
- // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
- //
- // argout sequence:
- // arg1(s12)<0>.var = ArgOut_A s2.var, s11.var #0014 <---- a
- // arg2(s13)<4>.var = ArgOut_A s3.var, arg1(s12)<0>.var #0018 <---- b
- // arg3(s14)<8>.var = ArgOut_A s4.var, arg2(s13)<4>.var #001c <---- c
- // s0[LikelyString].var = CallI s5[ffunc].var, arg3(s14)<8>.var #0020
- IR::Opnd *linkOpnd = callInstr->GetSrc2();
- Assert(argCount == 2);
- while(linkOpnd->IsSymOpnd())
- {
- IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
- StackSym *sym = src2->m_sym->AsStackSym();
- Assert(sym->m_isSingleDef);
- IR::Instr *argInstr = sym->m_instrDef;
- Assert(argCount >= 0);
- // check to see if 'a' and 'c' are likely strings
- if((argCount == 2 || argCount == 0) && (!argInstr->GetSrc1()->GetValueType().IsLikelyString()))
- {
- return false;
- }
- // we want 'b' to be regex. Don't generate fastpath if it is a tagged int
- if((argCount == 1) && (argInstr->GetSrc1()->IsTaggedInt()))
- {
- return false;
- }
- argCount--;
- linkOpnd = argInstr->GetSrc2();
- }
- return true;
- }
// Emits the inline fast path for a.replace(b, c): runtime-checks that strOpnd
// ('a') and src2 ('c') are strings and src1 ('b') is exactly a JavascriptRegExp
// (by vtable identity), then calls the RegExp replace helper directly. Failed
// checks branch to labelHelper. Always returns true.
bool
Lowerer::GenerateFastReplace(IR::Opnd* strOpnd, IR::Opnd* src1, IR::Opnd* src2, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // a.replace(b,c)
    // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
    //
    // strOpnd --> a
    // src1    --> b
    // src2    --> c

    IR::Opnd * callDst = callInstr->GetDst();

    Assert(strOpnd->GetValueType().IsLikelyString() && src2->GetValueType().IsLikelyString());

    if(!strOpnd->GetValueType().IsString())
    {
        // 'a' is only likely a string: emit the runtime string check.
        if(!strOpnd->IsRegOpnd())
        {
            IR::RegOpnd *strOpndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(strOpndReg, strOpnd, insertInstr);
            strOpnd = strOpndReg;
        }
        this->GenerateStringTest(strOpnd->AsRegOpnd(), insertInstr, labelHelper);
    }

    if(!src1->IsNotTaggedValue())
    {
        // 'b' could still be a tagged value: make sure it is a heap object.
        m_lowererMD.GenerateObjectTest(src1, insertInstr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!src1->IsRegOpnd())
    {
        IR::RegOpnd *src1Reg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(src1Reg, src1, insertInstr);
        src1 = src1Reg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(src1->AsRegOpnd(), 0, TyMachPtr, insertInstr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertInstr);

    if(!src2->GetValueType().IsString())
    {
        // 'c' is only likely a string: emit the runtime string check.
        if(!src2->IsRegOpnd())
        {
            IR::RegOpnd *src2Reg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(src2Reg, src2, insertInstr);
            src2 = src2Reg;
        }
        this->GenerateStringTest(src2->AsRegOpnd(), insertInstr, labelHelper);
    }

    //scriptContext, pRegEx, pThis, pReplace (to be pushed in reverse order)

    // pReplace, pThis, pRegEx
    this->m_lowererMD.LoadHelperArgument(insertInstr, src2);
    this->m_lowererMD.LoadHelperArgument(insertInstr, strOpnd);
    this->m_lowererMD.LoadHelperArgument(insertInstr, src1);

    // script context
    LoadScriptContext(insertInstr);

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, insertInstr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    insertInstr->InsertBefore(helperCallInstr);

    // Helper variant depends on whether the call's result is consumed.
    if(callDst)
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultUsed);
    }
    else
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultNotUsed);
    }

    return true;
}
- ///----
// Emits the inline fast path shared by String.prototype.split and
// String.prototype.match: after verifying the receiver is a string and the
// argument is exactly a JavascriptRegExp (vtable check), the CallDirect is
// replaced by a direct call to the matching RegExp helper; the original
// CallDirect sequence is relocated under labelHelper as the slow path.
void
Lowerer::GenerateFastInlineStringSplitMatch(IR::Instr * instr)
{
    // a.split(b,c (optional) )
    // We want to emit the fast path when
    //   1. c is not present, and
    //   2. 'a' is a string and 'b' is a regex.
    //
    // a.match(b)
    // We want to emit the fast path when 'a' is a string and 'b' is a regex.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //helperCallOpnd
    IR::Opnd * src1 = instr->GetSrc1();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if(!instr->FetchOperands(argsOpnd, 2))
    {
        // Argument shape doesn't match the two-operand pattern; no fast path.
        return;
    }

    // Need 'a' likely a string and 'b' not a tagged int (a regex is a heap object).
    if(!argsOpnd[0]->GetValueType().IsLikelyString() || argsOpnd[1]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if(!argsOpnd[0]->GetValueType().IsString())
    {
        // Only likely a string: emit the runtime string check.
        if(!argsOpnd[0]->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, argsOpnd[0], instr);
            argsOpnd[0] = opndReg;
        }
        this->GenerateStringTest(argsOpnd[0]->AsRegOpnd(), instr, labelHelper);
    }

    if(!argsOpnd[1]->IsNotTaggedValue())
    {
        // Ensure 'b' is a heap object before dereferencing its vtable.
        m_lowererMD.GenerateObjectTest(argsOpnd[1], instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!argsOpnd[1]->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, argsOpnd[1], instr);
        argsOpnd[1] = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(argsOpnd[1]->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    // [stackAllocationPointer, ]scriptcontext, regexp, input[, limit] (to be pushed in reverse order)

    if(src1->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperString_Split)
    {
        //limit
        //As we are optimizing only for two operands, make limit UINT_MAX
        IR::Opnd* limit = IR::IntConstOpnd::New(UINT_MAX, TyUint32, instr->m_func);
        this->m_lowererMD.LoadHelperArgument(instr, limit);
    }

    //input, regexp
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[0]);
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[1]);

    // script context
    LoadScriptContext(instr);

    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst && instr->dstIsTempObject)
    {
        // Result is used but marked temp: use the "MayBeTemp" helper variants
        // and pass stack space for the result array.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod = IR::JnHelperMethod::HelperRegExp_SplitResultUsedAndMayBeTemp;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod = IR::JnHelperMethod::HelperRegExp_MatchResultUsedAndMayBeTemp;
                break;

            default:
                Assert(false);
                __assume(false);
        }

        // Allocate some space on the stack for the result array
        IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
        autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
        stackAllocationOpnd->SetValueType(callDst->GetValueType());
        GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
        m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
    }
    else
    {
        // Pick the used/not-used helper variant by whether the dst is consumed.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_SplitResultUsed
                        : IR::JnHelperMethod::HelperRegExp_SplitResultNotUsed;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_MatchResultUsed
                        : IR::JnHelperMethod::HelperRegExp_MatchResultNotUsed;
                break;

            default:
                Assert(false);
                __assume(false);
        }
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// Emits the inline fast path for a.exec(b): verifies 'a' is exactly a
// JavascriptRegExp (vtable check) and 'b' a string, optionally emits the
// begin-of-input (BOI) literal early-out for global regexes, then calls the
// exec helper directly; the original CallDirect becomes the slow path under
// labelHelper.
void
Lowerer::GenerateFastInlineRegExpExec(IR::Instr * instr)
{
    // a.exec(b)
    // We want to emit the fast path when 'a' is a regex and 'b' is a string
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if (!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }

    IR::Opnd *opndString = argsOpnd[1];
    // Need 'b' likely a string and 'a' not a tagged int (a regex is a heap object).
    if(!opndString->GetValueType().IsLikelyString() || argsOpnd[0]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if(!opndString->GetValueType().IsString())
    {
        // Only likely a string: emit the runtime string check.
        if(!opndString->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, opndString, instr);
            opndString = opndReg;
        }
        this->GenerateStringTest(opndString->AsRegOpnd(), instr, labelHelper);
    }

    IR::Opnd *opndRegex = argsOpnd[0];
    if(!opndRegex->IsNotTaggedValue())
    {
        // Ensure the receiver is a heap object before dereferencing its vtable.
        m_lowererMD.GenerateObjectTest(opndRegex, instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!opndRegex->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, opndRegex, instr);
        opndRegex = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRegex->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    if (!PHASE_OFF(Js::ExecBOIFastPathPhase, m_func))
    {
        // BOI early-out: for a global regex whose program is a begin-of-input
        // two-character literal, compare the literal against the first DWORD of
        // the input; on mismatch we can report "no match" without running exec.

        // Load pattern from regex operand
        IR::RegOpnd *opndPattern = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndPattern,
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, m_func),
            instr);

        // Load program from pattern
        IR::RegOpnd *opndProgram = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndProgram,
            IR::IndirOpnd::New(opndPattern, offsetof(UnifiedRegex::RegexPattern, rep) + offsetof(UnifiedRegex::RegexPattern::UnifiedRep, program), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelFastHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // We want the program's tag to be BOILiteral2Tag
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)UnifiedRegex::Program::GetOffsetOfTag(), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::Program::GetBOILiteral2Tag(), TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        // Test the program's flags for "global"
        InsertTestBranch(
            IR::IndirOpnd::New(opndProgram, offsetof(UnifiedRegex::Program, flags), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::GlobalRegexFlag, TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        IR::LabelInstr *labelNoMatch = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // If string length < 2...
        InsertCompareBranch(
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
            IR::IntConstOpnd::New(2, TyUint32, m_func),
            Js::OpCode::BrLt_A,
            labelNoMatch,
            instr);

        // ...or the DWORD doesn't match the pattern...
        IR::RegOpnd *opndBuffer = IR::RegOpnd::New(TyMachReg, m_func);
        LowererMD::CreateAssign(
            opndBuffer,
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelGotString = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // If the buffer pointer is null, call HelperString_GetSz to obtain the
        // character buffer.
        InsertTestBranch(opndBuffer, opndBuffer, Js::OpCode::BrNeq_A, labelGotString, instr);

        m_lowererMD.LoadHelperArgument(instr, opndString);
        IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, opndBuffer, IR::HelperCallOpnd::New(IR::HelperString_GetSz, m_func), m_func);
        instr->InsertBefore(instrCall);
        m_lowererMD.LowerCall(instrCall, 0);

        instr->InsertBefore(labelGotString);

        // Compare the first two characters (one DWORD) of the input against the
        // program's BOI literal; equal means a possible match -> run the helper.
        IR::RegOpnd *opndBufferDWORD = IR::RegOpnd::New(TyUint32, m_func);
        LowererMD::CreateAssign(
            opndBufferDWORD,
            IR::IndirOpnd::New(opndBuffer, 0, TyUint32, m_func),
            instr);

        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)(UnifiedRegex::Program::GetOffsetOfRep() + UnifiedRegex::Program::GetOffsetOfBOILiteral2Literal()), TyUint32, m_func),
            opndBufferDWORD,
            Js::OpCode::BrEq_A,
            labelFastHelper,
            instr);

        // ...then set the last index to 0...
        instr->InsertBefore(labelNoMatch);

        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, m_func),
            IR::AddrOpnd::NewNull(m_func),
            instr);

        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, m_func),
            IR::IntConstOpnd::New(0, TyUint32, m_func),
            instr);

        // ...and set the dst to null...
        if (callDst)
        {
            LowererMD::CreateAssign(
                callDst,
                LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
                instr);
        }

        // ...and we're done.
        this->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelFastHelper);
    }

    // [stackAllocationPointer, ]scriptcontext, regexp, string (to be pushed in reverse order)

    //string, regexp
    this->m_lowererMD.LoadHelperArgument(instr, opndString);
    this->m_lowererMD.LoadHelperArgument(instr, opndRegex);

    // script context
    LoadScriptContext(instr);

    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst)
    {
        if(instr->dstIsTempObject)
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsedAndMayBeTemp;

            // Allocate some space on the stack for the result array
            IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
            autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
            stackAllocationOpnd->SetValueType(callDst->GetValueType());
            GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
            m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
        }
        else
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsed;
        }
    }
    else
    {
        helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultNotUsed;
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
- void
- Lowerer::RelocateCallDirectToHelperPath(IR::Instr* argoutInlineSpecialized, IR::LabelInstr* labelHelper)
- {
- IR::Opnd *linkOpnd = argoutInlineSpecialized->GetSrc2(); //ArgOut_A_InlineSpecialized src2; link to actual argouts.
- argoutInlineSpecialized->Unlink();
- labelHelper->InsertAfter(argoutInlineSpecialized);
- while(linkOpnd->IsSymOpnd())
- {
- IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
- StackSym *sym = src2->m_sym->AsStackSym();
- Assert(sym->m_isSingleDef);
- IR::Instr *argInstr = sym->m_instrDef;
- Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
- argInstr->Unlink();
- labelHelper->InsertAfter(argInstr);
- linkOpnd = argInstr->GetSrc2();
- }
- // Move startcall
- Assert(linkOpnd->IsRegOpnd());
- StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
- Assert(sym->m_isSingleDef);
- IR::Instr *startCall = sym->m_instrDef;
- Assert(startCall->m_opcode == Js::OpCode::StartCall);
- startCall->Unlink();
- labelHelper->InsertAfter(startCall);
- }
// Attempts an inline fast path for String.prototype.charCodeAt/charAt
// (selected by 'index'). Returns false when the machine-dependent fast path
// could not be generated, leaving the CallDirect to the normal lowering.
bool
Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFunction index)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // Exactly two operands expected: the 'this' string and the position.
    IR::Opnd * argsOpnd[2] = {0};
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    bool success = this->m_lowererMD.GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
        instr, instr, labelHelper, doneLabel);

    // labelHelper is inserted even on failure; without the branch/relocation
    // below it simply precedes the (normally lowered) call.
    instr->InsertBefore(labelHelper);

    if (!success)
    {
        return false;
    }

    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);

    return true;
}
- void
- Lowerer::GenerateFastInlineMathClz32(IR::Instr* instr)
- {
- Assert(instr->GetDst()->IsInt32());
- Assert(instr->GetSrc1()->IsInt32());
- m_lowererMD.GenerateClz(instr);
- }
- void
- Lowerer::GenerateFastInlineMathImul(IR::Instr* instr)
- {
- IR::Opnd* src1 = instr->GetSrc1();
- IR::Opnd* src2 = instr->GetSrc2();
- IR::Opnd* dst = instr->GetDst();
- Assert(dst->IsInt32());
- Assert(src1->IsInt32());
- Assert(src2->IsInt32());
- IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, dst, src1, src2, instr->m_func);
- instr->InsertBefore(imul);
- LowererMD::Legalize(imul);
- instr->Remove();
- }
- void
- Lowerer::GenerateFastInlineMathFround(IR::Instr* instr)
- {
- IR::Opnd* src1 = instr->GetSrc1();
- IR::Opnd* dst = instr->GetDst();
- Assert(dst->IsFloat());
- Assert(src1->IsFloat());
- IR::Instr* fcvt64to32 = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src1, instr->m_func);
- instr->InsertBefore(fcvt64to32);
- LowererMD::Legalize(fcvt64to32);
- if (dst->IsFloat64())
- {
- IR::Instr* fcvt32to64 = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, dst, instr->m_func);
- instr->InsertBefore(fcvt32to64);
- LowererMD::Legalize(fcvt32to64);
- }
- instr->Remove();
- return;
- }
// Attempts an inline fast path for a.replace(b, c) via GenerateFastReplace.
// Returns false when the argument value types rule out the fast path (or the
// fast path could not be emitted), leaving the CallDirect to normal lowering.
bool
Lowerer::GenerateFastInlineStringReplace(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[3] = {0};
    bool result = instr->FetchOperands(argsOpnd, 3);
    Assert(result);
    AnalysisAssert(argsOpnd[0] && argsOpnd[1] && argsOpnd[2]);

    // Require: 'a' likely a string, 'b' possibly an object (the regex), and
    // 'c' likely a string.
    if (!argsOpnd[0]->GetValueType().IsLikelyString()
        || argsOpnd[1]->GetValueType().IsNotObject()
        || !argsOpnd[2]->GetValueType().IsLikelyString())
    {
        return false;
    }

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    bool success = this->GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2],
        instr, instr, labelHelper, doneLabel);

    instr->InsertBefore(labelHelper);

    if (!success)
    {
        return false;
    }

    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);

    return true;
}
#ifdef ENABLE_DOM_FAST_PATH
/*
    Lower the DOMFastPathGetter opcode
    We have inliner generated bytecode:
    (dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
    (dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
    method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
    We'll convert it to a JavascriptFunction entry method call:
    CALL Helper funcObject CallInfo(CallFlags_Value, 3) thisObj
*/
void
Lowerer::LowerFastInlineDOMFastPathGetter(IR::Instr* instr)
{
    IR::Opnd* helperOpnd = instr->UnlinkSrc1();
    Assert(helperOpnd->IsHelperCallOpnd());

    // Walk the ExtendArg_A chain backwards (src2 links to the previous entry):
    // the last ExtendArg carries funcObject, the first carries thisObject.
    IR::Opnd *linkOpnd = instr->UnlinkSrc2();
    Assert(linkOpnd->IsRegOpnd());
    IR::Instr* prevInstr = linkOpnd->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* funcObj = prevInstr->GetSrc1();
    Assert(funcObj->IsRegOpnd());

    // If the Extended_arg was CSE's across a loop or hoisted out of a loop,
    // adding a new reference down here might cause funcObj to now be liveOnBackEdge.
    // Use the addToLiveOnBackEdgeSyms bit vector to add it to a loop if we encounter one.
    // We'll clear it once we reach the Extended arg.
    this->addToLiveOnBackEdgeSyms->Set(funcObj->AsRegOpnd()->m_sym->m_id);

    Assert(prevInstr->GetSrc2() != nullptr);
    prevInstr = prevInstr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* thisObj = prevInstr->GetSrc1();
    Assert(prevInstr->GetSrc2() == nullptr);
    Assert(thisObj->IsRegOpnd());
    this->addToLiveOnBackEdgeSyms->Set(thisObj->AsRegOpnd()->m_sym->m_id);

    // CallInfo constant: CallFlags_Value with a single ('this') argument.
    const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_Value, 1, m_func);

    // Helper arguments are pushed in reverse: thisObj, callInfo, funcObj.
    m_lowererMD.LoadHelperArgument(instr, thisObj);
    m_lowererMD.LoadHelperArgument(instr, info);
    m_lowererMD.LoadHelperArgument(instr, funcObj);

    instr->m_opcode = Js::OpCode::Call;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperOpnd->AsHelperCallOpnd()->m_fnHelper, 3, m_func);
    instr->SetSrc1(helperCallOpnd);

    m_lowererMD.LowerCall(instr, 3); // we have funcobj, callInfo, and this.
}
#endif
// Lowers InlineArrayPush: emits the inline fast path (via GenerateFastPush)
// with helper and, for likely-native arrays, bailout fallbacks, then appends
// the shared helper-call tail.
void
Lowerer::GenerateFastInlineArrayPush(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush);

    IR::Opnd * baseOpnd = instr->GetSrc1();
    IR::Opnd * srcOpnd = instr->GetSrc2();

    // A dst means the caller consumes push()'s result (the new length).
    bool returnLength = false;
    if(instr->GetDst())
    {
        returnLength = true;
    }

    IR::LabelInstr * bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    //Don't Generate fast path according to ShouldGenerateArrayFastPath()
    //AND, Don't Generate fast path if the array is LikelyNative and the element is not specialized
    if(ShouldGenerateArrayFastPath(baseOpnd, false, false, false) &&
        !(baseOpnd->GetValueType().IsLikelyNativeArray() && srcOpnd->IsVar()))
    {
        GenerateFastPush(baseOpnd, srcOpnd, instr, instr, labelHelper, doneLabel, bailOutLabelHelper, returnLength);

        instr->InsertBefore(labelHelper);
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }

    if(baseOpnd->GetValueType().IsLikelyNativeArray())
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
        InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabelHelper);
    }

    // Emit the runtime-helper tail that the fast path branches around.
    GenerateHelperToArrayPushFastPath(instr, bailOutLabelHelper);
}
// Emits the inline fast path for Array.prototype.pop: validates/loads the
// array, bails out when the length is 0, decrements the length, and reads the
// last element through GenerateFastElemICommon. Always returns true.
bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //  TEST baseOpnd, AtomTag                    -- check baseOpnd not tagged int
    //  JNE $helper
    //  CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    //  JNE $helper
    //  MOV r2, [baseOpnd + offset(length)]       -- Load array length

    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);

    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Array-ness not established at JIT time: emit the runtime array test
        // (failures go to the bailout path).
        arrayOpnd = GenerateArrayTest(baseOpnd, bailOutLabelHelper, bailOutLabelHelper, callInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse an already-available length sym when the array opnd carries one.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    IR::AutoReuseOpnd autoReuseMutableArrayLengthOpnd;
    {
        // Copy the length into a mutable register; it is decremented below.
        IR::RegOpnd *const mutableArrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseMutableArrayLengthOpnd.Initialize(mutableArrayLengthOpnd, m_func);
        if(arrayLengthOpnd)
        {
            // mov mutableArrayLength, arrayLength
            InsertMove(mutableArrayLengthOpnd, arrayLengthOpnd, callInstr);
        }
        else
        {
            // MOV mutableArrayLength, [array + offset(length)] -- Load array length
            // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
            InsertMove(
                mutableArrayLengthOpnd,
                IR::IndirOpnd::New(
                    arrayOpnd,
                    Js::JavascriptArray::GetOffsetOfLength(),
                    mutableArrayLengthOpnd->GetType(),
                    this->m_func),
                callInstr);
        }
        arrayLengthOpnd = mutableArrayLengthOpnd;
    }

    // Empty array: nothing to pop, take the bailout path. Otherwise length-1
    // becomes the index of the element to pop.
    InsertCompareBranch(arrayLengthOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), Js::OpCode::BrEq_A, true, bailOutLabelHelper, callInstr);
    InsertSub(false, arrayLengthOpnd, arrayLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func),callInstr);

    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            callInstr,
            false,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            true /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            false/* = returnLength */,
            bailOutLabelHelper /* = bailOutLabelInstr*/);

    Assert(!isTypedArrayElement);
    Assert(indirOpnd);
    return true;
}
// Emits the inline fast path for pushing a single element: validates/loads the
// array, indexes at the current length via GenerateFastElemICommon (which also
// handles the length update and, when returnLength is set, produces the new
// length in the dst), then stores src. Always returns true.
bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper, bool returnLength)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //  TEST baseOpnd, AtomTag                    -- check baseOpnd not tagged int
    //  JNE $helper
    //  CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    //  JNE $helper
    //  MOV r2, [baseOpnd + offset(length)]       -- Load array length

    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);

    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Array-ness not established at JIT time: emit the runtime array test
        // (failures go to the helper path).
        arrayOpnd = GenerateArrayTest(baseOpnd, labelHelper, labelHelper, insertInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse an already-available length sym when the array opnd carries one.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    if(!arrayLengthOpnd)
    {
        // MOV arrayLength, [array + offset(length)] -- Load array length
        // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
        arrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        InsertMove(
            arrayLengthOpnd,
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), arrayLengthOpnd->GetType(), this->m_func),
            insertInstr);
    }

    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    if (returnLength && src->IsEqual(insertInstr->GetDst()))
    {
        //If the dst is same as the src, then dst is going to be overridden by GenerateFastElemICommon in process of updating the length.
        //Save it in a temp register.
        IR::RegOpnd *opnd = IR::RegOpnd::New(src->GetType(), this->m_func);
        InsertMove(opnd, src, insertInstr);
        src = opnd;
    }

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            insertInstr,
            true,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            false /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            returnLength,
            bailOutLabelHelper);

    Assert(!isTypedArrayElement);
    Assert(indirOpnd);

    // MOV [r3 + r2], src   -- store the pushed element (with write barrier).
    InsertMoveWithBarrier(indirOpnd, src, insertInstr);

    return true;
}
// Expands the stack-args of an inlinee into individual dynamic ArgOuts for a
// CallIDynamic: computes the address of the first real argument (skipping
// 'this') in the inlinee's argv slots and emits one ArgOut_A_Dynamic per
// actual, from last to first. Returns an IntConstOpnd holding the inlinee's
// actual count (including 'this'), used later for callInfo generation.
IR::Opnd*
Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    Assert(callInstr->m_func->IsInlinee());
    Func *func = callInstr->m_func;
    uint32 actualCount = func->actualCount - 1; // don't count this pointer
    Assert(actualCount < Js::InlineeCallInfo::MaxInlineeArgoutCount);

    // argInOpnd = LEA of the first real argument slot (one machine pointer
    // past 'this' in the inlinee argv area).
    const auto firstRealArgStackSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); // Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, func);
    IR::RegOpnd* argInOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseArgInOpnd(argInOpnd, func);
    InsertLea(argInOpnd, firstArg, callInstr);

    IR::IndirOpnd *argIndirOpnd = nullptr;
    IR::Instr* argout = nullptr;

#if defined(_M_IX86)
    // Maintain alignment: push a dummy value when the actual count is even.
    if ((actualCount & 1) == 0)
    {
        IR::Instr *alignPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        alignPush->SetSrc1(IR::IntConstOpnd::New(1, TyInt32, this->m_func));
        callInstr->InsertBefore(alignPush);
    }
#endif

    // Emit ArgOuts from the last actual down to the first.
    for(uint i = actualCount; i > 0; i--)
    {
        argIndirOpnd = IR::IndirOpnd::New(argInOpnd, (i - 1) * MachPtr, TyMachReg, func);
        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(argIndirOpnd);
        callInstr->InsertBefore(argout);
        // i represents the ith argument from actuals, which is i + 3 counting this, callInfo and function object
        this->m_lowererMD.LoadDynamicArgument(argout, i + 3);
    }
    return IR::IntConstOpnd::New(func->actualCount, TyInt32, func);
}
// For AMD64 and ARM only.
// Emits a register-based loop that walks a spread array's elements and emits
// an ArgOut_A_Dynamic for each, positioned by the running index; the final
// element is emitted outside the loop into argument register 4 (after the
// function object, callInfo, and 'this'). On entry indexOpnd holds the
// element count; it is decremented inside the loop.
void
Lowerer::LowerInlineSpreadArgOutLoopUsingRegisters(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;

    // Skip the loop entirely when there is exactly one element.
    IR::LabelInstr *oneArgLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrEq_A, true, oneArgLabel, callInstr);

    IR::LabelInstr *startLoopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoopLabel->m_isLoopTop = true;

    // Build a Loop structure so the register allocator knows which syms are
    // live across the back edge.
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    startLoopLabel->SetLoop(loop);
    loop->SetLoopTopInstr(startLoopLabel);
    loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);
    callInstr->InsertBefore(startLoopLabel);

    // index--; elemPtr = [elementsStart + index * scale]
    InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);
    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);

    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);

    // X64 requires a reg opnd
    IR::RegOpnd *elemRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    LowererMD::CreateAssign(elemRegOpnd, elemPtrOpnd, callInstr);
    argout->SetSrc1(elemRegOpnd);
    argout->SetSrc2(indexOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    // Loop back while index != 1.
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrNeq_A, true, startLoopLabel, callInstr);

    // Emit final argument into register 4 on AMD64 and ARM
    // NOTE(review): on both paths reaching this label, indexOpnd appears to
    // hold 1 while elemPtrOpnd scales by indexOpnd — verify the final argout
    // addresses element 0 as intended and not element 1.
    callInstr->InsertBefore(oneArgLabel);
    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); // 4 to denote this is 4th register after this, callinfo & function object
}
// Lowers a CallIDynamicSpread (a call whose arguments come entirely from one
// spread array): walks the arg chain (spread-array ArgOut -> 'this' ArgOut ->
// StartCall), removes the now-redundant instructions, loads the array length
// as the dynamic argument count, expands the elements via the MD argout loop,
// and finally lowers the call as a CallIDynamic. Throws a RejitException when
// invoked inside an inlinee (inline spread is unsupported there).
IR::Instr *
Lowerer::LowerCallIDynamicSpread(IR::Instr *callInstr, ushort callFlags)
{
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamicSpread);

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    Func *const func = callInstr->m_func;

    if (func->IsInlinee())
    {
        // Spread inside an inlinee is not supported; force a rejit with inline spread disabled.
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // First link of the arg chain: the ArgOut carrying the spread array.
    IR::Instr *spreadArrayInstr = callInstr;
    IR::SymOpnd *argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);
    spreadArrayInstr = argLinkSym->m_instrDef;
    Assert(spreadArrayInstr->m_opcode == Js::OpCode::ArgOut_A_SpreadArg);

    // Ensure the spread array is in a register.
    IR::RegOpnd *arrayOpnd = nullptr;
    IR::Opnd *arraySrcOpnd = spreadArrayInstr->UnlinkSrc1();
    if (!arraySrcOpnd->IsRegOpnd())
    {
        arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, spreadArrayInstr);
    }
    else
    {
        arrayOpnd = arraySrcOpnd->AsRegOpnd();
    }

    argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();

    // Walk the arg chain and find the start call
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Nothing to be done for the function object, emit as normal
    // Detach the 'this' ArgOut; the MD call lowering will re-emit it.
    IR::Instr *thisInstr = argLinkSym->m_instrDef;
    IR::RegOpnd *thisOpnd = thisInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = thisOpnd->m_sym->AsStackSym();
    thisInstr->Unlink();
    thisInstr->FreeDst();

    // Remove the array ArgOut instr and StartCall, they are no longer needed
    spreadArrayInstr->Unlink();
    spreadArrayInstr->FreeDst();

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = startCallInstr->GetNextRealInstr();
    startCallInstr->Remove();

    // argsLength = array.length
    IR::RegOpnd *argsLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::IndirOpnd *arrayLengthPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    LowererMD::CreateAssign(argsLengthOpnd, arrayLengthPtrOpnd, callInstr);

    // Don't bother expanding args if there are zero
    IR::LabelInstr *zeroArgsLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(argsLengthOpnd, IR::IntConstOpnd::New(0, TyInt8, func), Js::OpCode::BrEq_A, true, zeroArgsLabel, callInstr);

    // Working copy of the length, consumed as the loop index.
    IR::RegOpnd *indexOpnd = IR::RegOpnd::New(TyUint32, func);
    LowererMD::CreateAssign(indexOpnd, argsLengthOpnd, callInstr);

    // Get the array head offset and length
    IR::IndirOpnd *arrayHeadPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, func);
    IR::RegOpnd *arrayElementsStartOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertAdd(false, arrayElementsStartOpnd, arrayHeadPtrOpnd, IR::IntConstOpnd::New(offsetof(Js::SparseArraySegment<Js::Var>, elements), TyUint8, func), callInstr);

    // Emit the per-element ArgOut loop (machine-dependent).
    this->m_lowererMD.LowerInlineSpreadArgOutLoop(callInstr, indexOpnd, arrayElementsStartOpnd);

    // Resume if we have zero args
    callInstr->InsertBefore(zeroArgsLabel);

    // Lower call
    callInstr->m_opcode = Js::OpCode::CallIDynamic;
    callInstr = m_lowererMD.LowerCallIDynamic(callInstr, thisInstr, argsLengthOpnd, callFlags, insertBeforeInstrForCFG);

    return callInstr;
}
// Lowers CallIDynamic (the f.apply(this, arguments) pattern): walks the arg
// chain ('this' ArgOut_A_Dynamic -> ArgOut_A_FromStackArgs -> StartCall),
// expands the stack args into individual dynamic ArgOuts, removes the
// consumed chain instructions, and hands off to the MD call lowering.
// Requires the stack-args optimization; otherwise throws to force a rejit.
IR::Instr *
Lowerer::LowerCallIDynamic(IR::Instr * callInstr, ushort callFlags)
{
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }
    IR::Instr * insertBeforeInstrForCFG = nullptr;

    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::SymOpnd * argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // First link is the ArgOut for 'this'; detach it so the MD lowering can
    // place it itself.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::Instr* saveThisArgOutInstr = argInstr;
    saveThisArgOutInstr->Unlink();
    saveThisArgOutInstr->FreeDst();

    argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Next link must be the stack-args ArgOut; expand it into dynamic ArgOuts
    // and capture the runtime argument count.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    IR::Opnd* argsLength = m_lowererMD.GenerateArgOutForStackArgs(callInstr, argInstr);

    IR::RegOpnd* startCallDstOpnd = argInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = startCallDstOpnd->m_sym->AsStackSym();
    startCallDstOpnd->Free(this->m_func);
    argInstr->Remove(); // Remove ArgOut_A_FromStackArgs

    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = argInstr->GetNextRealInstr();
    argInstr->Remove(); // Remove start call

    return m_lowererMD.LowerCallIDynamic(callInstr, saveThisArgOutInstr, argsLength, callFlags, insertBeforeInstrForCFG);
}
// This is only for x64 & ARM.
// Expands an ArgOut_A_FromStackArgs into a runtime loop that copies the
// current frame's actual arguments into dynamic ArgOut slots for a
// CallIDynamic. Returns the register holding the saved argument count (used
// later for callInfo generation & stack allocation). Delegates to
// GenerateArgOutForInlineeStackArgs when the caller is an inlinee.
IR::Opnd*
Lowerer::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    // Emitted IR shape:
    //  s25.var = LdLen_A s4.var
    //  s26.var = Ld_A s25.var
    //  BrNeq_I4 $L3, s25.var, 0
    // $L2:
    //  BrNeq_I4 $L4, s25.var, 1
    //  s25.var = SUB_I4 s25.var, 0x1
    //  s10.var = LdElemI_A [s4.var + s25.var].var
    //  ArgOut_A_Dynamic s10.var, s25.var
    //  Br $L2
    // $L4:
    //  s10.var = LdElemI_A [s4.var].var
    //  ArgOut_A_Dynamic s10.var, 4
    // $L3:

#if defined(_M_IX86)
    Assert(false);   // x86 has its own path; this helper is x64/ARM only.
#endif

    Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);

    this->m_lowererMD.GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);

    if (callInstr->m_func->IsInlinee())
    {
        return this->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
    }
    Func *func = callInstr->m_func;
    IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();

    // len = arguments.length, lowered to the real-stack-args fast path.
    IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, stackArgs, func);
    ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); /*LdLen_A works only on stack arguments*/
    callInstr->InsertBefore(ldLen);
    GenerateFastRealStackArgumentsLdLen(ldLen);

    // Preserve the count in a second register: the loop below consumes
    // ldLenDstOpnd, but the caller needs the original length.
    IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyUint32, func), ldLenDstOpnd, func);
    saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
    callInstr->InsertBefore(saveLenInstr);

    // Zero arguments: skip everything.
    IR::LabelInstr* doneArgs = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::Instr* branchDoneArgs = IR::BranchInstr::New(Js::OpCode::BrEq_I4, doneArgs, ldLenDstOpnd, IR::IntConstOpnd::New(0, TyInt8, func), func);
    callInstr->InsertBefore(branchDoneArgs);
    this->m_lowererMD.EmitInt4Instr(branchDoneArgs);

    // Loop from the last argument down to the second.
    IR::LabelInstr* startLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr* endLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoop->m_isLoopTop = true;

    // Register the loop with the register allocator and record the sym that is
    // live across the back edge.
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    startLoop->SetLoop(loop);
    loop->SetLoopTopInstr(startLoop);
    loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);

    callInstr->InsertBefore(startLoop);

    // Exit the loop when only the first argument remains.
    IR::Instr* branchOutOfLoop = IR::BranchInstr::New(Js::OpCode::BrEq_I4, endLoop, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func), func);
    callInstr->InsertBefore(branchOutOfLoop);
    this->m_lowererMD.EmitInt4Instr(branchOutOfLoop);

    // index-- ; load arguments[index] ; ArgOut at the index-derived position.
    IR::Instr* subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func), func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg, func);
    IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    argout->SetSrc2(ldLenDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    IR::BranchInstr *tailBranch = IR::BranchInstr::New(Js::OpCode::Br, startLoop, func);
    callInstr->InsertBefore(tailBranch);
    callInstr->InsertBefore(endLoop);
    this->m_lowererMD.LowerUncondBranch(tailBranch);

    loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);

    // Emit the first argument (index 0) outside the loop, directly into
    // dynamic argument position 4.
    subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func), func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    ldElemDstOpnd = IR::RegOpnd::New(TyMachReg, func);
    ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); // 4 to denote this is 4th register after this, callinfo & function object

    callInstr->InsertBefore(doneArgs);

    /*return the length which will be used for callInfo generations & stack allocation*/
    return saveLenInstr->GetDst()->AsRegOpnd();
}
// This function assumes there is a stackargs bailout and the index is always in range.
// Replaces a LdElemI_A on the in-flight stack arguments with a direct load
// from the frame (or from the inlinee's arg slots). Always removes ldElem and
// returns false (no helper label emitted).
bool
Lowerer::GenerateFastStackArgumentsLdElemI(IR::Instr* ldElem)
{
    // MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
    //
    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();

    // Now load the index and check if it is an integer.
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    Assert(indexOpnd && indexOpnd->IsTaggedInt());

    if(ldElem->m_func->IsInlinee())
    {
        // Inlinee: arguments live in the inlinee's argv slot area.
        IR::IndirOpnd *argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, indexOpnd);
        LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);
    }
    else
    {
        // Load argument set dst = [ebp + index].
        IR::RegOpnd *ebpOpnd = IR::Opnd::CreateFramePointerOpnd(m_func);
        IR::IndirOpnd *argIndirOpnd = nullptr;

        // The stack looks like this:
        //     ...
        //     arguments[1]
        //     arguments[0]
        //     this
        //     callinfo
        //     function object
        //     return addr
        // EBP->EBP chain
        //
        // actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
        int32 actualOffset = GetFormalParamOffset() + 1 + indirOpnd->GetOffset();
        Assert(GetFormalParamOffset() + 1 == 5);
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();

        argIndirOpnd = IR::IndirOpnd::New(ebpOpnd->AsRegOpnd(), indexOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);

        // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
        // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
        // valueOpnd represents is not changed.
        argIndirOpnd->SetOffset(actualOffset << indirScale);

        LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);
    }
    // The original LdElemI_A is fully replaced by the moves emitted above.
    ldElem->Remove();
    return false;
}
// Builds an indir operand addressing arguments[valueOpnd] in an inlinee's
// argv slot area. valueOpnd is either an int constant (folded into the byte
// offset) or a register index (scaled indir). 'this' is skipped by bumping
// the first real arg sym's offset by one machine pointer.
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForInlinee(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    Assert(ldElem->m_func->IsInlinee());
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // Address of argument after 'this'
    const auto firstRealArgStackSym = ldElem->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); // Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, m_func);

    // baseOpnd = &arguments[0]
    IR::RegOpnd *const baseOpnd = IR::RegOpnd::New(TyMachReg, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    InsertLea(baseOpnd, firstArg, ldElem);

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold it into the indir's byte offset.
        IntConstType offset = valueOpnd->AsIntConstOpnd()->GetValue() * MachPtr;
        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, ldElem->m_func);
    }
    else
    {
        // Register index: use a scaled [base + index * scale] form.
        Assert(valueOpnd->IsRegOpnd());
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, valueOpnd->AsRegOpnd(), indirScale, TyMachReg, ldElem->m_func);
    }
    return argIndirOpnd;
}
// Builds an indir operand addressing arguments[valueOpnd] for the top-level
// (non-inlined) function: off EBP using the known stack layout, or off the
// generator's args pointer when m_func is a generator function.
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForTopFunction(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    // Load argument set dst = [ebp + index] (or grab from the generator object if m_func is a generator function).
    IR::RegOpnd *baseOpnd = m_func->GetJnFunction()->IsGenerator() ? LoadGeneratorArgsPtr(ldElem) : IR::Opnd::CreateFramePointerOpnd(m_func);
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // The stack looks like this:
    //     ...
    //     arguments[1]
    //     arguments[0]
    //     this
    //     callinfo
    //     function object
    //     return addr
    // EBP->EBP chain
    //
    // actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
    uint16 actualOffset = m_func->GetJnFunction()->IsGenerator() ? 1 : GetFormalParamOffset() + 1; //5
    Assert(actualOffset == 5 || m_func->GetJnFunction()->IsGenerator());

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold (index + layout offset) into the byte offset.
        IntConstType offset = (valueOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr;
        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
    }
    else
    {
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), valueOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);

        // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
        // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
        // valueOpnd represents is not changed.
        argIndirOpnd->SetOffset(actualOffset << indirScale);
    }
    return argIndirOpnd;
}
// Emits the bounds check for a fast arguments[i] access: compares
// actualParamOpnd against valueOpnd and branches to labelCreateHeapArgs using
// the caller-supplied condition (opcode) when the access is out of range.
void
Lowerer::GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode opcode)
{
    // Check if index < nr_actuals.
    InsertCompare(actualParamOpnd, valueOpnd, ldElem);
    // Jump to helper if index >= nr_actuals.
    // Do an unsigned check here so that a negative index will also fail.
    // (GenerateLdValueFromCheckedIndexOpnd does not guarantee positive index on x86.)
    InsertBranch(opcode, true, labelCreateHeapArgs, ldElem);
}
// Fast path for arguments[i] when the heap arguments object has not been
// created yet; loads the value straight from the frame (or inlinee arg slots)
// after a bounds check, and otherwise falls back to creating the heap
// arguments object. Returns true when the fast path was emitted.
bool
Lowerer::GenerateFastArgumentsLdElemI(IR::Instr* ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr *labelFallThru)
{
    //   TEST argsSlot, argsSlot
    //   JNE $helper                        // There is an arguments object created; jump to helper.
    //   ---GenerateSmIntTest
    //   ---GenerateLdValueFromCheckedIndexOpnd
    //   ---LoadInputParamCount
    //   CMP actualParamOpnd, valueOpnd     // Compare between the actual count & the index count (say i in arguments[i])
    //   JLE $labelCreateHeapArgs
    //   MOV dst, ebp [(valueOpnd + 5) *4]  // 5 for the stack layout
    //   JMP $fallthrough
    //
    // labelCreateHeapArgs:
    //   ---LoadHeapArguments
    Assert(ldElem->DoStackArgsOpt(this->m_func));

    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    bool isInlinee = ldElem->m_func->IsInlinee();
    Func *func = ldElem->m_func;

    // First check the slot on the frame to see if there is a heap arguments object.
    IR::Opnd *cachedArgsObjectSlotOpnd = isInlinee? ldElem->m_func->GetInlineeArgumentsObjectSlotOpnd() : this->m_lowererMD.CreateStackArgumentsSlotOpnd() ;

    // Re-use the base pointer here so that we're loading the current heap args into the reg we will pass
    // to the helper if necessary.
    IR::RegOpnd *argsObjRegOpnd = indirOpnd->GetBaseOpnd();
    LowererMD::CreateAssign(argsObjRegOpnd, cachedArgsObjectSlotOpnd, ldElem);
    InsertTest(argsObjRegOpnd, argsObjRegOpnd, ldElem);
    IR::LabelInstr *labelCreateHeapArgs = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldElem);

    // Now load the index and check if it is an integer.
    bool emittedFastPath = false;
    bool isNotInt = false;
    IntConstType value = 0;
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::Opnd *valueOpnd = nullptr;
    IR::Opnd *actualParamOpnd = nullptr;

    bool hasIntConstIndex = indirOpnd->TryGetIntConstIndexValue(true, &value, &isNotInt);

    if (isInlinee && hasIntConstIndex && value >= (ldElem->m_func->actualCount - 1))
    {
        // Outside the range of actuals, skip
    }
    else if (labelFallThru != nullptr && !(hasIntConstIndex && value < 0)) // if index is not a negative int constant
    {
        if (isInlinee)
        {
            // Inlinee actual count is a compile-time constant (exclude 'this').
            actualParamOpnd = IR::IntConstOpnd::New(ldElem->m_func->actualCount - 1, TyInt32, func);
        }
        else
        {
            // Load actuals count, LoadHeapArguments will reuse the generated instructions here
            IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldElem, -1 /* don't include 'this' while counting actuals. */);
            actualParamOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
        }

        if (hasIntConstIndex)
        {
            // Constant index
            valueOpnd = IR::IntConstOpnd::New(value, TyInt32, func);
        }
        else
        {
            // Load valueOpnd from the index
            valueOpnd =
                m_lowererMD.LoadNonnegativeIndex(
                    indexOpnd,
                    (
#if INT32VAR
                        indexOpnd->GetType() == TyUint32
#else
                        // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
                        true
#endif
                    ),
                    labelCreateHeapArgs,
                    labelCreateHeapArgs,
                    ldElem);
        }

        if (isInlinee)
        {
            if (!hasIntConstIndex)
            {
                // Runtime check to make sure the index is within the arguments.length range.
                // Note the operand order here: index first, count second, with BrGe.
                GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, valueOpnd, actualParamOpnd, Js::OpCode::BrGe_A);
            }
        }
        else
        {
            GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, actualParamOpnd, valueOpnd, Js::OpCode::BrLe_A);
        }

        IR::Opnd *argIndirOpnd = nullptr;
        if (isInlinee)
        {
            argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, valueOpnd);
        }
        else
        {
            argIndirOpnd = GetArgsIndirOpndForTopFunction(ldElem, valueOpnd);
        }

        LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);

        // JMP $done
        InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

        // $labelCreateHeapArgs:
        ldElem->InsertBefore(labelCreateHeapArgs);
        emittedFastPath = true;
    }

    // Slow path: materialize the heap arguments object into the base register.
    IR::Opnd *nullOpnd = this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueNull);
    IR::Instr *instrArgs = IR::Instr::New(Js::OpCode::LdHeapArguments,
        indirOpnd->GetBaseOpnd(),
        nullOpnd,
        nullOpnd,
        func);
    ldElem->InsertBefore(instrArgs);
    this->m_lowererMD.LoadHeapArguments(instrArgs, true, actualParamOpnd);

    return emittedFastPath;
}
- bool
- Lowerer::GenerateFastRealStackArgumentsLdLen(IR::Instr *ldLen)
- {
- if(ldLen->m_func->IsInlinee())
- {
- //Get the length of the arguments
- LowererMD::CreateAssign(ldLen->GetDst(),
- IR::IntConstOpnd::New(ldLen->m_func->actualCount - 1, TyUint32, ldLen->m_func),
- ldLen);
- }
- else
- {
- IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
- IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
- LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
- }
- ldLen->Remove();
- return false;
- }
// Fast path for arguments.length when no heap arguments object exists yet:
// tests the cached arguments-object slot, and if empty produces the (tagged)
// actual count directly — a compile-time constant for inlinees, a frame load
// plus int-to-var conversion otherwise. Branches to labelHelper when a heap
// arguments object already exists. Returns true (fast path emitted).
bool
Lowerer::GenerateFastArgumentsLdLen(IR::Instr *ldLen, IR::LabelInstr* labelHelper, IR::LabelInstr* labelFallThru)
{
    //  TEST argslot, argslot                   // Test if the arguments slot is zero
    //  JNE $helper
    //  actualCountOpnd <- LoadInputParamCount fastpath
    //  SHL actualCountOpnd, actualCountOpnd, 1 // Left shift for tagging
    //  INC actualCountOpnd                     // Tagging
    //  MOV dst, actualCountOpnd
    //  JMP $fallthrough
    // $helper:
    Assert(ldLen->DoStackArgsOpt(this->m_func));

    if(ldLen->m_func->IsInlinee())
    {
        IR::Opnd *cachedArgsObjectSlotOpnd = ldLen->m_func->GetInlineeArgumentsObjectSlotOpnd();

        // Re-use the LdLen_A source here so that we're loading the current heap args into the reg we will pass
        // to the helper if necessary.
        IR::RegOpnd *argsObjectRegOpnd = ldLen->GetSrc1()->AsRegOpnd();
        LowererMD::CreateAssign(argsObjectRegOpnd, cachedArgsObjectSlotOpnd, ldLen);
        InsertTest(argsObjectRegOpnd, argsObjectRegOpnd, ldLen);
        InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldLen);

        // Get the length of the arguments as a pre-tagged compile-time constant.
        LowererMD::CreateAssign(ldLen->GetDst(),
            IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(ldLen->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, ldLen->m_func), // -1 to exclude this pointer
            ldLen);
    }
    else
    {
        IR::Opnd *cachedArgsObjectSlotOpnd = this->m_lowererMD.CreateStackArgumentsSlotOpnd();

        // Re-use the LdLen_A source here so that we're loading the current heap args into the reg we will pass
        // to the helper if necessary.
        IR::RegOpnd *argsObjectRegOpnd = ldLen->GetSrc1()->AsRegOpnd();
        LowererMD::CreateAssign(argsObjectRegOpnd, cachedArgsObjectSlotOpnd, ldLen);
        InsertTest(argsObjectRegOpnd, argsObjectRegOpnd, ldLen);
        InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldLen);

        // Load the actual count from the frame and tag it as a Var int.
        IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
        IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();

        this->m_lowererMD.GenerateInt32ToVarConversion(actualCountOpnd, ldLen);
        LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
    }
    InsertBranch(Js::OpCode::Br, labelFallThru, ldLen);
    return true;
}
- IR::RegOpnd*
- Lowerer::GenerateFunctionTypeFromFixedFunctionObject(IR::Instr *insertInstrPt, IR::Opnd* functionObjOpnd)
- {
- IR::RegOpnd * functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::Opnd *functionTypeOpnd = nullptr;
- if(functionObjOpnd->IsAddrOpnd())
- {
- IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
- // functionTypeRegOpnd = MOV [fixed function address + type offset]
- functionObjAddrOpnd->m_address;
- functionTypeOpnd = IR::MemRefOpnd::New((void *)((intptr)functionObjAddrOpnd->m_address + Js::RecyclableObject::GetOffsetOfType()), TyMachPtr, this->m_func,
- IR::AddrOpndKindDynamicObjectTypeRef);
- }
- else
- {
- functionTypeOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, this->m_func);
- }
- LowererMD::CreateAssign(functionTypeRegOpnd, functionTypeOpnd, insertInstrPt);
- return functionTypeRegOpnd;
- }
- void
- Lowerer::FinalLower()
- {
- this->m_lowererMD.FinalLower();
- // ensure that the StartLabel and EndLabel are inserted
- // before the prolog and after the epilog respectively
- IR::LabelInstr * startLabel = m_func->GetFuncStartLabel();
- if (startLabel != nullptr)
- {
- m_func->m_headInstr->InsertAfter(startLabel);
- }
- IR::LabelInstr * endLabel = m_func->GetFuncEndLabel();
- if (endLabel != nullptr)
- {
- m_func->m_tailInstr->GetPrevRealInstr()->InsertBefore(endLabel);
- }
- }
void
Lowerer::EHBailoutPatchUp()
{
    Assert(this->m_func->isPostLayout);

    // 1. Insert return thunks for all the regions.
    // 2. Set the hasBailedOut bit to true on all bailout paths in EH regions.
    // 3. Insert code after every bailout in a try or catch region to save the return value on the stack, and jump to the return thunk (See Region.h) of that region.
    // 4. Insert code right before the epilog, to restore the return value (saved in 3.) from a bailout into eax.

    IR::LabelInstr * restoreReturnValueFromBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Find (or create) the label marking the epilog so the restore sequence
    // can branch to it.
    IR::LabelInstr * epilogLabel;
    IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (exitPrevInstr->IsLabelInstr())
    {
        epilogLabel = exitPrevInstr->AsLabelInstr();
    }
    else
    {
        epilogLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(epilogLabel);
    }

    IR::Instr * tmpInstr = nullptr;
    bool restoreReturnFromBailoutEmitted = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->m_func)
    {
        // Track the current EH region as labels carrying region info are crossed.
        if (instr->IsLabelInstr())
        {
            this->currentRegion = instr->AsLabelInstr()->GetRegion();
        }
        // Consider (radua): Assert(this->currentRegion) here?
        if (this->currentRegion)
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch)
            {
                this->InsertReturnThunkForRegion(this->currentRegion, restoreReturnValueFromBailoutLabel);
                if (instr->HasBailOutInfo())
                {
                    this->SetHasBailedOut(instr);
                    tmpInstr = this->EmitEHBailoutStackRestore(instr);
                    this->EmitSaveEHBailoutReturnValueAndJumpToRetThunk(tmpInstr);
                    if (!restoreReturnFromBailoutEmitted)
                    {
                        // Only one restore sequence is needed; every bailout
                        // path jumps to this shared label.
                        this->EmitRestoreReturnValueFromEHBailout(restoreReturnValueFromBailoutLabel, epilogLabel);
                        restoreReturnFromBailoutEmitted = true;
                    }
                }
            }
        }
    }
    NEXT_INSTR_IN_FUNC_EDITING
}
- // GenerateFastLdFld: emit inline-cache fast paths for a field load (LdFld and friends).
- // Always returns false -- the original instruction is not lowered here. The caller emits the
- // helper call under *pLabelHelper; *pIsHelper is set to true once any fast path was generated.
- bool
- Lowerer::GenerateFastLdFld(IR::Instr * const instrLdFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod,
- IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd, bool* pIsHelper, IR::LabelInstr** pLabelHelper)
- {
- // Generates:
- //
- // r1 = object->type
- // if (r1 is taggedInt) goto helper
- // Load inline cache
- // if monomorphic
- // r2 = address of the monomorphic inline cache
- // if polymorphic
- // r2 = address of the polymorphic inline cache array
- // r3 = (type >> PIC shift amount) & (PIC size - 1)
- // r2 = r2 + r3
- // Try load property using proto cache (if protoFirst)
- // Try load property using local cache
- // Try loading property using proto cache (if !protoFirst)
- // Try loading property using flags cache
- //
- // Loading property using local cache:
- // if (r1 == r2->u.local.type)
- // result = load inline slot r2->u.local.slotIndex from r1
- // goto fallthru
- // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
- // result = load aux slot r2->u.local.slotIndex from r1
- // goto fallthru
- //
- // Loading property using proto cache:
- // if (r1 == r2->u.proto.type)
- // r3 = r2->u.proto.prototypeObject
- // result = load inline slot r2->u.proto.slotIndex from r3
- // goto fallthru
- // if (r1 | InlineCacheAuxSlotTypeTag) == r2.u.proto.type)
- // r3 = r2->u.proto.prototypeObject
- // result = load aux slot r2->u.proto.slotIndex from r3
- // goto fallthru
- //
- // Loading property using flags cache:
- // if (r2->u.accessor.flags & (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) == 0)
- // if (r1 == r2->u.accessor.type)
- // result = load inline slot r2->u.accessor.slotIndex from r1
- // goto fallthru
- // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.accessor.type)
- // result = load aux slot r2->u.accessor.slotIndex from r1
- // goto fallthru
- //
- // Loading an inline slot:
- // result = [r1 + slotIndex * sizeof(Var)]
- //
- // Loading an aux slot:
- // slotArray = r1->auxSlots
- // result = [slotArray + slotIndex * sizeof(Var)]
- //
- // We only emit the code block for a type of cache (local/proto/flags) if the profile data
- // indicates that type of cache was used to load the property in the past.
- // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
- // load the property from an aux slot before.
- // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
- // load the property from an inline slot before.
- IR::Opnd * opndSrc = instrLdFld->GetSrc1();
- AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as src of LdFld");
- Assert(!instrLdFld->DoStackArgsOpt(this->m_func));
- IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
- PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
- PHASE_PRINT_TESTTRACE(
- Js::ObjTypeSpecPhase,
- this->m_func,
- L"Field load: %s, property: %s, func: %s, cache ID: %d, cloned cache: false\n",
- Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
- this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
- this->m_func->GetJnFunction()->GetDisplayName(),
- propertySymOpnd->m_inlineCacheIndex);
- Assert(pIsHelper != nullptr);
- bool& isHelper = *pIsHelper;
- Assert(pLabelHelper != nullptr);
- IR::LabelInstr*& labelHelper = *pLabelHelper;
- bool doLocal = true;
- // Method loads default to checking the proto cache (first), since methods usually live on the prototype.
- bool doProto = instrLdFld->m_opcode == Js::OpCode::LdMethodFld
- || instrLdFld->m_opcode == Js::OpCode::LdRootMethodFld
- || instrLdFld->m_opcode == Js::OpCode::ScopedLdMethodFld;
- bool doProtoFirst = doProto;
- bool doInlineSlots = true;
- bool doAuxSlots = true;
- // Use profile data, when available, to narrow which cache kinds (local/proto) and
- // slot kinds (inline/aux) actually get a fast path.
- if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrLdFld->IsProfiledInstr())
- {
- IR::ProfiledInstr * profiledInstrLdFld = instrLdFld->AsProfiledInstr();
- if (profiledInstrLdFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
- {
- doProto = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromProto);
- doLocal = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromLocal);
- if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
- {
- // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
- doAuxSlots = false;
- }
- else if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
- {
- // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
- doInlineSlots = false;
- }
- }
- else if (!profiledInstrLdFld->u.FldInfo().valueType.IsUninitialized())
- {
- // We have value type info about the field but no flags. This means we shouldn't generate any
- // fast paths for this field load.
- doLocal = false;
- doProto = false;
- }
- }
- // Neither cache kind is worth checking inline: no fast path; caller goes straight to the helper.
- if (!doLocal && !doProto)
- {
- return false;
- }
- IR::LabelInstr * labelFallThru = instrLdFld->GetOrCreateContinueLabel();
- if (labelHelper == nullptr)
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- }
- IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- // A runtime polymorphic inline cache is an array of caches indexed by a hash of the type;
- // otherwise we load the single monomorphic cache directly.
- bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;
- IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
- if (usePolymorphicInlineCache)
- {
- LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCaches(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrLdFld);
- }
- else
- {
- LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd, isHelper), instrLdFld);
- }
- // The caller may have already loaded the object's type; only emit the tagged-value test
- // and type load if it hasn't.
- if (typeOpnd == nullptr)
- {
- typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, typeOpnd, labelHelper);
- }
- if (usePolymorphicInlineCache)
- {
- LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrLdFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
- }
- IR::LabelInstr * labelNext = nullptr;
- IR::Opnd * opndDst = instrLdFld->GetDst();
- IR::RegOpnd * opndTaggedType = nullptr;
- // Tracks the miss-branch of the most recently emitted cache check so it can be re-pointed
- // at the helper once we know it is the last check in the chain.
- IR::BranchInstr * labelNextBranchToPatch = nullptr;
- if (doProto && doProtoFirst)
- {
- if (doInlineSlots)
- {
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
- LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
- instrLdFld->InsertBefore(labelNext);
- }
- if (doAuxSlots)
- {
- // The aux-slot check compares against the type with the aux slot tag OR'ed in; compute it once.
- if (opndTaggedType == nullptr)
- {
- opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
- LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
- }
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
- LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
- instrLdFld->InsertBefore(labelNext);
- }
- }
- if (doLocal)
- {
- if (doInlineSlots)
- {
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
- LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
- instrLdFld->InsertBefore(labelNext);
- }
- if (doAuxSlots)
- {
- if (opndTaggedType == nullptr)
- {
- opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
- LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
- }
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
- LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
- instrLdFld->InsertBefore(labelNext);
- }
- }
- if (doProto && !doProtoFirst)
- {
- if (doInlineSlots)
- {
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
- LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
- instrLdFld->InsertBefore(labelNext);
- }
- if (doAuxSlots)
- {
- if (opndTaggedType == nullptr)
- {
- opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
- LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
- }
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
- LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
- instrLdFld->InsertBefore(labelNext);
- }
- }
- // The last cache check's miss-branch still targets labelNext, which has nothing after it;
- // re-point that branch at the helper and drop the now-unreferenced label.
- Assert(labelNextBranchToPatch);
- labelNextBranchToPatch->SetTarget(labelHelper);
- labelNext->Remove();
- // $helper:
- // dst = CALL Helper(inlineCache, base, field, scriptContext)
- // $fallthru:
- isHelper = true;
- // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
- return false;
- }
- // Emits a check on the inline cache's 16-bit layout word: after shifting out the
- // layout-selector bits, a non-zero remainder (presumably the recorded slot capacity,
- // indicating the aux slot array needs adjusting before the store -- confirm against
- // Js::InlineCache) sends control to the helper.
- void
- Lowerer::GenerateAuxSlotAdjustmentRequiredCheck(
- IR::Instr * instrToInsertBefore,
- IR::RegOpnd * opndInlineCache,
- IR::LabelInstr * labelHelper)
- {
- // regSlotCap = MOV [&(inlineCache->u.local.rawUInt16)] // sized to 16 bits
- IR::RegOpnd * regSlotCap = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
- IR::IndirOpnd * memSlotCap = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.rawUInt16), TyUint16, instrToInsertBefore->m_func);
- InsertMove(regSlotCap, memSlotCap, instrToInsertBefore);
- // SHR regSlotCap, Js::InlineCache::CacheLayoutSelectorBitCount
- // (Shr_A is a logical right shift, not the arithmetic SAR.)
- IR::IntConstOpnd * constSelectorBitCount = IR::IntConstOpnd::New(Js::InlineCache::CacheLayoutSelectorBitCount, TyUint16, instrToInsertBefore->m_func, /* dontEncode = */ true);
- // Shift, then branch to $helper if the result is non-zero (BrNeq against zero).
- InsertShiftBranch(Js::OpCode::Shr_A, regSlotCap, regSlotCap, constSelectorBitCount, Js::OpCode::BrNeq_A, true, labelHelper, instrToInsertBefore);
- }
- // Emits IR that copies the type recorded in the inline cache onto the object itself:
- // read inlineCache->u.local.type, strip the aux slot tag when present, and store the
- // result into base->type. Used on the add-property fast path, where the store changes
- // the object's type.
- void
- Lowerer::GenerateSetObjectTypeFromInlineCache(
- IR::Instr * instrToInsertBefore,
- IR::RegOpnd * opndBase,
- IR::RegOpnd * opndInlineCache,
- bool isTypeTagged)
- {
- Func * func = instrToInsertBefore->m_func;
- // newTypeReg = MOV [&(inlineCache->u.local.type)]
- IR::RegOpnd * newTypeReg = IR::RegOpnd::New(TyMachReg, func);
- IR::IndirOpnd * cacheTypeMem = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, func);
- InsertMove(newTypeReg, cacheTypeMem, instrToInsertBefore);
- if (isTypeTagged)
- {
- // AND newTypeReg, ~InlineCacheAuxSlotTypeTag
- // An AddrOpnd carries the mask because on 64-bit platforms an IntConstOpnd
- // cannot hold a full TyMachReg-sized value.
- IR::AddrOpnd * tagComplement = IR::AddrOpnd::New((Js::Var)~InlineCacheAuxSlotTypeTag, IR::AddrOpndKindConstant, func, /* dontEncode = */ true);
- InsertAnd(newTypeReg, newTypeReg, tagComplement, instrToInsertBefore);
- }
- // MOV base->type, newTypeReg
- IR::IndirOpnd * objTypeMem = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
- InsertMove(objTypeMem, newTypeReg, instrToInsertBefore);
- }
- // GenerateFastStFld: emit inline-cache fast paths for a field store and -- when the profile
- // says properties are added here -- for a property add (which also updates the object's type).
- // Always returns false: the caller emits the helper call; *pIsHelper is set to true once a
- // fast path was generated.
- bool
- Lowerer::GenerateFastStFld(IR::Instr * const instrStFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod, IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd,
- bool* pIsHelper, IR::LabelInstr** pLabelHelper, bool withPutFlags, Js::PropertyOperationFlags flags)
- {
- // Generates:
- //
- // r1 = object->type
- // if (r1 is taggedInt) goto helper
- // Load inline cache
- // if monomorphic
- // r2 = address of the monomorphic inline cache
- // if polymorphic
- // r2 = address of the polymorphic inline cache array
- // r3 = (type >> PIC shift amount) & (PIC size - 1)
- // r2 = r2 + r3
- // Try store property using local cache
- //
- // Loading property using local cache:
- // if (r1 == r2->u.local.type)
- // store value to inline slot r2->u.local.slotIndex on r1
- // goto fallthru
- // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
- // store value to aux slot r2->u.local.slotIndex on r1
- // goto fallthru
- //
- // Storing to an inline slot:
- // [r1 + slotIndex * sizeof(Var)] = value
- //
- // Storing to an aux slot:
- // slotArray = r1->auxSlots
- // [slotArray + slotIndex * sizeof(Var)] = value
- //
- // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
- // store the property to an aux slot before.
- // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
- // store the property to an inline slot before.
- IR::Opnd * opndSrc = instrStFld->GetSrc1();
- IR::Opnd * opndDst = instrStFld->GetDst();
- AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as dst of StFld");
- IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
- PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
- PHASE_PRINT_TESTTRACE(
- Js::ObjTypeSpecPhase,
- this->m_func,
- L"Field store: %s, property: %s, func: %s, cache ID: %d, cloned cache: false\n",
- Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
- this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
- this->m_func->GetJnFunction()->GetDisplayName(),
- propertySymOpnd->m_inlineCacheIndex);
- Assert(pIsHelper != nullptr);
- bool& isHelper = *pIsHelper;
- Assert(pLabelHelper != nullptr);
- IR::LabelInstr*& labelHelper = *pLabelHelper;
- // doStore: set an existing property; doAdd: add a missing property (changes the type).
- bool doStore = true;
- bool doAdd = false;
- bool doInlineSlots = true;
- bool doAuxSlots = true;
- if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrStFld->IsProfiledInstr())
- {
- IR::ProfiledInstr * profiledInstrStFld = instrStFld->AsProfiledInstr();
- if (profiledInstrStFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
- {
- // The profile never saw a local store or add here: no fast path at all.
- if (!(profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)))
- {
- return false;
- }
- if (!PHASE_OFF(Js::AddFldFastPathPhase, this->m_func))
- {
- // We always try to do the store field fast path, unless the profile specifically says we never set, but always add a property here.
- if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)) == Js::FldInfo_FromLocalWithoutProperty)
- {
- doStore = false;
- }
- // On the other hand, we only emit the add field fast path, if the profile explicitly says we do add properties here.
- if (!!(profiledInstrStFld->u.FldInfo().flags & Js::FldInfo_FromLocalWithoutProperty))
- {
- doAdd = true;
- }
- }
- else
- {
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- #endif
- PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
- L"AddFldFastPath: function: %s(%s) property: %s(#%d) no fast path, because the phase is off.\n",
- this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(), propertySym->m_propertyId);
- }
- if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
- {
- // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
- doAuxSlots = false;
- }
- else if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
- {
- // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
- doInlineSlots = false;
- }
- }
- else if (!profiledInstrStFld->u.FldInfo().valueType.IsUninitialized())
- {
- // We have value type info about the field but no flags. This means we shouldn't generate any
- // fast paths for this field store.
- return false;
- }
- }
- Assert(doStore || doAdd);
- if (labelHelper == nullptr)
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- }
- IR::LabelInstr * labelFallThru = instrStFld->GetOrCreateContinueLabel();
- IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;
- if (doAdd)
- {
- #if ENABLE_DEBUG_CONFIG_OPTIONS
- wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
- #endif
- PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
- L"AddFldFastPath: function: %s(%s) property: %s(#%d) %s fast path for %s.\n",
- this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
- this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(), propertySym->m_propertyId,
- usePolymorphicInlineCache ? L"poly" : L"mono", doStore ? L"store and add" : L"add only");
- }
- IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
- if (usePolymorphicInlineCache)
- {
- LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCaches(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrStFld);
- }
- else
- {
- LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrStFld, propertySymOpnd, isHelper), instrStFld);
- }
- // Only emit the tagged-value test and type load if the caller hasn't already done so.
- if (typeOpnd == nullptr)
- {
- typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- GenerateObjectTestAndTypeLoad(instrStFld, opndBase, typeOpnd, labelHelper);
- }
- if (usePolymorphicInlineCache)
- {
- LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrStFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
- }
- IR::LabelInstr * labelNext = nullptr;
- IR::RegOpnd * opndTaggedType = nullptr;
- // Tracks the miss-branch of the most recently emitted cache check, re-pointed to the helper at the end.
- IR::BranchInstr * lastBranchToNext = nullptr;
- if (doStore)
- {
- if (doInlineSlots)
- {
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext);
- LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
- instrStFld->InsertBefore(labelNext);
- }
- if (doAuxSlots)
- {
- // The aux-slot check compares against the type with the aux slot tag OR'ed in; compute it once.
- if (opndTaggedType == nullptr)
- {
- opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
- LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
- }
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext);
- LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
- instrStFld->InsertBefore(labelNext);
- }
- }
- if (doAdd)
- {
- if (doInlineSlots)
- {
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
- // NOTE(review): the trailing 'true' presumably selects the cache's typeWithoutProperty
- // check for the add-property case -- confirm against LowererMD::GenerateLocalInlineCacheCheck.
- lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext, true);
- // Adding a property transitions the object to the type recorded in the cache.
- GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, false);
- LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
- instrStFld->InsertBefore(labelNext);
- }
- if (doAuxSlots)
- {
- if (opndTaggedType == nullptr)
- {
- opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
- LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
- }
- // NOTE(review): unlike every other labelNext creation in this function, this one does
- // not pass isHelper -- confirm whether that omission is intentional.
- labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext, true);
- // Adding into an aux slot may require growing the aux slot array; punt to the helper if so.
- GenerateAuxSlotAdjustmentRequiredCheck(instrStFld, opndInlineCache, labelHelper);
- GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, true);
- LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
- instrStFld->InsertBefore(labelNext);
- }
- }
- // The final cache check's miss-branch still targets labelNext, which is empty; re-point it
- // at the helper and remove the dead label.
- Assert(lastBranchToNext);
- lastBranchToNext->SetTarget(labelHelper);
- labelNext->Remove();
- // $helper:
- // CALL Helper(inlineCache, base, field, src, scriptContext)
- // $fallthru:
- isHelper = true;
- // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
- return false;
- }
- // Fast path for StFld/StFldStrict on RegExp "lastIndex": writes the lastIndexVar field
- // directly and marks the cached numeric form invalid (NotCachedValue). Returns true when
- // the fast path fully handles the store (a helper path may still be created via
- - // *labelHelperRef for the tagged-value / non-RegExp cases); false when no fast path applies.
- bool Lowerer::GenerateFastStFldForCustomProperty(IR::Instr *const instr, IR::LabelInstr * *const labelHelperRef)
- {
- Assert(instr);
- Assert(labelHelperRef);
- Assert(!*labelHelperRef);
- // Only plain stores qualify.
- switch(instr->m_opcode)
- {
- case Js::OpCode::StFld:
- case Js::OpCode::StFldStrict:
- break;
- default:
- return false;
- }
- IR::SymOpnd *const symOpnd = instr->GetDst()->AsSymOpnd();
- PropertySym *const propertySym = symOpnd->m_sym->AsPropertySym();
- if(propertySym->m_propertyId != Js::PropertyIds::lastIndex || !symOpnd->IsPropertySymOpnd())
- {
- return false;
- }
- const ValueType objectValueType(symOpnd->GetPropertyOwnerValueType());
- if(!objectValueType.IsLikelyRegExp())
- {
- return false;
- }
- if(instr->HasBailOutInfo())
- {
- // Only a plain bailout-on-implicit-calls (no extra kind bits) is compatible with this path.
- const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
- if(!BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) || bailOutKind & IR::BailOutKindBits)
- {
- // Other bailout kinds will likely need bailout checks that would not be generated here. In particular, if a type
- // check is necessary here to guard against downstream property accesses on the same object, the type check will
- // fail and cause a bailout if the object is a RegExp object since the "lastIndex" property accesses are not cached.
- return false;
- }
- }
- Func *const func = instr->m_func;
- IR::RegOpnd *const objectOpnd = symOpnd->CreatePropertyOwnerOpnd(func);
- const IR::AutoReuseOpnd autoReuseObjectOpnd(objectOpnd, func);
- IR::LabelInstr *labelHelper = nullptr;
- if(!objectOpnd->IsNotTaggedValue())
- {
- // test object, 1
- // jnz $helper
- if(!labelHelper)
- {
- *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- m_lowererMD.GenerateObjectTest(objectOpnd, instr, labelHelper);
- }
- if(!objectValueType.IsObject())
- {
- // Not yet proven to be a RegExp object: guard on the vtable.
- // cmp [object], Js::JavascriptRegExp::vtable
- // jne $helper
- if(!labelHelper)
- {
- *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- InsertCompareBranch(
- IR::IndirOpnd::New(objectOpnd, 0, TyMachPtr, func),
- LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp),
- Js::OpCode::BrNeq_A,
- labelHelper,
- instr);
- objectOpnd->SetValueType(objectValueType.ToDefiniteObject());
- }
- // mov [object + offset(lastIndexVar)], src
- // mov [object + offset(lastIndexOrFlag)], Js::JavascriptRegExp::NotCachedValue
- // jmp $done
- InsertMove(
- IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, func),
- instr->GetSrc1(),
- instr);
- InsertMove(
- IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, func),
- IR::IntConstOpnd::New(Js::JavascriptRegExp::NotCachedValue, TyUint32, func, true),
- instr);
- InsertBranch(Js::OpCode::Br, instr->GetOrCreateContinueLabel(), instr);
- return true;
- }
- // Emits a check that regOpnd is a built-in recyclable object: DynamicObject vtables pass
- // immediately; otherwise the type id must fall in (TypeIds_LastStaticType,
- // TypeIds_LastBuiltinDynamicObject]. On failure control goes to labelHelper (or, when
- // labelContinue is supplied, success jumps there instead). Returns the register holding
- // the loaded type so callers can reuse it.
- IR::RegOpnd *
- Lowerer::GenerateIsBuiltinRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject, IR::LabelInstr *labelContinue)
- {
- // CMP [srcReg], Js::DynamicObject::`vtable'
- // JEQ $fallThough
- // MOV r1, [src1 + offset(type)] -- get the type id
- // MOV r1, [r1 + offset(typeId)]
- // ADD r1, ~TypeIds_LastStaticType -- if (typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
- // CMP r1, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
- // JA $helper
- //fallThrough:
- IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- if (checkObjectAndDynamicObject)
- {
- if (!regOpnd->IsNotTaggedValue())
- {
- m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
- }
- m_lowererMD.GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
- }
- IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
- IR::IndirOpnd *indirOpnd;
- // MOV typeRegOpnd, [src1 + offset(type)]
- indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
- m_lowererMD.CreateAssign(typeRegOpnd, indirOpnd, insertInstr);
- // MOV typeIdRegOpnd, [typeRegOpnd + offset(typeId)]
- indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
- m_lowererMD.CreateAssign(typeIdRegOpnd, indirOpnd, insertInstr);
- // ADD typeIdRegOpnd, ~TypeIds_LastStaticType
- // Adding ~K equals subtracting (K + 1); this maps the valid half-open range
- // (LastStaticType, LastBuiltinDynamicObject] onto [0, count), so one compare suffices.
- InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd,
- IR::IntConstOpnd::New(~Js::TypeIds_LastStaticType, TyInt32, this->m_func, true), insertInstr);
- // CMP typeIdRegOpnd, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
- InsertCompare(
- typeIdRegOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_LastBuiltinDynamicObject - Js::TypeIds_LastStaticType - 1, TyInt32, this->m_func),
- insertInstr);
- if (labelContinue)
- {
- // On success, go to continuation label.
- InsertBranch(Js::OpCode::BrLe_A, true, labelContinue, insertInstr);
- }
- else
- {
- // On failure, go to helper.
- InsertBranch(Js::OpCode::BrGt_A, true, labelHelper, insertInstr);
- }
- // $fallThrough
- insertInstr->InsertBefore(labelFallthrough);
- return typeRegOpnd;
- }
- // Emits fast equality-branch paths when both operands are likely booleans or likely objects.
- // Returns true when a fast path was emitted (helper label inserted; *pNeedHelper says whether
- // the helper body is still required); returns false when no fast path applies.
- bool Lowerer::GenerateFastBrEqLikely(IR::BranchInstr * instrBranch, bool *pNeedHelper)
- {
- IR::Opnd *src1 = instrBranch->GetSrc1();
- IR::Opnd *src2 = instrBranch->GetSrc2();
- IR::LabelInstr *targetInstr = instrBranch->GetTarget();
- IR::LabelInstr *labelBooleanCmp = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- // NOTE(review): labelTrue is the fall-through/continue label, not a "branch taken" target.
- IR::LabelInstr *labelTrue = instrBranch->GetOrCreateContinueLabel();
- IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
- bool isStrictBr = false;
- bool isStrictMode = this->m_func->GetJnFunction()->GetIsStrictMode();
- *pNeedHelper = true;
- switch (instrBranch->m_opcode)
- {
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- isStrictBr = true;
- break;
- }
- if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyBoolean())
- {
- //
- // Booleans
- //
- if (isStrictBr)
- {
- if (!src1->GetValueType().IsBoolean() && !src2->GetValueType().IsBoolean())
- {
- // Strict compare with neither side proven boolean: only verify src2 is a JS boolean.
- this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
- if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true))
- {
- instrBranch->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
- }
- }
- else
- {
- // One side is a definite boolean: the raw compare below is sufficient; no helper needed.
- *pNeedHelper = false;
- }
- }
- else
- {
- // Non-strict: both sides must be proven JS booleans before the raw compare is valid.
- this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
- this->m_lowererMD.GenerateJSBooleanTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
- this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
- if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true))
- {
- instrBranch->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
- }
- }
- }
- else if (src1->GetValueType().IsLikelyObject() && src2->GetValueType().IsLikelyObject())
- {
- //
- // Objects
- //
- IR::LabelInstr *labelTypeIdCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
- if (!isStrictBr)
- {
- // If not strictBr, verify both sides are dynamic objects
- this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
- this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
- this->m_lowererMD.GenerateIsDynamicObject(src1->AsRegOpnd(), instrBranch, labelTypeIdCheck, false);
- }
- else
- {
- this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
- }
- this->m_lowererMD.GenerateIsDynamicObject(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true);
- instrBranch->InsertBefore(labelTypeIdCheck);
- if (isStrictMode)
- {
- // In strict mode, a pointer compare of non-DynamicObjects is unsafe here; go to the helper.
- labelTypeIdCheck->isOpHelper = true;
- IR::BranchInstr *branchToHelper = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
- instrBranch->InsertBefore(branchToHelper);
- }
- else
- {
- if (!ExternalLowerer::TryGenerateFastExternalEqTest(src1, src2, instrBranch, labelHelper, labelBooleanCmp, this, isStrictBr))
- {
- if (!isStrictBr)
- {
- GenerateIsBuiltinRecyclableObject(src1->AsRegOpnd(), instrBranch, labelHelper, false /*checkObjectAndDynamicObject*/);
- }
- GenerateIsBuiltinRecyclableObject(src2->AsRegOpnd(), instrBranch, labelHelper, false /*checkObjectAndDynamicObject*/);
- }
- }
- }
- else
- {
- return false;
- }
- // $booleanCmp: both operands validated; lower a raw pointer compare-and-branch, then
- // fall through (unconditional jump) to the continue label, then place $helper.
- instrBranch->InsertBefore(labelBooleanCmp);
- IR::BranchInstr *newBranch = IR::BranchInstr::New(instrBranch->m_opcode, targetInstr, src1, src2, this->m_func);
- instrBranch->InsertBefore(newBranch);
- this->m_lowererMD.LowerCondBranch(newBranch);
- newBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelTrue, this->m_func);
- instrBranch->InsertBefore(newBranch);
- instrBranch->InsertBefore(labelHelper);
- return true;
- }
- bool Lowerer::GenerateFastBrBool(IR::BranchInstr *const instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
- Func *const func = instr->m_func;
- if(!instr->GetSrc1()->IsRegOpnd())
- {
- LowererMD::ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
- }
- IR::RegOpnd *const src = instr->GetSrc1()->Copy(func)->AsRegOpnd();
- const IR::AutoReuseOpnd autoReuseSrc(src, func);
- const ValueType srcOriginalValueType(src->GetValueType());
- ValueType srcValueType(srcOriginalValueType);
- IR::LabelInstr *const labelTarget = instr->GetTarget();
- IR::LabelInstr *const labelFallthrough = instr->GetOrCreateContinueLabel();
- if(labelTarget == labelFallthrough)
- {
- // Nothing to do
- instr->Remove();
- return false;
- }
- const bool branchOnFalse = instr->m_opcode == Js::OpCode::BrFalse_A;
- IR::LabelInstr *const labelFalse = branchOnFalse ? labelTarget : labelFallthrough;
- IR::LabelInstr *const labelTrue = branchOnFalse ? labelFallthrough : labelTarget;
- const Js::OpCode compareWithFalseBranchToTargetOpCode = branchOnFalse ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A;
- IR::LabelInstr *lastLabelBeforeHelper = nullptr;
- /// Typespec'd float
- if (instr->GetSrc1()->GetType() == TyFloat64)
- {
- InsertFloatCheckForZeroOrNanBranch(instr->GetSrc1(), branchOnFalse, labelTarget, labelFallthrough, instr);
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- instr->Remove();
- return false;
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Null fast path
- if (srcValueType.HasBeenNull() || srcOriginalValueType.IsUninitialized())
- {
- if(srcValueType.IsNull())
- {
- // jmp $false
- InsertBranch(Js::OpCode::Br, labelFalse, instr);
- // Skip lowering call to helper
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // cmp src, null
- // je $false
- InsertCompareBranch(
- src,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
- Js::OpCode::BrEq_A,
- labelFalse,
- instr);
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Null));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Undefined fast path
- if(srcValueType.HasBeenUndefined() || srcOriginalValueType.IsUninitialized())
- {
- if(srcValueType.IsUndefined())
- {
- // jmp $false
- InsertBranch(Js::OpCode::Br, labelFalse, instr);
- // Skip lowering call to helper
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // cmp src, undefined
- // je $false
- InsertCompareBranch(
- src,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
- Js::OpCode::BrEq_A,
- labelFalse,
- instr);
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Undefined));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Tagged int fast path
- const bool isNotInt = src->IsNotInt();
- bool checkedForTaggedInt = isNotInt;
- if( (
- srcValueType.HasBeenInt() ||
- srcValueType.HasBeenUnknownNumber() ||
- srcOriginalValueType.IsUninitialized()
- ) && !isNotInt)
- {
- checkedForTaggedInt = true;
- IR::LabelInstr *notTaggedIntLabel = nullptr;
- if(!src->IsTaggedInt())
- {
- // test src, 1
- // jz $notTaggedInt
- notTaggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- m_lowererMD.GenerateSmIntTest(src, instr, notTaggedIntLabel);
- }
- // cmp src, tag(0)
- // je/jne $target
- m_lowererMD.GenerateTaggedZeroTest(src, instr);
- Lowerer::InsertBranch(compareWithFalseBranchToTargetOpCode, labelTarget, instr);
- if(src->IsTaggedInt())
- {
- // Skip lowering call to helper
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // jmp $fallthrough
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- // $notTaggedInt:
- if(notTaggedIntLabel)
- {
- instr->InsertBefore(notTaggedIntLabel);
- lastLabelBeforeHelper = notTaggedIntLabel;
- }
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Float fast path
- bool generateFloatTest = srcValueType.IsLikelyFloat();
- #ifdef _M_IX86
- if (!AutoSystemInfo::Data.SSE2Available())
- {
- generateFloatTest = false;
- }
- #endif
- bool checkedForTaggedFloat =
- #if FLOATVAR
- srcValueType.IsNotNumber();
- #else
- true; // there are no tagged floats, indicate that it has been checked
- #endif
- if (generateFloatTest)
- {
- // if(srcValueType.IsFloat()) // skip tagged int check?
- //
- // ValueType::IsFloat() does not guarantee that the storage is not in a tagged int.
- // The tagged int check is necessary. It does, however, guarantee that as long as the value is not
- // stored in a tagged int, that it is definitely stored in a JavascriptNumber/TaggedFloat.
- IR::LabelInstr *const notFloatLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- if(!checkedForTaggedInt)
- {
- checkedForTaggedInt = true;
- m_lowererMD.GenerateSmIntTest(src, instr, notFloatLabel, nullptr, true);
- }
- // cmp [src], JavascriptNumber::vtable
- // jne $notFloat
- #if FLOATVAR
- checkedForTaggedFloat = true;
- IR::RegOpnd *const floatOpnd = m_lowererMD.CheckFloatAndUntag(src, instr, notFloatLabel);
- #else
- m_lowererMD.GenerateFloatTest(src, instr, notFloatLabel);
- IR::IndirOpnd *const floatOpnd = IR::IndirOpnd::New(src, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, func);
- #endif
- // cmp src, 0.0
- // jp $false
- // je/jne $target
- // jmp $fallthrough
- InsertFloatCheckForZeroOrNanBranch(floatOpnd, branchOnFalse, labelTarget, labelFallthrough, instr);
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- // $notFloat:
- instr->InsertBefore(notFloatLabel);
- lastLabelBeforeHelper = notFloatLabel;
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::AnyNumber));
- }
- IR::LabelInstr *labelHelper = nullptr;
- bool _didObjectTest = checkedForTaggedInt && checkedForTaggedFloat;
- const auto EnsureObjectTest = [&]()
- {
- if(_didObjectTest)
- {
- return;
- }
- if(!labelHelper)
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- m_lowererMD.GenerateObjectTest(src, instr, labelHelper);
- _didObjectTest = true;
- };
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Boolean fast path
- if (srcValueType.HasBeenBoolean() || srcOriginalValueType.IsUninitialized())
- {
- IR::LabelInstr *notBooleanLabel = nullptr;
- if (!srcValueType.IsBoolean())
- {
- EnsureObjectTest();
- // cmp [src], JavascriptBoolean::vtable
- // jne $notBoolean
- notBooleanLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- InsertCompareBranch(
- IR::IndirOpnd::New(src, 0, TyMachPtr, func),
- LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptBoolean),
- Js::OpCode::BrNeq_A,
- notBooleanLabel,
- instr);
- }
- // cmp src, false
- // je/jne $target
- InsertCompareBranch(
- src,
- LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
- compareWithFalseBranchToTargetOpCode,
- labelTarget,
- instr);
- if (srcValueType.IsBoolean())
- {
- // Skip lowering call to helper
- Assert(!labelHelper);
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // jmp $fallthrough
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- if (notBooleanLabel)
- {
- instr->InsertBefore(notBooleanLabel);
- lastLabelBeforeHelper = notBooleanLabel;
- }
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Boolean));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // String fast path
- if(srcValueType.HasBeenString())
- {
- IR::LabelInstr *notStringLabel = nullptr;
- if(!srcValueType.IsString())
- {
- EnsureObjectTest();
- notStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- GenerateStringTest(src, instr, notStringLabel, nullptr, false);
- }
- // cmp [src + offset(length)], 0
- // jeq/jne $target
- InsertCompareBranch(
- IR::IndirOpnd::New(src, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func),
- IR::IntConstOpnd::New(0, TyUint32, func, true),
- compareWithFalseBranchToTargetOpCode,
- labelTarget,
- instr);
- if(srcValueType.IsString())
- {
- // Skip lowering call to helper
- Assert(!labelHelper);
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- // jmp $fallthrough
- Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
- if(notStringLabel)
- {
- instr->InsertBefore(notStringLabel);
- lastLabelBeforeHelper = notStringLabel;
- }
- src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::String));
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Object fast path
- if (srcValueType.IsLikelyObject())
- {
- if(srcValueType.IsObject())
- {
- if(srcValueType.GetObjectType() > ObjectType::Object)
- {
- // Specific object types that are tracked are equivalent to 'true'
- // jmp $true
- InsertBranch(Js::OpCode::Br, labelTrue, instr);
- // Skip lowering call to helper
- Assert(!labelHelper);
- Assert(instr->m_prev->IsBranchInstr());
- instr->Remove();
- return false;
- }
- }
- else
- {
- EnsureObjectTest();
- }
- // mov srcType, [src + offset(type)] -- load type
- IR::RegOpnd *const srcType = IR::RegOpnd::New(TyMachPtr, func);
- const IR::AutoReuseOpnd autoReuseR1(srcType, func);
- InsertMove(srcType, IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func), instr);
- // test [srcType + offset(flags)], TypeFlagMask_IsFalsy -- check if falsy
- // jnz $false
- InsertTestBranch(
- IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfFlags(), TyUint8, func),
- IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyUint8, func),
- Js::OpCode::BrNeq_A,
- labelFalse,
- instr);
- // cmp [srcType + offset(typeId)], TypeIds_LastJavascriptPrimitiveType -- check base TypeIds_LastJavascriptPrimitiveType
- // ja $true
- InsertCompareBranch(
- IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfTypeId(), TyInt32, func),
- IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, func),
- Js::OpCode::BrGt_A,
- true /* isUnsigned */,
- labelTrue,
- instr);
- if(!labelHelper)
- {
- labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- }
- lastLabelBeforeHelper = nullptr;
- }
- ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- // Helper call
- // $helper:
- if(lastLabelBeforeHelper)
- {
- Assert(instr->m_prev == lastLabelBeforeHelper);
- lastLabelBeforeHelper->isOpHelper = true;
- }
- if (labelHelper)
- {
- Assert(labelHelper->isOpHelper);
- instr->InsertBefore(labelHelper);
- }
- // call JavascriptConversion::ToBoolean
- IR::RegOpnd *const toBoolDst = IR::RegOpnd::New(TyInt32, func);
- const IR::AutoReuseOpnd autoReuseToBoolDst(toBoolDst, func);
- IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, toBoolDst, instr->GetSrc1(), func);
- instr->InsertBefore(callInstr);
- LowerUnaryHelperMem(callInstr, IR::HelperConv_ToBoolean);
- // test eax, eax
- InsertTest(toBoolDst, toBoolDst, instr);
- // je/jne $target
- Assert(instr->IsBranchInstr());
- instr->FreeSrc1();
- instr->m_opcode = LowererMD::MDBranchOpcode(compareWithFalseBranchToTargetOpCode);
- Assert(instr->AsBranchInstr()->GetTarget() == labelTarget);
- // Skip lowering another call to helper
- return false;
- }
- // Helper method used in LowerMD by all platforms.
- // Creates HelperCallOpnd or DiagHelperCallOpnd, based on helperMethod and state.
- // static
- IR::HelperCallOpnd*
- Lowerer::CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func)
- {
- Assert(func);
- // Decide up front whether the helper must be wrapped for the debugger:
- // only when the continue-after-exception wrappers are enabled, the function
- // is jitted in debug mode, and the helper can actually throw.
- const bool needsDebuggerWrapper =
- CONFIG_FLAG(EnableContinueAfterExceptionWrappersForHelpers) &&
- func->IsJitInDebugMode() &&
- HelperMethodAttributes::CanThrow(helperMethod);
- if (needsDebuggerWrapper)
- {
- // Create DiagHelperCallOpnd to indicate that it's needed to wrap original helper with try-catch wrapper,
- // so that we can ignore exception and bailout to next stmt in debugger.
- // For details, see: Lib\Runtime\Debug\DiagHelperMethodWrapper.{h,cpp}.
- return IR::DiagHelperCallOpnd::New(helperMethod, func, helperArgCount);
- }
- return IR::HelperCallOpnd::New(helperMethod, func);
- }
- // Tries to pattern-match "t = typeof(o); branch/compare t against a string
- // constant" and replace the pair with a specialized typeof fast path.
- // On success returns true, sets *prev to the instruction to resume lowering
- // from, and *pfNoLower reports whether any further lowering remains.
- // Returns false to fall back to generic lowering.
- bool
- Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool *pfNoLower)
- {
- Assert(prev);
- Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
- instr->m_opcode == Js::OpCode::BrSrNeq_A ||
- instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
- instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
- instr->m_opcode == Js::OpCode::CmSrEq_A ||
- instr->m_opcode == Js::OpCode::CmSrNeq_A ||
- instr->m_opcode == Js::OpCode::BrEq_A ||
- instr->m_opcode == Js::OpCode::BrNeq_A ||
- instr->m_opcode == Js::OpCode::BrNotEq_A ||
- instr->m_opcode == Js::OpCode::BrNotNeq_A ||
- instr->m_opcode == Js::OpCode::CmEq_A ||
- instr->m_opcode == Js::OpCode::CmNeq_A)-
- //
- // instr - (Br/Cm)Sr(N)eq_A
- // instr->m_prev - typeOf
- //
- IR::Instr *instrLd = instr->GetPrevRealInstrOrLabel();
- bool skippedLoads = false;
- //Skip intermediate Ld_A which might be inserted by flow graph peeps
- while (instrLd && instrLd->m_opcode == Js::OpCode::Ld_A )
- {
- // Only skip loads into flow-graph peep temporaries; anything else could
- // be a user-visible def, so bail out of the optimization.
- if (!(instrLd->GetDst()->IsRegOpnd() && instrLd->GetDst()->AsRegOpnd()->m_fgPeepTmp))
- {
- return false;
- }
- // A load with bailout info cannot be removed/bypassed safely.
- if (instrLd->HasBailOutInfo())
- {
- return false;
- }
- instrLd = instrLd->GetPrevRealInstrOrLabel();
- skippedLoads = true;
- }
- IR::Instr *typeOf = instrLd;
- if (typeOf && (typeOf->m_opcode == Js::OpCode::Typeof))
- {
- IR::RegOpnd *typeOfDst = typeOf->GetDst()->IsRegOpnd() ? typeOf->GetDst()->AsRegOpnd() : nullptr;
- IR::RegOpnd *instrSrc1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
- IR::RegOpnd *instrSrc2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
- if (typeOfDst && instrSrc1 && instrSrc2)
- {
- // The compare's first source must be exactly the typeof result.
- if (instrSrc1->m_sym == typeOfDst->m_sym)
- {
- // The typeof result must die at the compare; otherwise it is
- // observable elsewhere and we cannot elide the Typeof instruction.
- if (!instrSrc1->m_isTempLastUse)
- {
- return false;
- }
- // The second source must be a single-def string constant.
- if (!(instrSrc2->m_sym->m_isSingleDef && instrSrc2->m_sym->m_isStrConst))
- {
- return false;
- }
- // The second argument to [Cm|Br]TypeOf is the typeid.
- IR::IntConstOpnd *typeIdOpnd = nullptr;
- Assert(instrSrc2->m_sym->m_isSingleDef);
- Assert(instrSrc2->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd());
- // We can't optimize non-javascript type strings.
- Js::JavascriptString *typeNameJsString = Js::JavascriptString::FromVar(instrSrc2->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address);
- const wchar_t *typeName = typeNameJsString->GetString();
- Js::InternalString typeNameString(typeName, typeNameJsString->GetLength());
- // Map the compared type-name string to the corresponding TypeId;
- // any other string means we cannot take the fast path.
- if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::UndefinedTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::ObjectTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Object, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::BooleanTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Boolean, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::NumberTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::StringTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_String, TyInt32, instr->m_func);
- }
- else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::FunctionTypeNameString))
- {
- typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, instr->m_func);
- }
- else
- {
- return false;
- }
- if (skippedLoads)
- {
- //validate none of dst of Ld_A overlaps with typeof src or dst
- IR::Opnd* typeOfSrc = typeOf->GetSrc1();
- instrLd = typeOf->GetNextRealInstr();
- while (instrLd != instr)
- {
- if (instrLd->GetDst()->IsEqual(typeOfDst) || instrLd->GetDst()->IsEqual(typeOfSrc))
- {
- return false;
- }
- instrLd = instrLd->GetNextRealInstr();
- }
- // Safe to hoist: move the Typeof next to the compare so the
- // skipped loads do not sit between the fused pair.
- typeOf->Unlink();
- instr->InsertBefore(typeOf);
- }
- // The first argument to [Cm|Br]TypeOf is the first arg to the TypeOf instruction.
- IR::Opnd *objectOpnd = typeOf->GetSrc1();
- Assert(objectOpnd->IsRegOpnd());
- // Now emit this instruction and remove the ldstr and typeOf.
- *prev = typeOf->m_prev;
- *pfNoLower = false;
- if (instr->IsBranchInstr())
- {
- GenerateFastBrTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower);
- }
- else
- {
- GenerateFastCmTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower);
- }
- return true;
- }
- }
- }
- return false;
- }
- // Emits a test of the object's type flags for TypeFlagMask_IsFalsy, used by
- // the typeof fast paths: a falsy object reports typeof "undefined", so it
- // compares equal to "undefined" and unequal to every other type name.
- // target/done are the equal/not-equal destinations; isNeqOp flips them.
- // No code is emitted when the typeId can never be falsy.
- void
- Lowerer::GenerateFalsyObjectTest(IR::Instr *insertInstr, IR::RegOpnd *TypeOpnd, Js::TypeId typeIdToCheck, IR::LabelInstr* target, IR::LabelInstr* done, bool isNeqOp)
- {
- if (!this->m_func->GetScriptContext()->GetThreadContext()->CanBeFalsy(typeIdToCheck) && typeIdToCheck != Js::TypeIds_Undefined)
- {
- // Don't need the check for falsy, the typeId we are looking for doesn't care
- return;
- }
- // TEST [type + offset(flags)], TypeFlagMask_IsFalsy
- IR::Opnd *flagsOpnd = IR::IndirOpnd::New(TypeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
- InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), insertInstr);
- if (typeIdToCheck == Js::TypeIds_Undefined)
- {
- //Falsy object returns true for undefined ((typeof falsyObj) == "undefined")
- InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp ? done : target, insertInstr);
- }
- else
- {
- //Falsy object returns false for all other types ((typeof falsyObj) != "function")
- InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp? target : done , insertInstr);
- }
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::GenerateFastBrTypeOf
- ///
- /// Lowers a fused "branch on (typeof object ==/!= <type name>)" into an
- /// inline type-id check. 'typeIdOpnd' is the TypeId matching the compared
- /// string; 'typeOf' is the original Typeof instruction, which is either
- /// removed or re-emitted as a helper call for proxy/host-dispatch objects.
- /// *pfNoLower is set when no further lowering of 'branch' is needed.
- ///
- ///----------------------------------------------------------------------------
- void
- Lowerer::GenerateFastBrTypeOf(IR::Instr *branch, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower)
- {
- Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
- IR::LabelInstr *target = branch->AsBranchInstr()->GetTarget();
- IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
- IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
- // Classify the branch opcode: "not equal" forms branch to target on mismatch.
- bool isNeqOp;
- switch(branch->m_opcode)
- {
- case Js::OpCode::BrSrNeq_A:
- case Js::OpCode::BrNeq_A:
- case Js::OpCode::BrSrNotEq_A:
- case Js::OpCode::BrNotEq_A:
- isNeqOp = true;
- break;
- case Js::OpCode::BrSrEq_A:
- case Js::OpCode::BrEq_A:
- case Js::OpCode::BrSrNotNeq_A:
- case Js::OpCode::BrNotNeq_A:
- isNeqOp = false;
- break;
- default:
- Assert(UNREACHED);
- __assume(UNREACHED);
- }
- // Tagged values: a tagged int is typeof "number", so the object test
- // branches to $target only when comparing against "number".
- // JNE/BNE (typeId == Js::TypeIds_Number) ? $target : $done
- IR::LabelInstr *label = (typeId == Js::TypeIds_Number) ? target : done;
- if (isNeqOp)
- label = (label == target) ? done : target;
- m_lowererMD.GenerateObjectTest(object, branch, label);
- // MOV typeRegOpnd, [object + offset(Type)]
- InsertMove(typeRegOpnd,
- IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
- branch);
- // Falsy objects masquerade as "undefined"; handled before the type-id compare.
- GenerateFalsyObjectTest(branch, typeRegOpnd, typeId, target, done, isNeqOp);
- // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
- IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
- InsertMove(objTypeIdOpnd,
- IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
- branch);
- // CMP objTypeId, typeId
- // JEQ/JGE $done
- if (typeId == Js::TypeIds_Object)
- {
- // Anything with TypeId >= TypeIds_Object is typeof "object".
- InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? done : target, branch);
- }
- else if (typeId == Js::TypeIds_Function)
- {
- InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? done : target, branch);
- }
- else if (typeId == Js::TypeIds_Number)
- {
- //Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
- // Range check via the unsigned-compare trick: subtract the lower bound
- // and compare against the range width as unsigned.
- InsertSub(false, objTypeIdOpnd, objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, branch->m_func),branch);
- InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_LastNumberType - Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);
- InsertBranch(isNeqOp ? Js::OpCode::BrGt_A : Js::OpCode::BrLe_A, true, target, branch);
- }
- else
- {
- InsertCompare(objTypeIdOpnd, typeIdOpnd, branch);
- InsertBranch(isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, target, branch);
- }
- // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
- // is still a Javascript "object."
- if (typeId == Js::TypeIds_Object)
- {
- // CMP object, 0xXXXXXXXX
- // JEQ isNeqOp ? $done : $target
- InsertCompareBranch(object,
- LoadLibraryValueOpnd(branch, LibraryValue::ValueNull),
- Js::OpCode::BrEq_A,
- isNeqOp ? done : target,
- branch);
- }
- branch->InsertAfter(done); // Get this label first
- // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
- if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
- {
- // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
- // JNE isNeqOp ? $target : $done
- InsertCompareBranch(objTypeIdOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
- Js::OpCode::BrEq_A,
- helper,
- branch);
- // CMP objTypeId, TypeIds_HostDispatch
- // JNE isNeqOp ? $target : $done
- InsertCompareBranch(objTypeIdOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
- Js::OpCode::BrNeq_A,
- isNeqOp ? target : done,
- branch);
- // Now emit Typeof and lower it like we would've for the helper call.
- {
- branch->InsertBefore(helper);
- typeOf->Unlink();
- branch->InsertBefore(typeOf);
- LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
- }
- }
- else // Other primitive types don't need helper
- {
- typeOf->Remove();
- branch->Remove();
- *pfNoLower = true;
- }
- // $done:
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::GenerateFastCmTypeOf
- ///
- /// Lowers a fused "dst = (typeof object ==/!= <type name>)" into an inline
- /// type-id check producing the library true/false values in dst. 'typeOf' is
- /// the original Typeof instruction, removed or re-emitted as a helper call
- /// for proxy/host-dispatch objects. *pfNoLower is set when no further
- /// lowering of 'compare' is needed.
- ///
- ///----------------------------------------------------------------------------
- void
- Lowerer::GenerateFastCmTypeOf(IR::Instr *compare, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower)
- {
- Assert(compare->m_opcode == Js::OpCode::CmSrEq_A ||
- compare->m_opcode == Js::OpCode::CmEq_A ||
- compare->m_opcode == Js::OpCode::CmSrNeq_A ||
- compare->m_opcode == Js::OpCode::CmNeq_A);
- Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
- IR::LabelInstr *movFalse = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
- IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
- IR::LabelInstr *helper= IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- IR::RegOpnd *dst = compare->GetDst()->IsRegOpnd() ? compare->GetDst()->AsRegOpnd() : nullptr;
- IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
- bool isNeqOp = compare->m_opcode == Js::OpCode::CmSrNeq_A ||
- compare->m_opcode == Js::OpCode::CmNeq_A;
- Assert(dst);
- if (dst->IsEqual(object))
- {
- //dst same as the src of typeof. As we need to move true to dst first we need to save the src to a new opnd
- IR::RegOpnd *newObject = IR::RegOpnd::New(object->GetType(), m_func);
- InsertMove(newObject, object, compare); //Save src
- object = newObject;
- }
- // Optimistically store 'true'; paths that disprove the match jump to
- // $movFalse, which overwrites dst with 'false'.
- // mov dst, 'true'
- InsertMove(dst,
- LoadLibraryValueOpnd(compare, LibraryValue::ValueTrue),
- compare);
- // Tagged values: a tagged int is typeof "number".
- // TEST object, 1
- // JNE (typeId == Js::TypeIds_Number) ? $done : $movFalse
- IR::LabelInstr *target = (typeId == Js::TypeIds_Number) ? done : movFalse;
- if (isNeqOp)
- {
- target = (target == done) ? movFalse : done;
- }
- m_lowererMD.GenerateObjectTest(object, compare, target);
- // MOV typeRegOpnd, [object + offset(Type)]
- InsertMove(typeRegOpnd,
- IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
- compare);
- // Falsy objects masquerade as "undefined"; handled before the type-id compare.
- GenerateFalsyObjectTest(compare, typeRegOpnd, typeId, done, movFalse, isNeqOp);
- // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
- IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
- InsertMove(objTypeIdOpnd,
- IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
- compare);
- // CMP objTypeId, typeId
- // JEQ/JGE $done
- if (typeId == Js::TypeIds_Object)
- {
- // Anything with TypeId >= TypeIds_Object is typeof "object".
- InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? movFalse : done, compare);
- }
- else if (typeId == Js::TypeIds_Function)
- {
- InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? movFalse : done, compare);
- }
- else if (typeId == Js::TypeIds_Number)
- {
- //Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
- InsertCompareBranch(objTypeIdOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_LastNumberType, TyInt32, compare->m_func),
- Js::OpCode::BrGt_A,
- isNeqOp ? done : movFalse,
- compare);
- InsertCompareBranch(objTypeIdOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, compare->m_func),
- isNeqOp? Js::OpCode::BrLt_A : Js::OpCode::BrGe_A,
- done,
- compare);
- }
- else
- {
- InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, done, compare);
- }
- // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
- // is still a Javascript "object."
- if (typeId == Js::TypeIds_Object)
- {
- // CMP object, 0xXXXXXXXX
- // JEQ isNeqOp ? $movFalse : $done
- InsertCompareBranch(object,
- LoadLibraryValueOpnd(compare, LibraryValue::ValueNull),
- Js::OpCode::BrEq_A,
- isNeqOp ? movFalse : done,
- compare);
- }
- compare->InsertAfter(done); // Get this label first
- // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
- if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
- {
- // CMP objTypeId, TypeIds_Proxy
- // JNE isNeqOp ? $done : $movFalse
- InsertCompareBranch(objTypeIdOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
- Js::OpCode::BrEq_A,
- helper,
- compare);
- // CMP objTypeId, TypeIds_HostDispatch
- // JNE isNeqOp ? $done : $movFalse
- InsertCompareBranch(objTypeIdOpnd,
- IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
- Js::OpCode::BrNeq_A,
- isNeqOp ? done : movFalse,
- compare);
- // Now emit Typeof like we would've for the helper call.
- {
- compare->InsertBefore(helper);
- typeOf->Unlink();
- compare->InsertBefore(typeOf);
- LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
- }
- // JMP/B $done
- InsertBranch(Js::OpCode::Br, done, done);
- }
- else // Other primitive types don't need helper
- {
- typeOf->Remove();
- compare->Remove();
- *pfNoLower = true;
- }
- // $movFalse: (insert before $done)
- done->InsertBefore(movFalse);
- // MOV dst, 'false'
- InsertMove(dst, LoadLibraryValueOpnd(done, LibraryValue::ValueFalse), done);
- // $done:
- }
- // Emits a runtime check that the current call carries Js::CallFlags_New;
- // when it does not, throws JSERR_ClassConstructorCannotBeCalledWithoutNew
- // (class constructors must be invoked with 'new'). Replaces instrInsert.
- void
- Lowerer::GenerateCheckForCallFlagNew(IR::Instr* instrInsert)
- {
- Func *func = instrInsert->m_func;
- IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- Assert(!func->IsInlinee());
- // MOV s1, [ebp + 4] // s1 = call info
- // AND s2, s1, Js::CallFlags_New // s2 = s1 & Js::CallFlags_New
- // CMP s2, 0
- // JNE $Done
- // CALL RuntimeTypeError
- // $Done
- IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
- Assert(Js::CallInfo::ksizeofCount == 24);
- IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyUint32, func);
- // The call flags are packed above the 24-bit argument count in CallInfo,
- // hence the shift by ksizeofCount when building the mask.
- InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), instrInsert);
- InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelDone, instrInsert);
- IR::Instr *throwInstr = IR::Instr::New(
- Js::OpCode::RuntimeTypeError,
- IR::RegOpnd::New(TyMachReg, m_func),
- IR::IntConstOpnd::New(SCODE_CODE(JSERR_ClassConstructorCannotBeCalledWithoutNew), TyInt32, m_func),
- m_func);
- instrInsert->InsertBefore(throwInstr);
- this->LowerUnaryHelperMem(throwInstr, IR::HelperOp_RuntimeTypeError);
- instrInsert->InsertBefore(labelDone);
- instrInsert->Remove();
- }
- // Lowers the load of 'new.target' into dst:
- //   - generator functions: always undefined (assigned directly);
- //   - CallFlags_NewTarget set: new.target was passed as the hidden last
- //     argument, load it from the stack;
- //   - CallFlags_New set: new.target is the function object itself;
- //   - otherwise: undefined.
- // Replaces instrInsert with the emitted sequence.
- void
- Lowerer::GenerateLoadNewTarget(IR::Instr* instrInsert)
- {
- Func *func = instrInsert->m_func;
- IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- IR::LabelInstr * labelLoadArgNewTarget = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);
- Assert(!func->IsInlinee());
- if (func->GetJnFunction()->IsGenerator())
- {
- instrInsert->SetSrc1(opndUndefAddress);
- LowererMD::ChangeToAssign(instrInsert);
- return;
- }
- // MOV dst, undefined // dst = undefined
- // MOV s1, [ebp + 4] // s1 = call info
- // AND s2, s1, Js::CallFlags_NewTarget // s2 = s1 & Js::CallFlags_NewTarget
- // CMP s2, 0
- // JNE $LoadLastArgument
- // AND s2, s1, Js::CallFlags_New // s2 = s1 & Js::CallFlags_New
- // CMP s2, 0
- // JE $Done
- // MOV dst, [ebp + 8] // dst = function object
- // JMP $Done
- // $LoadLastArgument
- // AND s2, s1, (0x00FFFFFF)
- // MOV s3, ebp
- // MOV dst, [s3 + 5 * sizeof(Var) + s2] // s3 = last argument
- // $Done
- IR::Opnd * dstOpnd = instrInsert->GetDst();
- Assert(dstOpnd->IsRegOpnd());
- LowererMD::CreateAssign(dstOpnd, opndUndefAddress, instrInsert);
- IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
- Assert(Js::CallInfo::ksizeofCount == 24);
- IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyUint32, func);
- // Call flags live above the 24-bit argument count within CallInfo.
- InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_NewTarget << Js::CallInfo::ksizeofCount, TyUint32, func, true), instrInsert);
- InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelLoadArgNewTarget, instrInsert);
- InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), instrInsert);
- GenerateNotZeroTest(isNewFlagSetRegOpnd, labelDone, instrInsert);
- // 'new' without an explicit new.target: new.target is the callee itself.
- IR::Instr* loadFuncInstr = IR::Instr::New(Js::OpCode::AND, func);
- loadFuncInstr->SetDst(instrInsert->GetDst());
- m_lowererMD.LoadFuncExpression(loadFuncInstr);
- instrInsert->InsertBefore(loadFuncInstr);
- InsertBranch(Js::OpCode::Br, labelDone, instrInsert);
- instrInsert->InsertBefore(labelLoadArgNewTarget);
- // Extract the 24-bit argument count, then index past the formals to reach
- // the hidden trailing new.target argument.
- IR::RegOpnd* argCountOpnd = isNewFlagSetRegOpnd;
- InsertAnd(argCountOpnd, callInfoOpnd, IR::IntConstOpnd::New(0x00FFFFFF, TyUint32, func, true), instrInsert);
- IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachReg, func);
- StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
- instrInsert->InsertBefore(this->m_lowererMD.LoadStackAddress(paramSym, baseOpnd));
- const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
- IR::IndirOpnd* argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), argCountOpnd, indirScale, TyMachReg, this->m_func);
- // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
- // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
- // valueOpnd represents is not changed.
- uint16 actualOffset = GetFormalParamOffset() + 1; //5
- argIndirOpnd->SetOffset(actualOffset << indirScale);
- LowererMD::CreateAssign(dstOpnd, argIndirOpnd, instrInsert);
- instrInsert->InsertBefore(labelDone);
- instrInsert->Remove();
- }
- // Unboxes the current function object in place: if instr's dst holds a
- // StackScriptFunction that has already been boxed to the heap, replace it
- // with the boxed function object; otherwise leave it untouched.
- void
- Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
- {
- Func * func = this->m_func;
- IR::Instr * insertPoint = instr->m_next;
- IR::RegOpnd * funcObj = instr->GetDst()->AsRegOpnd();
- IR::Opnd * stackFuncVtable = this->LoadVTableValueOpnd(insertPoint, VTableValue::VtableStackScriptFunction);
- IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
- // Not a stack function? Nothing to do.
- // CMP [funcObj], VtableStackScriptFunction
- // JNE $done
- InsertCompareBranch(IR::IndirOpnd::New(funcObj, 0, TyMachPtr, func), stackFuncVtable,
- Js::OpCode::BrNeq_A, true, doneLabel, insertPoint);
- // Load the boxed counterpart; null means it has not been boxed yet.
- IR::RegOpnd * boxedFuncObj = IR::RegOpnd::New(TyMachPtr, func);
- InsertMove(boxedFuncObj, IR::IndirOpnd::New(funcObj,
- Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertPoint);
- // TEST boxedFuncObj, boxedFuncObj
- // JE $done
- InsertTestBranch(boxedFuncObj, boxedFuncObj, Js::OpCode::BrEq_A, true, doneLabel, insertPoint);
- // Replace the stack function with its boxed version.
- InsertMove(funcObj, boxedFuncObj, insertPoint);
- insertPoint->InsertBefore(doneLabel);
- }
- // Produces an operand holding the inline cache to use at runtime for
- // propSymOpnd: if the current function object carries its own inline cache
- // array, index into it; otherwise fall back to the cache recorded on the
- // property sym operand. 'isHelper' marks the emitted labels as helper code.
- IR::Opnd *
- Lowerer::GetInlineCacheFromFuncObjectForRuntimeUse(IR::Instr * instr, IR::PropertySymOpnd * propSymOpnd, bool isHelper)
- {
- // MOV s1, [ebp + 8] //s1 = function object
- // MOV s2, [s1 + offset(hasInlineCaches)]
- // TEST s2, s2
- // JE $L1
- // MOV s3, [s1 + offset(m_inlineCaches)] //s3 = inlineCaches from function object
- // MOV s4, [s3 + index*scale] //s4 = inlineCaches[index]
- // JMP $L2
- // $L1
- // MOV s3, propSym->m_runtimeCache
- // $L2
- byte indirScale = this->m_lowererMD.GetDefaultIndirScale();
- IR::RegOpnd * funcObjOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
- IR::Instr * funcObjInstr = IR::Instr::New(Js::OpCode::Ld_A, funcObjOpnd, instr->m_func);
- instr->InsertBefore(funcObjInstr);
- this->m_lowererMD.LoadFuncExpression(funcObjInstr);
- IR::RegOpnd * funcObjHasInlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
- this->m_lowererMD.CreateAssign(funcObjHasInlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), TyUint8, instr->m_func), instr);
- IR::LabelInstr * inlineCachesNullLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
- InsertTestBranch(funcObjHasInlineCachesOpnd, funcObjHasInlineCachesOpnd, Js::OpCode::BrEq_A, inlineCachesNullLabel, instr);
- IR::RegOpnd * inlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
- Lowerer::InsertMove(inlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunctionWithInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, instr->m_func), instr);
- IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
- IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
- int inlineCacheOffset;
- // When the byte offset index*sizeof(InlineCache*) fits in an int32
- // (Int32Math::Mul reports no overflow), use a constant-offset load;
- // otherwise materialize the index and use a scaled indir.
- if (!Int32Math::Mul(sizeof(Js::InlineCache *), propSymOpnd->m_inlineCacheIndex, &inlineCacheOffset))
- {
- Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, inlineCacheOffset, TyMachPtr, instr->m_func), instr);
- }
- else
- {
- Lowerer::InsertMove(indexOpnd, IR::IntConstOpnd::New(propSymOpnd->m_inlineCacheIndex, TyUint32, instr->m_func), instr);
- Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, indexOpnd, indirScale, TyMachPtr, instr->m_func), instr);
- }
- IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
- InsertBranch(LowererMD::MDUncondBranchOpcode, continueLabel, instr);
- // Fallback path: use the runtime inline cache recorded on the propSym.
- IR::Instr * ldCacheFromPropSymOpndInstr = this->m_lowererMD.CreateAssign(inlineCacheOpnd, IR::AddrOpnd::New(propSymOpnd->m_runtimeInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func), instr);
- ldCacheFromPropSymOpndInstr->InsertBefore(inlineCachesNullLabel);
- ldCacheFromPropSymOpndInstr->InsertAfter(continueLabel);
- return inlineCacheOpnd;
- }
- IR::Instr *
- Lowerer::LowerInitClass(IR::Instr * instr)
- {
- // scriptContext
- IR::Instr * prevInstr = LoadScriptContext(instr);
- // extends
- if (instr->GetSrc2() != nullptr)
- {
- IR::Opnd * extendsOpnd = instr->UnlinkSrc2();
- m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
- }
- else
- {
- IR::AddrOpnd* extendsOpnd = IR::AddrOpnd::NewNull(this->m_func);
- m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
- }
- // constructor
- IR::Opnd * ctorOpnd = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, ctorOpnd);
- // call
- m_lowererMD.ChangeToHelperCall(instr, IR::HelperOP_InitClass);
- return prevInstr;
- }
void
Lowerer::LowerNewConcatStrMulti(IR::Instr * instr)
{
    // Allocates and initializes a ConcatStringMulti with 'count' (src1) slots;
    // the slots themselves are filled by lowered SetConcatStrMultiItem instrs.
    IR::IntConstOpnd * countOpnd = instr->UnlinkSrc1()->AsIntConstOpnd();
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    uint8 count = (uint8)countOpnd->GetValue();
    Assert(dstOpnd->GetValueType().IsString());
    GenerateRecyclerAlloc(IR::HelperAllocMemForConcatStringMulti, Js::ConcatStringMulti::GetAllocSize(count), dstOpnd, instr);
    // Recycler memory is zero-initialized, so the RecyclerMemInit helpers can
    // elide zero/null stores (e.g. pszValue and charLength below).
    GenerateRecyclerMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, VTableValue::VtableConcatStringMulti), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfType(),
        this->LoadLibraryValueOpnd(instr, LibraryValue::ValueStringTypeStatic), instr);
    GenerateRecyclerMemInitNull(dstOpnd, Js::ConcatStringMulti::GetOffsetOfpszValue(), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), 0, instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfSlotCount(), countOpnd->AsUint32(), instr);
    // The original NewConcatStrMulti instruction is fully replaced above.
    instr->Remove();
}
void
Lowerer::LowerNewConcatStrMultiBE(IR::Instr * instr)
{
    // Lower
    // t1 = SetConcatStrMultiBE s1
    // t2 = SetConcatStrMultiBE s2, t1
    // t3 = SetConcatStrMultiBE s3, t2
    // s = NewConcatStrMultiBE 3, t3
    // to
    // s = new concat string
    // s+0 = s1
    // s+1 = s2
    // s+2 = s3
    Assert(instr->GetSrc1()->IsConstOpnd());
    Assert(instr->GetDst()->IsRegOpnd());
    IR::RegOpnd * newString = instr->GetDst()->AsRegOpnd();
    IR::Opnd * newConcatItemOpnd = nullptr;
    // Walk the Set*BE def chain backwards via src2 links, filling slots from the
    // highest index down to 0.
    uint index = instr->GetSrc1()->AsIntConstOpnd()->AsUint32() - 1;
    IR::Instr * concatItemInstr = nullptr;
    IR::Opnd * linkOpnd = instr->GetSrc2();
    while (linkOpnd)
    {
        Assert(linkOpnd->IsRegOpnd());
        concatItemInstr = linkOpnd->GetStackSym()->GetInstrDef();
        Assert(concatItemInstr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE);
        IR::Opnd * concatItemOpnd = concatItemInstr->GetSrc1();
        Assert(concatItemOpnd->IsRegOpnd());
        // If one of the concat items is equal to the dst of the concat expressions (s = s + a + b),
        // hoist the load of that item to before the setting of the new string to the dst.
        if (concatItemOpnd->IsEqual(newString))
        {
            if (!newConcatItemOpnd)
            {
                // Hoist once; reuse the hoisted copy for every occurrence of dst.
                IR::Instr * hoistSrcInstr = concatItemInstr->HoistSrc1(Js::OpCode::Ld_A);
                newConcatItemOpnd = hoistSrcInstr->GetDst();
            }
            concatItemOpnd = newConcatItemOpnd;
        }
        else
        {
            // If only some of the SetConcatStrMultiItemBE instructions were CSE'd and the rest, along with the NewConcatStrMultiBE
            // instruction, were in a loop, the strings on the CSE'd Set*BE instructions will become live on back edge. Add them to
            // addToLiveOnBackEdgeSyms here and clear when we reach the Set*BE instruction.
            // Note that we are doing this only for string opnds which are not the same as the dst of the concat expression. Reasoning
            // behind this is that if a loop has a concat expression with one of its sources same as the dst, the Set*BE instruction
            // for the dst wouldn't have been CSE'd as the dst's value is changing in the loop and the backward pass should have set the
            // symbol as live on backedge.
            this->addToLiveOnBackEdgeSyms->Set(concatItemOpnd->GetStackSym()->m_id);
        }
        // Emit the slot store after 'instr' (so it executes after the allocation
        // emitted by LowerNewConcatStrMulti below), then lower it immediately.
        IR::Instr * newConcatItemInstr = IR::Instr::New(Js::OpCode::SetConcatStrMultiItem,
            IR::IndirOpnd::New(newString, index, TyVar, instr->m_func),
            concatItemOpnd,
            instr->m_func);
        instr->InsertAfter(newConcatItemInstr);
        this->LowerSetConcatStrMultiItem(newConcatItemInstr);
        linkOpnd = concatItemInstr->GetSrc2();
        index--;
    }
    // 'index' wraps around after slot 0 is filled; comparing the unsigned value
    // against -1 relies on the usual arithmetic conversions.
    Assert(index == -1);
    this->LowerNewConcatStrMulti(instr);
}
void
Lowerer::LowerSetConcatStrMultiItem(IR::Instr * instr)
{
    // Stores one string into a ConcatStringMulti slot and accumulates the item's
    // length into the concat string's charLength field.
    Func * func = this->m_func;
    IR::IndirOpnd * dstOpnd = instr->GetDst()->AsIndirOpnd();
    IR::RegOpnd * concatStrOpnd = dstOpnd->GetBaseOpnd();
    IR::RegOpnd * srcOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    Assert(concatStrOpnd->GetValueType().IsString());
    Assert(srcOpnd->GetValueType().IsString());
    // Compound strings are mutable, so clone them before referencing.
    srcOpnd = GenerateGetImmutableOrScriptUnreferencedString(srcOpnd, instr, IR::HelperOp_CompoundStringCloneForConcat);
    instr->SetSrc1(srcOpnd);
    IR::IndirOpnd * dstLength = IR::IndirOpnd::New(concatStrOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func);
    IR::Opnd * srcLength;
    if (srcOpnd->m_sym->m_isStrConst)
    {
        // String constant: fold its length at JIT time.
        srcLength = IR::IntConstOpnd::New(Js::JavascriptString::FromVar(srcOpnd->m_sym->GetConstAddress())->GetLength(),
            TyUint32, func);
    }
    else
    {
        // Otherwise load the length from the string object at runtime.
        srcLength = IR::RegOpnd::New(TyUint32, func);
        InsertMove(srcLength, IR::IndirOpnd::New(srcOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func), instr);
    }
    InsertAdd(false, dstLength, dstLength, srcLength, instr);
    // The dst indir currently carries the slot index as its offset; rewrite it
    // into the byte offset within the slots array, then reduce the instruction
    // to a plain machine assign.
    dstOpnd->SetOffset(dstOpnd->GetOffset() * sizeof(Js::JavascriptString *) + Js::ConcatStringMulti::GetOffsetOfSlots());
    this->m_lowererMD.ChangeToAssign(instr);
}
- IR::RegOpnd *
- Lowerer::GenerateGetImmutableOrScriptUnreferencedString(IR::RegOpnd * strOpnd, IR::Instr * insertBeforeInstr, IR::JnHelperMethod helperMethod, bool reloadDst)
- {
- if (strOpnd->m_sym->m_isStrConst)
- {
- return strOpnd;
- }
- Func * const func = this->m_func;
- IR::RegOpnd *dstOpnd = reloadDst == true ? IR::RegOpnd::New(TyVar, func) : strOpnd;
- IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
- IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
- if (!strOpnd->IsNotTaggedValue())
- {
- this->m_lowererMD.GenerateObjectTest(strOpnd, insertBeforeInstr, doneLabel);
- }
- // CMP [strOpnd], Js::CompoundString::`vtable'
- // JEQ $helper
- InsertCompareBranch(
- IR::IndirOpnd::New(strOpnd, 0, TyMachPtr, func),
- this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
- Js::OpCode::BrEq_A,
- helperLabel,
- insertBeforeInstr);
- if (reloadDst)
- {
- InsertMove(dstOpnd, strOpnd, insertBeforeInstr);
- }
- InsertBranch(Js::OpCode::Br, doneLabel, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(helperLabel);
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, strOpnd);
- IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dstOpnd, func);
- callInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
- insertBeforeInstr->InsertBefore(callInstr);
- this->m_lowererMD.LowerCall(callInstr, 0);
- insertBeforeInstr->InsertBefore(doneLabel);
- return dstOpnd;
- }
void
Lowerer::LowerConvStrCommon(IR::JnHelperMethod helper, IR::Instr * instr)
{
    // Shared lowering for string-conversion opcodes: if the source may already
    // be a string, emit a fast path that copies it straight to dst; otherwise
    // (or on fast-path failure) call the given runtime conversion helper.
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->GetValueType().IsNotString())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        this->GenerateStringTest(src1Opnd, instr, helperLabel);
        // Fast path: already a string — copy and jump over the helper call.
        InsertMove(instr->GetDst(), src1Opnd, instr);
        InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(helperLabel);
        instr->InsertAfter(doneLabel);
    }
    // Helper path: helper(src1, scriptContext[, src2]); args loaded in reverse.
    if (instr->GetSrc2())
    {
        this->m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    }
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd);
    this->m_lowererMD.ChangeToHelperCall(instr, helper);
}
- void
- Lowerer::LowerConvStr(IR::Instr * instr)
- {
- LowerConvStrCommon(IR::HelperOp_ConvString, instr);
- }
- void
- Lowerer::LowerCoerseStr(IR::Instr* instr)
- {
- LowerConvStrCommon(IR::HelperOp_CoerseString, instr);
- }
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerCoerseStrOrRegex - This method is used for String.Replace(arg1, arg2)
- /// where arg1 is regex or string
- /// if arg1 is not regex, then do String.Replace(CoerseStr(arg1), arg2);
- ///
- /// CoerseStrOrRegex arg1
- ///
- /// if (value == regex) goto :done
- /// else
- ///helper:
- /// ConvStr value
- ///done:
- ///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseStrOrRegex(IR::Instr* instr)
{
    // Fast path: src1 that is already a JavascriptRegExp (vtable compare) is
    // passed through unchanged; anything else falls into string conversion.
    IR::RegOpnd * src1Opnd = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // if (value == regex) goto :done
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged (non-object) values can't be regexes; send them to the helper.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    // helper: ConvStr value
    LowerConvStr(instr);
}
- ///----------------------------------------------------------------------------
- ///
- /// Lowerer::LowerCoerseRegex - This method is used for String.Match(arg1)
- /// if arg1 is regex, then pass CreateRegEx(arg1) to String.Match
- ///
- ///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseRegex(IR::Instr* instr)
{
    // Fast path: src1 that is already a JavascriptRegExp (vtable compare) is
    // copied to dst; otherwise call the CoerseRegex helper.
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged (non-object) values can't be regexes; send them to the helper.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    // Helper path: CoerseRegex(regex, option = null, scriptContext);
    // arguments are loaded in reverse order.
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::NewNull(instr->m_func)); // option
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd); // regex
    this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CoerseRegex);
}
- void
- Lowerer::LowerConvPrimStr(IR::Instr * instr)
- {
- LowerConvStrCommon(IR::HelperOp_ConvPrimitiveString, instr);
- }
- void
- Lowerer::GenerateRecyclerAlloc(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
- {
- size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
- this->GenerateRecyclerAllocAligned(allocHelper, alignedSize, newObjDst, insertionPointInstr, inOpHelper);
- }
- void
- Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, int value, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- IRType type = TyInt32;
- if (isZeroed)
- {
- if (value == 0)
- {
- // Recycler memory are zero initialized
- return;
- }
- if (value > 0 && value <= USHORT_MAX)
- {
- // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
- type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
- }
- }
- Func * func = this->m_func;
- InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
- }
- void
- Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- IRType type = TyUint32;
- if (isZeroed)
- {
- if (value == 0)
- {
- // Recycler memory are zero initialized
- return;
- }
- if (value <= USHORT_MAX)
- {
- // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
- type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
- }
- }
- Func * func = this->m_func;
- InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
- }
- void
- Lowerer::GenerateMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- if (isZeroed)
- {
- return;
- }
- GenerateMemInit(opnd, offset, IR::AddrOpnd::NewNull(m_func), insertBeforeInstr);
- }
- void
- Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
- {
- IRType type = value->GetType();
- Func * func = this->m_func;
- InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
- }
- void
- Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr)
- {
- GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
- }
- void
- Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr)
- {
- GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
- }
- void
- Lowerer::GenerateRecyclerMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr)
- {
- GenerateMemInitNull(opnd, offset, insertBeforeInstr, true);
- }
- void
- Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr)
- {
- GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
- }
- void
- Lowerer::GenerateMemCopy(IR::Opnd * dst, IR::Opnd * src, uint32 size, IR::Instr * insertBeforeInstr)
- {
- Func * func = this->m_func;
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::IntConstOpnd::New(size, TyUint32, func));
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, src);
- this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, dst);
- IR::Instr * memcpyInstr = IR::Instr::New(Js::OpCode::Call, func);
- memcpyInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperMemCpy, func));
- insertBeforeInstr->InsertBefore(memcpyInstr);
- m_lowererMD.LowerCall(memcpyInstr, 3);
- }
- bool
- Lowerer::GenerateSimplifiedInt4Rem(
- IR::Instr *const remInstr,
- IR::LabelInstr *const skipBailOutLabel) const
- {
- Assert(remInstr);
- Assert(remInstr->m_opcode == Js::OpCode::Rem_I4);
- auto *dst = remInstr->GetDst(), *src1 = remInstr->GetSrc1(), *src2 = remInstr->GetSrc2();
- Assert(src1 && src2);
- Assert(dst->IsRegOpnd());
- bool isModByPowerOf2 = (remInstr->HasBailOutInfo() && remInstr->GetBailOutKind() == IR::BailOnModByPowerOf2);
- if (PHASE_OFF(Js::Phase::MathFastPathPhase, remInstr->m_func->GetTopFunc()) && !isModByPowerOf2)
- return false;
- if (!(src2->IsIntConstOpnd() && Math::IsPow2(src2->AsIntConstOpnd()->AsInt32())) && !isModByPowerOf2)
- {
- return false;
- }
- // We have:
- // s3 = s1 % s2 , where s2 = +2^i
- //
- // Generate:
- // test s1, s1
- // js $slowPathLabel
- // s3 = and s1, 0x00..fff (2^i - 1)
- // jmp $doneLabel
- // $slowPathLabel:
- // (Slow path)
- // (Neg zero check)
- // (Bailout code)
- // $doneLabel:
- IR::LabelInstr *doneLabel = skipBailOutLabel, *slowPathLabel;
- if (!doneLabel)
- {
- doneLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func);
- remInstr->InsertAfter(doneLabel);
- }
- slowPathLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func, isModByPowerOf2);
- remInstr->InsertBefore(slowPathLabel);
- // test s1, s1
- InsertTest(src1, src1, slowPathLabel);
- // jsb $slowPathLabel
- InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
- // s3 = and s1, 0x00..fff (2^i - 1)
- IR::Opnd* maskOpnd;
- if(isModByPowerOf2)
- {
- Assert(isModByPowerOf2);
- maskOpnd = IR::RegOpnd::New(TyInt32, remInstr->m_func);
- // mov maskOpnd, s2
- InsertMove(maskOpnd, src2, slowPathLabel);
- // dec maskOpnd
- InsertSub(/*needFlags*/ true, maskOpnd, maskOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func, /*dontEncode*/true), slowPathLabel);
- // maskOpnd < 0 goto $slowPath
- InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
- // TEST src2, maskOpnd
- InsertTestBranch(src2, maskOpnd, Js::OpCode::BrNeq_A, slowPathLabel, slowPathLabel);
- }
- else
- {
- Assert(src2->IsIntConstOpnd());
- int32 mask = src2->AsIntConstOpnd()->AsInt32() - 1;
- maskOpnd = IR::IntConstOpnd::New(mask, TyInt32, remInstr->m_func);
- }
- // dst = src1 & maskOpnd
- InsertAnd(dst, src1, maskOpnd, slowPathLabel);
- // jmp $doneLabel
- InsertBranch(Js::OpCode::Br, doneLabel, slowPathLabel);
- return true;
- }
#if DBG
bool
Lowerer::ValidOpcodeAfterLower(IR::Instr* instr, Func * func)
{
    // Debug-only sanity check: returns true when 'instr' carries an opcode that
    // may legitimately remain in the IR after lowering (or after the later
    // phases indicated by the isPost* flags on 'func').
    Js::OpCode opcode = instr->m_opcode;
    // Opcodes above MDStart are machine-dependent and by definition lowered.
    if (opcode > Js::OpCode::MDStart)
    {
        return true;
    }
    switch (opcode)
    {
    // Pseudo-ops that survive through the entire backend.
    case Js::OpCode::Label:
    case Js::OpCode::StatementBoundary:
    case Js::OpCode::DeletedNonHelperBranch:
    case Js::OpCode::FunctionEntry:
    case Js::OpCode::FunctionExit:
    case Js::OpCode::TryCatch:
    case Js::OpCode::TryFinally:
    case Js::OpCode::Catch:
    case Js::OpCode::GeneratorResumeJumpTable:
    case Js::OpCode::Break:
#ifdef _M_X64
    case Js::OpCode::PrologStart:
    case Js::OpCode::PrologEnd:
#endif
#ifdef _M_IX86
    case Js::OpCode::BailOutStackRestore:
#endif
        return true;
    case Js::OpCode::RestoreOutParam:
        // Only valid once register allocation has run.
        Assert(func->isPostRegAlloc);
        return true;
    // These may be removed by peep
    case Js::OpCode::StartCall:
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::Nop:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
        return func && !func->isPostPeeps;
    case Js::OpCode::InlineeStart:
    case Js::OpCode::InlineeEnd:
        return instr->m_func->m_hasInlineArgsOpt;
#ifdef _M_X64
    case Js::OpCode::LdArgSize:
    case Js::OpCode::LdSpillSize:
        return func && !func->isPostFinalLower;
#endif
    case Js::OpCode::Leave:
        Assert(!func->IsLoopBodyInTry());
        Assert(func->HasTry() && func->DoOptimizeTryCatch());
        return func && !func->isPostFinalLower; //Lowered in FinalLower phase
    };
    return false;
}
#endif
- void Lowerer::LowerProfiledBeginSwitch(IR::JitProfilingInstr* instr)
- {
- Assert(instr->isBeginSwitch);
- m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
- m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfiledSwitch, m_func));
- m_lowererMD.LowerCall(instr, 0);
- }
- void Lowerer::LowerProfiledBinaryOp(IR::JitProfilingInstr* instr, IR::JnHelperMethod meth)
- {
- m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
- m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
- m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
- m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
- instr->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
- m_lowererMD.LowerCall(instr, 0);
- }
void Lowerer::GenerateNullOutGeneratorFrame(IR::Instr* insertInstr)
{
    // null out frame pointer on generator object to signal completion to JavascriptGenerator::CallGenerator
    // s = MOV prm1
    // s[offset of JavascriptGenerator::frame] = MOV nullptr
    // Load the first formal parameter (the generator object) into a register.
    StackSym *symSrc = StackSym::NewParamSlotSym(1, m_func);
    m_func->SetArgOffset(symSrc, LowererMD::GetFormalParamOffset() * MachPtr);
    IR::SymOpnd *srcOpnd = IR::SymOpnd::New(symSrc, TyMachPtr, m_func);
    IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    m_lowererMD.CreateAssign(dstOpnd, srcOpnd, insertInstr);
    // Store null into the generator's frame field.
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(dstOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, m_func);
    IR::AddrOpnd *addrOpnd = IR::AddrOpnd::NewNull(m_func);
    m_lowererMD.CreateAssign(indirOpnd, addrOpnd, insertInstr);
}
void Lowerer::LowerFunctionExit(IR::Instr* funcExit)
{
    // Function-exit lowering: generators null out their frame pointer, and
    // SimpleJit with dynamic profiling cleans the implicit call flags on exit.
    if (m_func->GetJnFunction()->IsGenerator())
    {
        GenerateNullOutGeneratorFrame(funcExit->m_prev);
    }
    if (!m_func->DoSimpleJitDynamicProfile())
    {
        return;
    }
    // Call SimpleCleanImplicitCallFlags(functionBody) just before the exit.
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleCleanImplicitCallFlags, m_func));
    funcExit->m_prev->InsertBefore(callInstr);
    m_lowererMD.LoadHelperArgument(callInstr, CreateFunctionBodyOpnd(funcExit->m_func));
    m_lowererMD.LowerCall(callInstr, 0);
}
void Lowerer::LowerFunctionEntry(IR::Instr* funcEntry)
{
    // Function-entry lowering: update the body call count and, under SimpleJit
    // dynamic profiling, profile incoming arguments and reset implicit call flags.
    Assert(funcEntry->m_opcode == Js::OpCode::FunctionEntry);
    //Don't do a body call increment for loops or asm.js
    if (m_func->IsLoopBody() || m_func->GetJnFunction()->GetIsAsmjsMode())
    {
        return;
    }
    IR::Instr *const insertBeforeInstr = this->m_func->GetFunctionEntryInsertionPoint();
    LowerFunctionBodyCallCountChange(insertBeforeInstr);
    if (m_func->DoSimpleJitDynamicProfile())
    {
        const auto jn = m_func->GetJnFunction();
        // Only generate the argument profiling if the function expects to have some arguments to profile and only if
        // it has implicit ArgIns (the latter is a restriction imposed by the Interpreter, so it is mirrored in SimpleJit)
        if (jn->GetInParamsCount() > 1 && jn->GetHasImplicitArgIns())
        {
            // Call out to the argument profiling helper
            IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
            callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfileParameters, m_func));
            insertBeforeInstr->InsertBefore(callInstr);
            m_lowererMD.LoadHelperArgument(callInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LowerCall(callInstr, 0);
        }
        // Clear existing ImplicitCallFlags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), insertBeforeInstr);
    }
}
void Lowerer::LowerFunctionBodyCallCountChange(IR::Instr *const insertBeforeInstr)
{
    // Emits the per-call counter update at function entry:
    // - non-SimpleJit: saturating increment of the body's call count;
    // - SimpleJit: saturating decrement, and on the underflow path calls
    //   TransitionFromSimpleJit (see the $overflow comment below).
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    const bool isSimpleJit = func->IsSimpleJit();
    if ((isSimpleJit && !func->GetTopFunc()->GetJnFunction()->DoFullJit()))
    {
        // SimpleJit that will never transition to full JIT: no counter needed.
        return;
    }
    // mov countAddress, <countAddress>
    IR::RegOpnd *const countAddressOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseCountAddressOpnd(countAddressOpnd, func);
    InsertMove(
        countAddressOpnd,
        IR::AddrOpnd::New(func->GetCallsCountAddress(), IR::AddrOpndKindDynamicMisc, func, true),
        insertBeforeInstr);
    // The count itself is a single byte at [countAddress].
    IR::IndirOpnd *const countOpnd = IR::IndirOpnd::New(countAddressOpnd, 0, TyUint8, func);
    const IR::AutoReuseOpnd autoReuseCountOpnd(countOpnd, func);
    if(!isSimpleJit)
    {
        // InsertIncUint8PreventOverflow [countAddress]
        InsertIncUInt8PreventOverflow(countOpnd, countOpnd, insertBeforeInstr);
        return;
    }
    // InsertDecUint8PreventOverflow [countAddress]
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertDecUInt8PreventOverflow(
        countOpnd,
        countOpnd,
        insertBeforeInstr,
        &onOverflowInsertBeforeInstr);
    // ($overflow:)
    // TransitionFromSimpleJit(framePointer)
    m_lowererMD.LoadHelperArgument(onOverflowInsertBeforeInstr, IR::Opnd::CreateFramePointerOpnd(func));
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperTransitionFromSimpleJit, func));
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
- IR::Opnd*
- Lowerer::GetImplicitCallFlagsOpnd()
- {
- return GetImplicitCallFlagsOpnd(m_func);
- }
- IR::Opnd*
- Lowerer::GetImplicitCallFlagsOpnd(Func * func)
- {
- return IR::MemRefOpnd::New(func->GetScriptContext()->GetThreadContext()->GetAddressOfImplicitCallFlags(), GetImplicitCallFlagsType(), func);
- }
- IR::Opnd*
- Lowerer::CreateClearImplicitCallFlagsOpnd()
- {
- return IR::IntConstOpnd::New(Js::ImplicitCall_None, GetImplicitCallFlagsType(), m_func);
- }
- void
- Lowerer::LowerSpreadArrayLiteral(IR::Instr *instr)
- {
- LoadScriptContext(instr);
- IR::Opnd *src2Opnd = instr->UnlinkSrc2();
- m_lowererMD.LoadHelperArgument(instr, src2Opnd);
- IR::Opnd *src1Opnd = instr->UnlinkSrc1();
- m_lowererMD.LoadHelperArgument(instr, src1Opnd);
- this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperSpreadArrayLiteral);
- }
IR::Instr *
Lowerer::LowerSpreadCall(IR::Instr *instr, Js::CallFlags callFlags, bool setupProfiledVersion)
{
    // Lowers a call with spread arguments to the SpreadCall helper (or the
    // profiled NewScObjArray spread helper when setupProfiledVersion is set).
    // Get the target function object, and emit function object test.
    IR::RegOpnd * functionObjOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    functionObjOpnd->m_isCallArg = true;
    if (!(callFlags & Js::CallFlags_New) && !setupProfiledVersion)
    {
        IR::LabelInstr* continueAfterExLabel = InsertContinueAfterExceptionLabelForDebugger(m_func, instr, false);
        this->m_lowererMD.GenerateFunctionObjectTest(instr, functionObjOpnd, false, continueAfterExLabel);
    }
    IR::Instr *spreadIndicesInstr;
    spreadIndicesInstr = GetLdSpreadIndicesInstr(instr);
    Assert(spreadIndicesInstr->m_opcode == Js::OpCode::LdSpreadIndices);
    // Get AuxArray
    IR::Opnd *spreadIndicesOpnd = spreadIndicesInstr->UnlinkSrc1();
    // Remove LdSpreadIndices from the argument chain
    instr->ReplaceSrc2(spreadIndicesInstr->UnlinkSrc2());
    // Emit the normal args
    callFlags = (Js::CallFlags)(callFlags | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
    // Profiled helper call requires three more parameters, ArrayProfileId, profileId, and the frame pointer.
    // This is just following the convention of HelperProfiledNewScObjArray call.
    const unsigned short extraArgsCount = setupProfiledVersion ? 5 : 2; // function object and AuxArray
    int32 argCount = this->m_lowererMD.LowerCallArgs(instr, (ushort)callFlags, extraArgsCount);
    // Emit our extra (first) args for the Spread helper in reverse order
    if (setupProfiledVersion)
    {
        IR::JitProfilingInstr* jitInstr = (IR::JitProfilingInstr*)instr;
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
    }
    m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
    m_lowererMD.LoadHelperArgument(instr, spreadIndicesOpnd);
    // Change the call target to our helper
    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(setupProfiledVersion ? IR::HelperProfiledNewScObjArraySpread : IR::HelperSpreadCall, this->m_func);
    instr->SetSrc1(helperOpnd);
    return this->m_lowererMD.LowerCall(instr, (Js::ArgSlot)argCount);
}
void
Lowerer::LowerDivI4Common(IR::Instr * instr)
{
    // asm.js integer div/rem lowering: division by zero yields 0 and
    // INT_MIN / -1 yields the dividend (div) or 0 (rem), per the emitted
    // sequence below, instead of faulting in the machine divide.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::Div_I4);
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    // MIN_INT/-1 path is only needed for signed operations
    // TEST src2, src2
    // JEQ $div0
    // CMP src1, MIN_INT
    // JEQ $minInt
    // JMP $div
    // $div0: [helper]
    // MOV dst, 0
    // JMP $done
    // $minInt: [helper]
    // CMP src2, -1
    // JNE $div
    // dst = MOV src1 / 0
    // JMP $done
    // $div:
    // dst = IDIV src2, src1
    // $done:
    IR::LabelInstr * div0Label = InsertLabel(true, instr);
    IR::LabelInstr * divLabel = InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    InsertTestBranch(instr->GetSrc2(), instr->GetSrc2(), Js::OpCode::BrEq_A, div0Label, div0Label);
    // $div0 body: dst = 0.
    InsertMove(instr->GetDst(), IR::IntConstOpnd::New(0, TyInt32, m_func), divLabel);
    InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
    if (instr->GetSrc1()->GetType() == TyInt32)
    {
        IR::LabelInstr * minIntLabel = nullptr;
        // we need to check for INT_MIN/-1 if divisor is either -1 or variable, and dividend is either INT_MIN or variable
        bool needsMinOverNeg1Check = !(instr->GetSrc2()->IsIntConstOpnd() && instr->GetSrc2()->AsIntConstOpnd()->GetValue() != -1);
        if (instr->GetSrc1()->IsIntConstOpnd())
        {
            if (needsMinOverNeg1Check && instr->GetSrc1()->AsIntConstOpnd()->GetValue() == INT_MIN)
            {
                // Dividend is known to be INT_MIN: go straight to the minInt path.
                minIntLabel = InsertLabel(true, divLabel);
                InsertBranch(Js::OpCode::Br, minIntLabel, div0Label);
            }
            else
            {
                // Dividend is a constant other than INT_MIN: no check needed.
                needsMinOverNeg1Check = false;
            }
        }
        else if(needsMinOverNeg1Check)
        {
            // Variable dividend: compare against INT_MIN at runtime.
            minIntLabel = InsertLabel(true, divLabel);
            InsertCompareBranch(instr->GetSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, m_func), Js::OpCode::BrEq_A, minIntLabel, div0Label);
        }
        if (needsMinOverNeg1Check)
        {
            Assert(minIntLabel);
            Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() == -1);
            if (!instr->GetSrc2()->IsIntConstOpnd())
            {
                InsertCompareBranch(instr->GetSrc2(), IR::IntConstOpnd::New(-1, TyInt32, m_func), Js::OpCode::BrNeq_A, divLabel, divLabel);
            }
            // INT_MIN / -1: Div produces the dividend, Rem produces 0.
            InsertMove(instr->GetDst(), instr->m_opcode == Js::OpCode::Div_I4 ? instr->GetSrc1() : IR::IntConstOpnd::New(0, TyInt32, m_func), divLabel);
            InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
        }
    }
    // Fall-through from the checks above: take the actual divide.
    InsertBranch(Js::OpCode::Br, divLabel, div0Label);
    m_lowererMD.EmitInt4Instr(instr);
}
- void
- Lowerer::LowerRemI4(IR::Instr * instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::Rem_I4);
- if (m_func->GetJnFunction()->GetIsAsmjsMode())
- {
- LowerDivI4Common(instr);
- }
- else
- {
- m_lowererMD.EmitInt4Instr(instr);
- }
- }
void
Lowerer::LowerDivI4(IR::Instr * instr)
{
    // Integer division lowering. asm.js gets the guarded common path; without
    // bailout info we emit the plain machine divide; otherwise we split the
    // instruction into a non-bailout divide plus bailout checks.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Div_I4);
    if (m_func->GetJnFunction()->GetIsAsmjsMode())
    {
        LowerDivI4Common(instr);
        return;
    }
    if(!instr->HasBailOutInfo())
    {
        m_lowererMD.EmitInt4Instr(instr);
        return;
    }
    Assert(!(instr->GetBailOutKind() & ~(IR::BailOnDivResultNotInt | IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero | IR::BailOutOnDivOfMinInt)));
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    // Split out and generate the bailout instruction
    const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
    instr->TransferTo(nonBailOutInstr);
    instr->InsertBefore(nonBailOutInstr);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(doneLabel);
    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    IR::LabelInstr * bailOutLabel = GenerateBailOut(instr);
    IR::Opnd * denominatorOpnd = nonBailOutInstr->GetSrc2();
    IR::Opnd * nominatorOpnd = nonBailOutInstr->GetSrc1();
    if (bailOutKind & IR::BailOutOnDivOfMinInt)
    {
        // Bailout if numerator is MIN_INT (could also check for denominator being -1
        // before bailing out, but does not seem worth the extra code..)
        InsertCompareBranch(nominatorOpnd, IR::IntConstOpnd::New(INT32_MIN, TyInt32, this->m_func, true), Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
    }
    if (denominatorOpnd->IsIntConstOpnd() && Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
    {
        // Power-of-2 constant divisor: bail out unless the low bits are zero
        // (i.e. the division would be exact), then divide via arithmetic shift.
        Assert((bailOutKind & (IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero)) == 0);
        int pow2 = denominatorOpnd->AsIntConstOpnd()->AsInt32();
        InsertTestBranch(nominatorOpnd, IR::IntConstOpnd::New(pow2 - 1, TyInt32, this->m_func, true),
            Js::OpCode::BrNeq_A, bailOutLabel, nonBailOutInstr);
        nonBailOutInstr->m_opcode = Js::OpCode::Shr_A;
        nonBailOutInstr->ReplaceSrc2(IR::IntConstOpnd::New(Math::Log2(pow2), TyInt32, this->m_func, true));
        LowererMD::ChangeToShift(nonBailOutInstr, false);
        LowererMD::Legalize(nonBailOutInstr);
    }
    else
    {
        if (bailOutKind & IR::BailOutOnDivByZero)
        {
            // Bailout if denominator is 0
            InsertTestBranch(denominatorOpnd, denominatorOpnd, Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
        }
        // Lower the div and bailout if there is a reminder (machine specific)
        IR::Instr * insertBeforeInstr = m_lowererMD.LowerDivI4AndBailOnReminder(nonBailOutInstr, bailOutLabel);
        IR::Opnd * resultOpnd = nonBailOutInstr->GetDst();
        if (bailOutKind & IR::BailOutOnNegativeZero)
        {
            // A zero result with a negative denominator means the true result
            // is -0, which can't be represented as an int — bail out.
            // TEST result, result
            // JNE skipNegDenominatorCheckLabel // Result not 0
            // TEST denominator, denominator
            // JNSB/BMI bailout // bail if negative
            // skipNegDenominatorCheckLabel:
            IR::LabelInstr * skipNegDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            // Skip negative denominator check if the result is not 0
            InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrNeq_A, skipNegDenominatorCheckLabel, insertBeforeInstr);
            IR::LabelInstr * negDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            insertBeforeInstr->InsertBefore(negDenominatorCheckLabel);
            // Jump to done if the denominator is not negative
            InsertTestBranch(denominatorOpnd, denominatorOpnd,
                LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), bailOutLabel, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(skipNegDenominatorCheckLabel);
        }
    }
    // We are all fine, jump around the bailout to done
    InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabel);
}
- void
- Lowerer::LowerRemR8(IR::Instr * instr)
- {
- Assert(instr);
- Assert(instr->m_opcode == Js::OpCode::Rem_A);
- Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
- m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
- m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
- instr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperOp_Rem_Double, m_func));
- m_lowererMD.LowerCall(instr, 0);
- }
// Lower NewScopeSlots: allocate a scope-slot array of 'count' Vars (on the
// recycler heap, or reusing a stack allocation when doStackSlots), initialize
// its header slots (encoded slot count and function body), and fill the
// remaining slots with undefined -- straight-line code for small counts, a
// partially-unrolled loop otherwise.
void
Lowerer::LowerNewScopeSlots(IR::Instr * instr, bool doStackSlots)
{
    Func * func = m_func;
    if (PHASE_OFF(Js::NewScopeSlotFastPathPhase, func))
    {
        // Fast path disabled: call the runtime helper instead.
        this->LowerUnaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlots);
        return;
    }

    uint const count = instr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    uint const allocSize = count * sizeof(Js::Var);
    // Slots before FirstSlotIndex are header slots; the rest hold scope values.
    uint const actualSlotCount = count - Js::ScopeSlots::FirstSlotIndex;

    IR::RegOpnd * dst = instr->UnlinkDst()->AsRegOpnd();

    // dst = RecyclerAlloc(allocSize)
    // dst[EncodedSlotCountSlotIndex] = min(actualSlotCount, MaxEncodedSlotCount);
    // dst[ScopeMetadataSlotIndex] = FunctionBody;
    // mov undefinedOpnd, undefined
    // dst[FirstSlotIndex..count] = undefinedOpnd;

    // Note: stack allocation of both scope slots and frame display are done together
    // in lowering of NewStackFrameDisplay
    if (!doStackSlots)
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, allocSize, dst, instr);
    }
    GenerateMemInit(dst, Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
        min<uint>(actualSlotCount, Js::ScopeSlots::MaxEncodedSlotCount), instr, !doStackSlots);

    IR::Opnd * functionBodyOpnd = this->LoadFunctionBodyOpnd(instr);
    GenerateMemInit(dst, Js::ScopeSlots::ScopeMetadataSlotIndex * sizeof(Js::Var),
        functionBodyOpnd, instr, !doStackSlots);

    IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    const IR::AutoReuseOpnd autoReuseUndefinedOpnd(undefinedOpnd, func);

    // avoid using a register for the undefined pointer if we are going to assign 1 or 2
    if (actualSlotCount > 2 && !undefinedOpnd->IsRegOpnd())
    {
        // mov undefinedOpnd, undefined
        IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyVar, func);
        InsertMove(regOpnd, undefinedOpnd, instr);
        undefinedOpnd = regOpnd;
    }

    int const loopUnrollCount = 8;

    if (actualSlotCount <= loopUnrollCount * 2)
    {
        // Just generate all the assignment in straight line code
        //  mov [dst + Js::FirstSlotIndex], undefinedOpnd
        //  ...
        //  mov [dst + count - 1], undefinedOpnd
        for (unsigned int i = Js::ScopeSlots::FirstSlotIndex; i < count; i++)
        {
            GenerateMemInit(dst, sizeof(Js::Var) * i, undefinedOpnd, instr, !doStackSlots);
        }
    }
    else
    {
        // Just generate all the assignment in loop of loopUnrollCount and the rest as straight line code
        //
        //      lea currOpnd, [dst + sizeof(Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount)];
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 1] , undefinedOpnd
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 2] , undefinedOpnd
        //      ...
        //      mov [currOpnd + loopUnrollCount], undefinedOpnd
        // $LoopTop:
        //      mov [currOpnd + loopUnrollCount - 1], undefinedOpnd
        //      mov [currOpnd + loopUnrollCount - 2], undefinedOpnd
        //      ...
        //      mov [currOpnd], undefinedOpnd
        //      lea currOpnd, [currOpnd - loopUnrollCount]
        //      cmp dst, currOpnd
        //      jlt $LoopTop
        uint nLoop = actualSlotCount / loopUnrollCount;
        uint loopAssignCount = nLoop * loopUnrollCount;
        uint leftOverAssignCount = actualSlotCount - loopAssignCount; // The left over assignments

        IR::RegOpnd * currOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseCurrOpnd(currOpnd, m_func);
        InsertLea(
            currOpnd,
            IR::IndirOpnd::New(
                dst,
                sizeof(Js::Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount),
                TyMachPtr,
                func),
            instr);

        // Peel the non-multiple-of-unroll assignments off before the loop.
        for (unsigned int i = 0; i < leftOverAssignCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount + leftOverAssignCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }

        IR::LabelInstr * loopTop = IR::LabelInstr::New(Js::OpCode::Label, func);
        instr->InsertBefore(loopTop);
        loopTop->m_isLoopTop = true;

        // Register a Loop structure so the register allocator sees the back edge
        // and keeps the syms used in the loop live across it.
        Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
        loopTop->SetLoop(loop);
        loop->SetLoopTopInstr(loopTop);
        loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);

        for (unsigned int i = 0; i < loopUnrollCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        InsertLea(currOpnd, IR::IndirOpnd::New(currOpnd, -((int)sizeof(Js::Var) * loopUnrollCount), TyMachPtr, func), instr);

        InsertCompareBranch(dst, currOpnd, Js::OpCode::BrLt_A, true, loopTop, instr);

        loop->regAlloc.liveOnBackEdgeSyms->Set(currOpnd->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(dst->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(undefinedOpnd->AsRegOpnd()->m_sym->m_id);
    }

    if (!doStackSlots)
    {
        // Publish the new scope slot array to the local closure sym.
        InsertMove(IR::RegOpnd::New(instr->m_func->GetLocalClosureSym(), TyMachPtr, func), dst, instr);
    }
    instr->Remove();
}
- void Lowerer::LowerLdInnerFrameDisplay(IR::Instr *instr)
- {
- bool isStrict = instr->m_func->GetJnFunction()->GetIsStrictMode();
- if (isStrict)
- {
- if (instr->GetSrc2())
- {
- this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplay);
- }
- else
- {
- #if DBG
- instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
- #endif
- this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplayNoParent);
- }
- }
- else
- {
- if (instr->GetSrc2())
- {
- this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplay);
- }
- else
- {
- #if DBG
- instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
- #endif
- this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplayNoParent);
- }
- }
- }
// Lower LdFrameDisplay: build a new frame display whose first scope is src1 and
// whose remaining scopes are copied from the parent display in src2. Falls back
// to helper calls when the environment depth is unknown, when a byte-code temp
// dst indicates a prepended block scope, or when the fast path phase is off.
// With doStackFrameDisplay, the display and the scope slot array are stack
// allocated at function entry, guarded by a runtime check that stack nested
// functions are still enabled.
void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
{
    bool isStrict = instr->m_func->GetJnFunction()->GetIsStrictMode();
    uint16 envDepth = instr->m_func->GetJnFunction()->GetEnvDepth();
    Func *func = this->m_func;

    // envDepth of -1 indicates unknown depth (eval expression or HTML event handler).
    // We could still fast-path these by generating a loop over the (dynamically loaded) scope chain length,
    // but I doubt it's worth it.
    // If the dst opnd is a byte code temp, that indicates we're prepending a block scope or some such and
    // shouldn't attempt to do this.
    if (envDepth == (uint16)-1 ||
        (!doStackFrameDisplay && instr->GetDst()->AsRegOpnd()->m_sym->IsTempReg(instr->m_func)) ||
        PHASE_OFF(Js::FrameDisplayFastPathPhase, func))
    {
        if (isStrict)
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplayNoParent);
            }
        }
        else
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplayNoParent);
            }
        }
        return;
    }

    // The new display holds the current scope plus all parent scopes.
    uint16 frameDispLength = envDepth + 1;
    Assert(frameDispLength > 0);

    IR::RegOpnd *dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::RegOpnd *currentFrameOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    uint allocSize = sizeof(Js::FrameDisplay) + (frameDispLength * sizeof(Js::Var));
    if (doStackFrameDisplay)
    {
        IR::Instr *insertInstr = func->GetFunctionEntryInsertionPoint();

        // Initialize stack pointers for scope slots and frame display together at the top of the function
        // (in case we bail out before executing the instructions).
        IR::LabelInstr *labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // Check whether stack functions have been disabled since we jitted.
        // If they have, then we must allocate closure memory on the heap.
        InsertTestBranch(IR::MemRefOpnd::New(m_func->GetJnFunction()->GetAddressOfFlags(), TyInt8, m_func),
                         IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, m_func, true),
                         Js::OpCode::BrEq_A, labelNoStackFunc, insertInstr);

        // allocSize is greater than TyMachPtr and hence changing the initial size to TyMisc
        StackSym * stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, allocSize);
        InsertLea(dstOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        uint scopeSlotAllocSize =
            (m_func->GetJnFunction()->scopeSlotArraySize + Js::ScopeSlots::FirstSlotIndex) * sizeof(Js::Var);

        stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, scopeSlotAllocSize);
        InsertLea(currentFrameOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        InsertBranch(Js::OpCode::Br, labelDone, insertInstr);

        // Heap fallback: stack nested functions were disabled after jitting.
        insertInstr->InsertBefore(labelNoStackFunc);
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, insertInstr, true);
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, scopeSlotAllocSize, currentFrameOpnd, insertInstr, true);

        insertInstr->InsertBefore(labelDone);

        // Publish both pointers to their dedicated local syms.
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalFrameDisplaySym(), 0, TyMachReg, m_func), dstOpnd, insertInstr);
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalClosureSym(), 0, TyMachReg, m_func), currentFrameOpnd, insertInstr);
    }
    else
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
    }

    // Copy contents of environment
    // Work back to front to leave the head element(s) in cache
    if (envDepth > 0)
    {
        IR::RegOpnd *envOpnd = instr->UnlinkSrc2()->AsRegOpnd();
        for (uint16 i = envDepth; i >= 1; i--)
        {
            // dst.scopes[i] = env.scopes[i - 1] -- parent scopes shift down one slot.
            IR::Opnd *scopeOpnd = IR::RegOpnd::New(TyMachReg, func);
            IR::Opnd *envLoadOpnd =
                IR::IndirOpnd::New(envOpnd, Js::FrameDisplay::GetOffsetOfScopes() + ((i - 1) * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(scopeOpnd, envLoadOpnd, instr);

            IR::Opnd *dstStoreOpnd =
                IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes() + (i * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(dstStoreOpnd, scopeOpnd, instr);
        }
    }

    // Assign current element.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, func),
        currentFrameOpnd,
        instr);

    // Combine tag, strict mode flag, and length
    uintptr_t bits = 1 |
        (isStrict << (Js::FrameDisplay::GetOffsetOfStrictMode() * 8)) |
        (frameDispLength << (Js::FrameDisplay::GetOffsetOfLength() * 8));
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, 0, TyMachReg, func),
        IR::AddrOpnd::New((void*)bits, IR::AddrOpndKindConstant, func, true),
        instr);

    instr->Remove();
}
- IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Func *const func) const
- {
- return CreateFunctionBodyOpnd(func->GetJnFunction());
- }
- IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Js::FunctionBody *const functionBody) const
- {
- return IR::AddrOpnd::New(functionBody, IR::AddrOpndKindDynamicFunctionBody, m_func, true);
- }
- bool
- Lowerer::GenerateRecyclerOrMarkTempAlloc(IR::Instr * instr, IR::RegOpnd * dstOpnd, IR::JnHelperMethod allocHelper, size_t allocSize, IR::SymOpnd ** tempObjectSymOpnd)
- {
- if (instr->dstIsTempObject)
- {
- *tempObjectSymOpnd = GenerateMarkTempAlloc(dstOpnd, allocSize, instr);
- return false;
- }
- this->GenerateRecyclerAlloc(allocHelper, allocSize, dstOpnd, instr);
- *tempObjectSymOpnd = nullptr;
- return true;
- }
// Stack-allocate a mark-temp object of allocSize bytes, preceded by one extra
// pointer-sized slot used to hold the boxed (heap) copy of the value. dstOpnd
// is pointed at the object portion (just past the boxed-instance slot), and
// that slot is null-initialized -- either in place, or hoisted out to the
// outermost loop when we're inside one. Returns the sym opnd for the object.
IR::SymOpnd *
Lowerer::GenerateMarkTempAlloc(IR::RegOpnd *const dstOpnd, const size_t allocSize, IR::Instr *const insertBeforeInstr)
{
    Assert(dstOpnd);
    Assert(allocSize != 0);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // Allocate stack space for the reg exp instance, and a slot for the boxed value
    StackSym *const tempObjectSym = StackSym::New(TyMisc, func);
    m_func->StackAllocate(tempObjectSym, (int)(allocSize + sizeof(void *)));
    // Offset past the boxed-instance slot so dst points at the object itself.
    IR::SymOpnd * tempObjectOpnd = IR::SymOpnd::New(tempObjectSym, sizeof(void *), TyVar, func);
    InsertLea(dstOpnd, tempObjectOpnd, insertBeforeInstr);

    // Initialize the boxed instance slot
    if (this->outerMostLoopLabel == nullptr)
    {
        // Not inside a loop: null the boxed-instance slot right here.
        GenerateMemInit(dstOpnd, -(int)sizeof(void *), IR::AddrOpnd::NewNull(func), insertBeforeInstr, false);
    }
    else if (!PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func))
    {
        // Inside a loop: hoist the null initialization to the outermost loop
        // label so it isn't redone on every iteration.
        InsertMove(IR::SymOpnd::New(tempObjectSym, TyMachPtr, func), IR::AddrOpnd::NewNull(func), this->outerMostLoopLabel, false);
    }
    return tempObjectOpnd;
}
- void Lowerer::LowerBrFncCachedScopeEq(IR::Instr *instr)
- {
- Assert(instr->m_opcode == Js::OpCode::BrFncCachedScopeEq || instr->m_opcode == Js::OpCode::BrFncCachedScopeNeq);
- Js::OpCode opcode = (instr->m_opcode == Js::OpCode::BrFncCachedScopeEq ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A);
- IR::RegOpnd *src1Reg = instr->UnlinkSrc1()->AsRegOpnd();
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1Reg, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), TyMachReg, this->m_func);
- this->InsertCompareBranch(indirOpnd, instr->UnlinkSrc2(), opcode, false, instr->AsBranchInstr()->GetTarget(), instr->m_next);
- instr->Remove();
- }
- IR::Instr* Lowerer::InsertLoweredRegionStartMarker(IR::Instr* instrToInsertBefore)
- {
- AssertMsg(instrToInsertBefore->m_prev != nullptr, "Can't insert lowered region start marker as the first instr in the func.");
- IR::LabelInstr* startMarkerLabel = IR::LabelInstr::New(Js::OpCode::Label, instrToInsertBefore->m_func);
- instrToInsertBefore->InsertBefore(startMarkerLabel);
- return startMarkerLabel;
- }
- IR::Instr* Lowerer::RemoveLoweredRegionStartMarker(IR::Instr* startMarkerInstr)
- {
- AssertMsg(startMarkerInstr->m_prev != nullptr, "Lowered region start marker became the first instruction in the func after lowering?");
- IR::Instr* prevInstr = startMarkerInstr->m_prev;
- startMarkerInstr->Remove();
- return prevInstr;
- }
- IR::Instr* Lowerer::GetLdSpreadIndicesInstr(IR::Instr *instr)
- {
- IR::Opnd *src2 = instr->GetSrc2();
- if (!src2->IsSymOpnd())
- {
- return nullptr;
- }
- IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
- StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
- Assert(argLinkSym->IsSingleDef());
- return argLinkSym->m_instrDef;
- }
- bool Lowerer::IsSpreadCall(IR::Instr *instr)
- {
- IR::Instr *lastInstr = GetLdSpreadIndicesInstr(instr);
- return lastInstr && lastInstr->m_opcode == Js::OpCode::LdSpreadIndices;
- }
- // When under debugger, generate a new label to be used as safe place to jump after ignore exception,
- // insert it after insertAfterInstr, and return the label inserted.
- // Returns nullptr/NoOP for non-debugger code path.
- //static
- IR::LabelInstr* Lowerer::InsertContinueAfterExceptionLabelForDebugger(Func* func, IR::Instr* insertAfterInstr, bool isHelper)
- {
- Assert(func);
- Assert(insertAfterInstr);
- IR::LabelInstr* continueAfterExLabel = nullptr;
- if (func->IsJitInDebugMode())
- {
- continueAfterExLabel = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
- insertAfterInstr->InsertAfter(continueAfterExLabel);
- }
- return continueAfterExLabel;
- }
// Emit the fast path for a switch over single-character strings: verify the
// string has length 1, fetch (or lazily materialize) its character buffer,
// load the character, rebase it by the smallest case value, range-check it
// against the jump table bounds, and dispatch through the table.
void Lowerer::GenerateSingleCharStrJumpTableLookup(IR::Instr * instr)
{
    IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
    Func * func = instr->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV strLengthOpnd, str->length
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);

    // CMP strLengthOpnd, 1
    // JNE defaultLabel             -- only 1-char strings can hit a table entry
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)multiBrInstr->GetBranchJumpTable()->defaultTarget;
    InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);

    // MOV strBuffer, str->psz
    IR::RegOpnd * strBufferOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(strBufferOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, func), instr);

    // TST strBuffer, strBuffer
    // JNE $continue                -- buffer pointer already materialized
    InsertTestBranch(strBufferOpnd, strBufferOpnd, Js::OpCode::BrNeq_A, continueLabel, instr);

    // $helper:
    //      PUSH str
    //      CALL JavascriptString::GetSzHelper   -- materialize the buffer
    //      MOV strBuffer, eax
    // $continue:
    instr->InsertBefore(helperLabel);
    m_lowererMD.LoadHelperArgument(instr, instr->GetSrc1());
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, strBufferOpnd, IR::HelperCallOpnd::New(IR::HelperString_GetSz, func), func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 0);
    instr->InsertBefore(continueLabel);

    // MOV charOpnd, [strBuffer]    -- load the single 16-bit character
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(charOpnd, IR::IndirOpnd::New(strBufferOpnd, 0, TyUint16, func), instr);

    if (multiBrInstr->m_baseCaseValue != 0)
    {
        // SUB charOpnd, baseIndex  -- rebase so the table starts at index 0
        InsertSub(false, charOpnd, charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_baseCaseValue, TyUint32, func), instr);
    }

    // CMP charOpnd, lastCaseIndex - baseCaseIndex
    // JA defaultLabel              -- unsigned compare also catches chars below base
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_lastCaseValue - multiBrInstr->m_baseCaseValue, TyUint32, func, true),
        Js::OpCode::BrGt_A, true, defaultLabelInstr, instr);

    instr->UnlinkSrc1();
    LowerJumpTableMultiBranch(multiBrInstr, charOpnd);
}
// Lower a switch over strings: emit a cheap length-based prefilter that jumps
// straight to the default target on a mismatch (exact length when all case
// strings share one length, otherwise a 32-bit length bit mask), then fall
// back to the dictionary lookup helper for the actual dispatch.
void Lowerer::GenerateSwitchStringLookup(IR::Instr * instr)
{
    /* Collect information about string length in all the case*/
    charcount_t minLength = UINT_MAX;
    charcount_t maxLength = 0;
    BVUnit32 bvLength;      // bit i set => some case string has length i (only lengths < 32 recorded)
    instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->dictionary.Map([&](Js::JavascriptString * str, void *)
    {
        charcount_t len = str->GetLength();
        minLength = min(minLength, str->GetLength());
        maxLength = max(maxLength, str->GetLength());
        if (len < 32)
        {
            bvLength.Set(len);
        }
    });

    Func * func = instr->m_func;
    // Load the switch operand's length for the prefilter.
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->defaultTarget;
    if (minLength == maxLength)
    {
        // Generate single length filter
        InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(minLength, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else if (maxLength < 32)
    {
        // Generate bit filter

        // Jump to default label if the bit is not on for the length % 32
        IR::IntConstOpnd * lenBitMaskOpnd = IR::IntConstOpnd::New(bvLength.GetWord(), TyUint32, func);
        InsertBitTestBranch(lenBitMaskOpnd, strLengthOpnd, false, defaultLabelInstr, instr);
        // Jump to default label if the length is >= 32 (the bit mask only covers 0..31)
        InsertTestBranch(strLengthOpnd, IR::IntConstOpnd::New(UINT32_MAX ^ 31, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else
    {
        // CONSIDER: Generate range filter
    }

    this->LowerMultiBr(instr, IR::HelperOp_SwitchStringLookUp);
}
- IR::Instr *
- Lowerer::LowerTry(IR::Instr* instr, bool tryCatch)
- {
- if (this->m_func->hasBailout)
- {
- this->EnsureBailoutReturnValueSym();
- }
- this->EnsureHasBailedOutSym();
- IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
- IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), this->m_func);
- instr->InsertBefore(setInstr);
- LowererMD::Legalize(setInstr);
- return m_lowererMD.LowerTry(instr, tryCatch ? IR::HelperOp_TryCatch : IR::HelperOp_TryFinally);
- }
- void
- Lowerer::EnsureBailoutReturnValueSym()
- {
- if (this->m_func->m_bailoutReturnValueSym == nullptr)
- {
- this->m_func->m_bailoutReturnValueSym = StackSym::New(TyVar, this->m_func);
- this->m_func->StackAllocate(this->m_func->m_bailoutReturnValueSym, sizeof(Js::Var));
- }
- }
- void
- Lowerer::EnsureHasBailedOutSym()
- {
- if (this->m_func->m_hasBailedOutSym == nullptr)
- {
- this->m_func->m_hasBailedOutSym = StackSym::New(TyUint32, this->m_func);
- this->m_func->StackAllocate(this->m_func->m_hasBailedOutSym, MachRegInt);
- }
- }
// Emit (at most once per region) the bailout "return thunk" for a try/catch
// region, appended after the function's exit instruction. After an EH bailout,
// the region's helper call returns through this thunk, which continues at the
// parent region's thunk -- or, for a region directly under the root, at
// restoreLabel (where the bailed-out return value is restored).
void
Lowerer::InsertReturnThunkForRegion(Region* region, IR::LabelInstr* restoreLabel)
{
    Assert(this->m_func->isPostLayout);
    Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch);

    if (!region->returnThunkEmitted)
    {
        this->m_func->m_exitInstr->InsertAfter(region->GetBailoutReturnThunkLabel());

        bool newLastInstrInserted = false;
        IR::Instr * insertBeforeInstr = region->GetBailoutReturnThunkLabel()->m_next;
        if (insertBeforeInstr == nullptr)
        {
            // The thunk label is now the last instruction; add a placeholder Nop
            // so the lowered return sequence has something to insert before.
            Assert(this->m_func->m_exitInstr == this->m_func->m_tailInstr);
            insertBeforeInstr = IR::Instr::New(Js::OpCode::Nop, this->m_func);
            newLastInstrInserted = true;
            region->GetBailoutReturnThunkLabel()->InsertAfter(insertBeforeInstr);
            this->m_func->m_tailInstr = insertBeforeInstr;
        }

        // Continuation target: the parent region's thunk, or restoreLabel at the root.
        IR::LabelOpnd * continuationAddr;
        if (region->GetParent()->GetType() != RegionTypeRoot)
        {
            continuationAddr = IR::LabelOpnd::New(region->GetParent()->GetBailoutReturnThunkLabel(), this->m_func);
        }
        else
        {
            continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
        }

        IR::Instr * lastInstr = m_lowererMD.LowerEHRegionReturn(insertBeforeInstr, continuationAddr);

        if (newLastInstrInserted)
        {
            // Drop the placeholder and make the lowered sequence the new tail.
            Assert(this->m_func->m_tailInstr == insertBeforeInstr);
            insertBeforeInstr->Remove();
            this->m_func->m_tailInstr = lastInstr;
        }

        region->returnThunkEmitted = true;
    }
}
- void
- Lowerer::SetHasBailedOut(IR::Instr * bailoutInstr)
- {
- Assert(this->m_func->isPostLayout);
- IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
- IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func);
- bailoutInstr->InsertBefore(setInstr);
- LowererMD::Legalize(setInstr, true);
- }
// After an EH bailout call on x86, restore ESP past the argument space pushed
// by StartCall(s) still in flight at the bailout point, including one extra
// padding slot for each StartCall whose out-param bytes weren't already
// stack-aligned. Returns the inserted LEA (so the caller can keep inserting
// after it), or bailoutInstr itself when nothing was emitted (no StartCalls,
// or a non-x86 target where the convention doesn't require this).
IR::Instr*
Lowerer::EmitEHBailoutStackRestore(IR::Instr * bailoutInstr)
{
    Assert(this->m_func->isPostLayout);

#ifdef _M_IX86
    BailOutInfo * bailoutInfo = bailoutInstr->GetBailOutInfo();
    if (bailoutInfo->startCallCount != 0)
    {
        uint totalStackToBeRestored = 0;
        uint stackAlignmentAdjustment = 0;
        // Count one alignment-padding slot per StartCall whose out-param bytes
        // are not a multiple of the machine stack alignment.
        for (uint i = 0; i < bailoutInfo->startCallCount; i++)
        {
            uint startCallOutParamCount = bailoutInfo->GetStartCallOutParamCount(i);
            if ((Math::Align<int32>(startCallOutParamCount * MachPtr, MachStackAlignment) - (startCallOutParamCount * MachPtr)) != 0)
            {
                stackAlignmentAdjustment++;
            }
        }

        totalStackToBeRestored = (bailoutInfo->totalOutParamCount + stackAlignmentAdjustment) * MachPtr;

        // LEA esp, [esp + totalStackToBeRestored]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegStackPointer(), TyMachReg, this->m_func);
        IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, totalStackToBeRestored, TyMachReg, this->m_func);
        IR::Instr * stackRestoreInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);

        bailoutInstr->InsertAfter(stackRestoreInstr);
        return stackRestoreInstr;
    }
#endif
    return bailoutInstr;
}
- void
- Lowerer::EmitSaveEHBailoutReturnValueAndJumpToRetThunk(IR::Instr * insertAfterInstr)
- {
- Assert(this->m_func->isPostLayout);
- // After the CALL SaveAllRegistersAndBailout instruction, emit
- //
- // MOV bailoutReturnValueSym, eax
- // JMP $currentRegion->bailoutReturnThunkLabel
- IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
- IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
- IR::Instr * movInstr = IR::Instr::New(LowererMD::GetStoreOp(TyVar), bailoutReturnValueSymOpnd, eaxOpnd, this->m_func);
- insertAfterInstr->InsertAfter(movInstr);
- LowererMD::Legalize(movInstr, true);
- IR::BranchInstr * jumpInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, this->currentRegion->GetBailoutReturnThunkLabel(), this->m_func);
- movInstr->InsertAfter(jumpInstr);
- }
- void
- Lowerer::EmitRestoreReturnValueFromEHBailout(IR::LabelInstr * restoreLabel, IR::LabelInstr * epilogLabel)
- {
- Assert(this->m_func->isPostLayout);
- // JMP $epilog
- // $restore:
- // MOV eax, bailoutReturnValueSym
- // $epilog:
- IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
- IR::RegOpnd * eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
- IR::Instr * movInstr = IR::Instr::New(LowererMD::GetLoadOp(TyVar), eaxOpnd, bailoutReturnValueSymOpnd, this->m_func);
- epilogLabel->InsertBefore(restoreLabel);
- epilogLabel->InsertBefore(movInstr);
- LowererMD::Legalize(movInstr, true);
- restoreLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, epilogLabel, this->m_func));
- }
// Insert "branch to targetLabel if bit 'bitIndex' of bitMaskOpnd is set"
// (or clear, when jumpIfBitOn is false) before insertBeforeInstr, using the
// best instruction sequence the target architecture offers.
void
Lowerer::InsertBitTestBranch(IR::Opnd * bitMaskOpnd, IR::Opnd * bitIndex, bool jumpIfBitOn, IR::LabelInstr * targetLabel, IR::Instr * insertBeforeInstr)
{
#if defined(_M_IX86) || defined(_M_AMD64)
    // Generate bit test and branch
    //      BT bitMaskOpnd, bitIndex    -- bit goes to CF
    //      JB/JAE targetLabel          -- branch on CF set / clear
    Func * func = this->m_func;
    IR::Instr * instr = IR::Instr::New(Js::OpCode::BT, func);
    instr->SetSrc1(bitMaskOpnd);
    instr->SetSrc2(bitIndex);
    insertBeforeInstr->InsertBefore(instr);

    if (!(bitMaskOpnd->IsRegOpnd() || bitMaskOpnd->IsIndirOpnd() || bitMaskOpnd->IsMemRefOpnd()))
    {
        // BT needs its first operand in a register or memory; hoist other
        // forms (e.g. an int constant mask) into a register first.
        instr->HoistSrc1(Js::OpCode::MOV);
    }
    InsertBranch(jumpIfBitOn ? Js::OpCode::JB : Js::OpCode::JAE, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM)
    // ARM doesn't have a bit test instruction, so just generate:
    //      MOV r1, 1
    //      SHL r1, bitIndex
    //      TEST bitMaskOpnd, r1
    //      BEQ/BNEQ targetLabel
    Func * func = this->m_func;
    IR::RegOpnd * lenBitOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(lenBitOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    InsertShift(Js::OpCode::Shl_I4, false, lenBitOpnd, lenBitOpnd, bitIndex, insertBeforeInstr);
    InsertTestBranch(lenBitOpnd, bitMaskOpnd, jumpIfBitOn? Js::OpCode::BrNeq_A :Js::OpCode::BrEq_A, targetLabel, insertBeforeInstr);
#else
    AssertMsg(false, "Not implemented");
#endif
}
- //
- // Generates an object test and then a string test with the static string type
- //
- void
- Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
- {
- Assert(srcReg);
- if (!srcReg->GetValueType().IsString())
- {
- if (generateObjectCheck && !srcReg->IsNotTaggedValue())
- {
- this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
- }
- // CMP [regSrcStr + offset(type)] , static string type -- check base string type
- // BrEq/BrNeq labelHelper.
- IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
- IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
- if (continueLabel)
- {
- InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
- }
- else
- {
- InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
- }
- }
- }
// Lower Conv_Num. With fast paths enabled and a register source, a tagged-int
// source is simply copied to dst (with a runtime object test when its int-ness
// is unknown); otherwise the ToNumber helper is called.
void
Lowerer::LowerConvNum(IR::Instr *instrLoad, bool noMathFastPath)
{
    if (PHASE_OFF(Js::OtherFastPathPhase, this->m_func) || noMathFastPath || !instrLoad->GetSrc1()->IsRegOpnd())
    {
        // No fast path: always call the helper.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
        return;
    }

    //      MOV dst, src1
    //      TEST src1, 1
    //      JNE $done
    //      call ToNumber
    // $done:

    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = NULL;
    IR::Instr *instr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // MOV dst, src1 -- for a tagged int the copy is the whole conversion.
        instr = LowererMD::CreateAssign(instrLoad->GetDst(), src1, instrLoad);

        if (!isInt)
        {
            // Int-ness unknown: test at runtime and skip the helper when tagged.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            bool didTest = m_lowererMD.GenerateObjectTest(src1, instrLoad, labelDone);
            if (didTest)
            {
                // This label is needed only to mark the helper block
                IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                instrLoad->InsertBefore(labelHelper);
            }
        }
    }

    if (!isInt)
    {
        if (labelDone)
        {
            instrLoad->InsertAfter(labelDone);
        }
        // Not (provably) a tagged int: fall back to the ToNumber helper.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
    }
    else
    {
        // Known tagged int: the MOV above suffices; drop the original instr.
        instrLoad->Remove();
    }
}
- IR::Opnd *
- Lowerer::LoadSlotArrayWithCachedLocalType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
- {
- IR::RegOpnd *opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
- if (propertySymOpnd->UsesAuxSlot())
- {
- // If we use the auxiliary slot array, load it and return it
- IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::Opnd *opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
- LowererMD::CreateAssign(opndSlotArray, opndIndir, instrInsert);
- return opndSlotArray;
- }
- else
- {
- // If we use inline slot return the address to the object header
- return opndBase;
- }
- }
- IR::Opnd *
- Lowerer::LoadSlotArrayWithCachedProtoType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
- {
- // Get the prototype object from the cache
- Js::RecyclableObject *prototypeObject = propertySymOpnd->GetProtoObject();
- Assert(prototypeObject != nullptr);
- if (propertySymOpnd->UsesAuxSlot())
- {
- // If we use the auxiliary slot array, load it from the prototype object and return it
- IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
- IR::Opnd *opnd = IR::MemRefOpnd::New((char*)prototypeObject + Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func, IR::AddrOpndKindDynamicAuxSlotArrayRef);
- LowererMD::CreateAssign(opndSlotArray, opnd, instrInsert);
- return opndSlotArray;
- }
- else
- {
- // If we use inline slot return the address of the prototype object
- return IR::MemRefOpnd::New(prototypeObject, TyMachReg, this->m_func);
- }
- }
- IR::Instr *
- Lowerer::LowerLdAsmJsEnv(IR::Instr * instr)
- {
- Assert(m_func->GetJnFunction()->GetIsAsmJsFunction());
- IR::Opnd * functionObjOpnd;
- IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
- Assert(!instr->GetSrc1());
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfModuleMemory(), TyMachPtr, m_func);
- instr->SetSrc1(indirOpnd);
- LowererMD::ChangeToAssign(instr);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerLdEnv(IR::Instr * instr)
- {
- IR::Opnd * src1 = instr->GetSrc1();
- IR::Opnd * functionObjOpnd;
- IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
- Assert(!instr->GetSrc1());
- if (src1 == nullptr || functionObjOpnd->IsRegOpnd())
- {
- IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
- Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, m_func);
- instr->SetSrc1(indirOpnd);
- }
- else
- {
- Assert(functionObjOpnd->IsAddrOpnd());
- IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
- IR::MemRefOpnd* functionEnvMemRefOpnd = IR::MemRefOpnd::New((void *)((intptr)functionObjAddrOpnd->m_address + Js::ScriptFunction::GetOffsetOfEnvironment()),
- TyMachPtr, this->m_func, IR::AddrOpndKindDynamicFunctionEnvironmentRef);
- instr->SetSrc1(functionEnvMemRefOpnd);
- }
- LowererMD::ChangeToAssign(instr);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerFrameDisplayCheck(IR::Instr * instr)
- {
- IR::Instr *instrPrev = instr->m_prev;
- IR::Instr *insertInstr = instr->m_next;
- IR::AddrOpnd *addrOpnd = instr->UnlinkSrc2()->AsAddrOpnd();
- FrameDisplayCheckRecord *record = (FrameDisplayCheckRecord*)addrOpnd->m_address;
- IR::LabelInstr *errorLabel = nullptr;
- IR::LabelInstr *continueLabel = nullptr;
- IR::RegOpnd *envOpnd = instr->GetDst()->AsRegOpnd();
- uint32 frameDisplayOffset = Js::FrameDisplay::GetOffsetOfScopes()/sizeof(Js::Var);
- if (record->slotId != (uint32)-1 && record->slotId > frameDisplayOffset)
- {
- // Check that the frame display has enough scopes in it to satisfy the code.
- errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
- Js::FrameDisplay::GetOffsetOfLength(),
- TyUint16, m_func, true);
- IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(record->slotId - frameDisplayOffset, TyUint16, m_func);
- InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
- }
- if (record->table)
- {
- // Check the size of each of the slot arrays in the scope chain.
- FOREACH_HASHTABLE_ENTRY(uint32, bucket, record->table)
- {
- uint32 slotId = bucket.element;
- if (slotId != (uint32)-1 && slotId > Js::ScopeSlots::FirstSlotIndex)
- {
- if (errorLabel == nullptr)
- {
- errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- }
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
- bucket.value * sizeof(Js::Var),
- TyVar, m_func, true);
- IR::RegOpnd * slotArrayOpnd = IR::RegOpnd::New(TyVar, m_func);
- InsertMove(slotArrayOpnd, indirOpnd, insertInstr);
- indirOpnd = IR::IndirOpnd::New(slotArrayOpnd,
- Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
- TyUint32, m_func, true);
- IR::IntConstOpnd * slotIdOpnd = IR::IntConstOpnd::New(slotId - Js::ScopeSlots::FirstSlotIndex,
- TyUint32, m_func);
- InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
- }
- }
- NEXT_HASHTABLE_ENTRY;
- }
- if (errorLabel)
- {
- InsertBranch(Js::OpCode::Br, continueLabel, insertInstr);
- insertInstr->InsertBefore(errorLabel);
- IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
- insertInstr->InsertBefore(instrHelper);
- m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
- insertInstr->InsertBefore(continueLabel);
- }
- m_lowererMD.ChangeToAssign(instr);
- return instrPrev;
- }
- IR::Instr *
- Lowerer::LowerSlotArrayCheck(IR::Instr * instr)
- {
- IR::Instr *instrPrev = instr->m_prev;
- IR::Instr *insertInstr = instr->m_next;
- IR::RegOpnd *slotArrayOpnd = instr->GetDst()->AsRegOpnd();
- StackSym *stackSym = slotArrayOpnd->m_sym;
- IR::IntConstOpnd *slotIdOpnd = instr->UnlinkSrc2()->AsIntConstOpnd();
- uint32 slotId = (uint32)slotIdOpnd->GetValue();
- Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);
- if (slotId > Js::ScopeSlots::FirstSlotIndex)
- {
- if (m_func->DoStackFrameDisplay() && stackSym->m_id == m_func->GetLocalClosureSym()->m_id)
- {
- // The pointer we loaded points to the reserved/known address where the slot array can be boxed.
- // Deref to get the real value.
- IR::IndirOpnd * srcOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func), 0, TyVar, m_func);
- IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyVar, m_func);
- InsertMove(dstOpnd, srcOpnd, insertInstr);
- stackSym = dstOpnd->m_sym;
- }
- IR::LabelInstr *errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
- IR::LabelInstr *continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
- IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func),
- Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
- TyUint32, m_func, true);
- slotIdOpnd->SetValue(slotId - Js::ScopeSlots::FirstSlotIndex);
- InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrGt_A, true, continueLabel, insertInstr);
- insertInstr->InsertBefore(errorLabel);
- IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
- insertInstr->InsertBefore(instrHelper);
- m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
- insertInstr->InsertBefore(continueLabel);
- }
- m_lowererMD.ChangeToAssign(instr);
- return instrPrev;
- }
- IR::RegOpnd *
- Lowerer::LoadIndexFromLikelyFloat(
- IR::RegOpnd *indexOpnd,
- const bool skipNegativeCheck,
- IR::LabelInstr *const notIntLabel,
- IR::LabelInstr *const negativeLabel,
- IR::Instr *const insertBeforeInstr)
- {
- #ifdef _M_IX86
- // We should only generate this if sse2 is available
- Assert(AutoSystemInfo::Data.SSE2Available());
- #endif
- Func *func = insertBeforeInstr->m_func;
- IR::LabelInstr * convertToUint = IR::LabelInstr::New(Js::OpCode::Label, func);
- IR::LabelInstr * fallThrough = IR::LabelInstr::New(Js::OpCode::Label, func);
- // First generate test for tagged int even though profile data says likely float. Indices are usually int and we need a fast path before we try to convert float to int
- // mov intIndex, index
- // sar intIndex, 1
- // jae convertToInt
- IR::RegOpnd *int32IndexOpnd = GenerateUntagVar(indexOpnd, convertToUint, insertBeforeInstr, !indexOpnd->IsTaggedInt());
- if (!skipNegativeCheck)
- {
- // test index, index
- // js $notTaggedIntOrNegative
- InsertTestBranch(int32IndexOpnd, int32IndexOpnd, LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), negativeLabel, insertBeforeInstr);
- }
- InsertBranch(Js::OpCode::Br, fallThrough, insertBeforeInstr);
- insertBeforeInstr->InsertBefore(convertToUint);
- // try to convert float to int in a fast path
- #if FLOATVAR
- IR::RegOpnd* floatIndexOpnd = m_lowererMD.CheckFloatAndUntag(indexOpnd, insertBeforeInstr, notIntLabel);
- #else
- m_lowererMD.GenerateFloatTest(indexOpnd, insertBeforeInstr, notIntLabel);
- IR::IndirOpnd * floatIndexOpnd = IR::IndirOpnd::New(indexOpnd, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
- #endif
- IR::LabelInstr * doneConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func);
- IR::LabelInstr * helperConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func, true /*helper*/);
- m_lowererMD.ConvertFloatToInt32(int32IndexOpnd, floatIndexOpnd, helperConvUint32, doneConvUint32, insertBeforeInstr);
- // helper path
- insertBeforeInstr->InsertBefore(helperConvUint32);
- m_lowererMD.LoadDoubleHelperArgument(insertBeforeInstr, floatIndexOpnd);
- IR::Instr * helperCall = IR::Instr::New(Js::OpCode::Call, int32IndexOpnd, this->m_func);
- insertBeforeInstr->InsertBefore(helperCall);
- m_lowererMD.ChangeToHelperCall(helperCall, IR::HelperConv_ToUInt32Core);
- // main path
- insertBeforeInstr->InsertBefore(doneConvUint32);
- //Convert uint32 to back to float for comparison that conversion was indeed successful
- IR::RegOpnd *floatOpndFromUint32 = IR::RegOpnd::New(TyFloat64, func);
- m_lowererMD.EmitUIntToFloat(floatOpndFromUint32, int32IndexOpnd, insertBeforeInstr);
- // compare with float from the original indexOpnd, we need floatIndex == (float64)(uint32)floatIndex
- InsertCompareBranch(floatOpndFromUint32, floatIndexOpnd, Js::OpCode::BrNeq_A, notIntLabel, insertBeforeInstr, false);
- insertBeforeInstr->InsertBefore(fallThrough);
- return int32IndexOpnd;
- }
- #if DBG
- void
- Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast)
- {
- FOREACH_INSTR_IN_RANGE(verifyLegalizeInstr, instrStart, instrLast)
- {
- LowererMD::Legalize<true>(verifyLegalizeInstr);
- }
- NEXT_INSTR_IN_RANGE;
- }
- #endif
|