LowerMD.cpp 253 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
7677867796780678167826783678467856786678767886789679067916792679367946795679667976798679968006801680268036804680568066807680868096810681168126813681468156816681768186819682068216822682368246825682668276828682968306831683268336834683568366837683868396840684168426843684468456846684768486849685068516852685368546855685668576858685968606861686268636864686568666867686868696870687168726873687468756876687768786879688068816882688368846885688668876888688968906891689268936894689568966897689868996900690169026903690469056906690769086909691069116912691369146915691669176918691969206921692269236924692569266927692869296930693169326933693469356936693769386939694069416942694369446945694669476948694969506951695269536954695569566957695869596960696169626963696469656966696769686969697069716972697369746975697669776978697969806981698269836984698569866987698869896990699169926993699469956996699769986999700070017002700370047005700670077008700970107011701270137014701570167017701870197020702170227023702470257026702770287029703070317032703370347035703670377038703970407041704270437044704570467047704870497050705170527053705470557056705770587059706070617062706370647065706670677068706970707071707270737074707570767077707870797080708170827083708470857086708770887089709070917092709370947095709670977098709971007101710271037104710571067107710871097110711171127113711471157116711771187119712071217122712371247125712671277128712971307131713271337134713571367137713871397140714171427143714471457146714771487149715071517152715371547155715671577158715971607161716271637164716571667167
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Language/JavascriptFunctionArgIndex.h"
// Machine-dependent opcode table for the ARM64 lowerer: each MD* constant maps a
// machine-independent operation onto the concrete ARM64 instruction the backend emits.

// Branching.
const Js::OpCode LowererMD::MDUncondBranchOpcode = Js::OpCode::B;       // unconditional PC-relative branch
const Js::OpCode LowererMD::MDMultiBranchOpcode = Js::OpCode::BR;       // register-indirect branch (multi-way targets)
// Flag tests and bitwise logic.
const Js::OpCode LowererMD::MDTestOpcode = Js::OpCode::TST;
const Js::OpCode LowererMD::MDOrOpcode = Js::OpCode::ORR;
const Js::OpCode LowererMD::MDXorOpcode = Js::OpCode::EOR;
// Overflow-condition branches.
const Js::OpCode LowererMD::MDOverflowBranchOpcode = Js::OpCode::BVS;
const Js::OpCode LowererMD::MDNotOverflowBranchOpcode = Js::OpCode::BVC;
// Float width conversion: FCVT handles both directions (32<->64), so both map to it.
const Js::OpCode LowererMD::MDConvertFloat32ToFloat64Opcode = Js::OpCode::FCVT;
const Js::OpCode LowererMD::MDConvertFloat64ToFloat32Opcode = Js::OpCode::FCVT;
// Calls, multiply, address computation.
const Js::OpCode LowererMD::MDCallOpcode = Js::OpCode::Call;
const Js::OpCode LowererMD::MDImulOpcode = Js::OpCode::MUL;
const Js::OpCode LowererMD::MDLea = Js::OpCode::LEA;
// Conditional-select forms used for speculative block lowering (integer and float).
const Js::OpCode LowererMD::MDSpecBlockNEOpcode = Js::OpCode::CSELNE;
const Js::OpCode LowererMD::MDSpecBlockFNEOpcode = Js::OpCode::FCSELNE;
  21. template<typename T>
  22. inline void Swap(T& x, T& y)
  23. {
  24. T temp = x;
  25. x = y;
  26. y = temp;
  27. }
  28. // Static utility fn()
  29. //
  30. bool
  31. LowererMD::IsAssign(const IR::Instr *instr)
  32. {
  33. return (instr->m_opcode == Js::OpCode::MOV ||
  34. instr->m_opcode == Js::OpCode::FMOV ||
  35. instr->m_opcode == Js::OpCode::LDIMM ||
  36. instr->m_opcode == Js::OpCode::LDR ||
  37. instr->m_opcode == Js::OpCode::LDRS ||
  38. instr->m_opcode == Js::OpCode::FLDR ||
  39. instr->m_opcode == Js::OpCode::STR ||
  40. instr->m_opcode == Js::OpCode::FSTR);
  41. }
  42. ///----------------------------------------------------------------------------
  43. ///
  44. /// LowererMD::IsCall
  45. ///
  46. ///----------------------------------------------------------------------------
  47. bool
  48. LowererMD::IsCall(const IR::Instr *instr)
  49. {
  50. return (instr->m_opcode == Js::OpCode::BL ||
  51. instr->m_opcode == Js::OpCode::BLR);
  52. }
  53. ///----------------------------------------------------------------------------
  54. ///
  55. /// LowererMD::IsIndirectBranch
  56. ///
  57. ///----------------------------------------------------------------------------
  58. bool
  59. LowererMD::IsIndirectBranch(const IR::Instr *instr)
  60. {
  61. return (instr->m_opcode == Js::OpCode::BR);
  62. }
  63. ///----------------------------------------------------------------------------
  64. ///
  65. /// LowererMD::IsUnconditionalBranch
  66. ///
  67. ///----------------------------------------------------------------------------
  68. bool
  69. LowererMD::IsUnconditionalBranch(const IR::Instr *instr)
  70. {
  71. return (instr->m_opcode == Js::OpCode::B ||
  72. instr->m_opcode == Js::OpCode::BR);
  73. }
  74. bool
  75. LowererMD::IsReturnInstr(const IR::Instr *instr)
  76. {
  77. return instr->m_opcode == Js::OpCode::RET;
  78. }
  79. ///----------------------------------------------------------------------------
  80. ///
  81. /// LowererMD::InvertBranch
  82. ///
  83. ///----------------------------------------------------------------------------
  84. void
  85. LowererMD::InvertBranch(IR::BranchInstr *branchInstr)
  86. {
  87. switch (branchInstr->m_opcode)
  88. {
  89. case Js::OpCode::BEQ:
  90. branchInstr->m_opcode = Js::OpCode::BNE;
  91. break;
  92. case Js::OpCode::BNE:
  93. branchInstr->m_opcode = Js::OpCode::BEQ;
  94. break;
  95. case Js::OpCode::BGE:
  96. branchInstr->m_opcode = Js::OpCode::BLT;
  97. break;
  98. case Js::OpCode::BGT:
  99. branchInstr->m_opcode = Js::OpCode::BLE;
  100. break;
  101. case Js::OpCode::BLT:
  102. branchInstr->m_opcode = Js::OpCode::BGE;
  103. break;
  104. case Js::OpCode::BLE:
  105. branchInstr->m_opcode = Js::OpCode::BGT;
  106. break;
  107. case Js::OpCode::BCS:
  108. branchInstr->m_opcode = Js::OpCode::BCC;
  109. break;
  110. case Js::OpCode::BCC:
  111. branchInstr->m_opcode = Js::OpCode::BCS;
  112. break;
  113. case Js::OpCode::BMI:
  114. branchInstr->m_opcode = Js::OpCode::BPL;
  115. break;
  116. case Js::OpCode::BPL:
  117. branchInstr->m_opcode = Js::OpCode::BMI;
  118. break;
  119. case Js::OpCode::BVS:
  120. branchInstr->m_opcode = Js::OpCode::BVC;
  121. break;
  122. case Js::OpCode::BVC:
  123. branchInstr->m_opcode = Js::OpCode::BVS;
  124. break;
  125. case Js::OpCode::BLS:
  126. branchInstr->m_opcode = Js::OpCode::BHI;
  127. break;
  128. case Js::OpCode::BHI:
  129. branchInstr->m_opcode = Js::OpCode::BLS;
  130. break;
  131. case Js::OpCode::CBZ:
  132. branchInstr->m_opcode = Js::OpCode::CBNZ;
  133. break;
  134. case Js::OpCode::CBNZ:
  135. branchInstr->m_opcode = Js::OpCode::CBZ;
  136. break;
  137. case Js::OpCode::TBZ:
  138. branchInstr->m_opcode = Js::OpCode::TBNZ;
  139. break;
  140. case Js::OpCode::TBNZ:
  141. branchInstr->m_opcode = Js::OpCode::TBZ;
  142. break;
  143. default:
  144. AssertMsg(UNREACHED, "B missing in InvertBranch()");
  145. }
  146. }
  147. Js::OpCode
  148. LowererMD::MDConvertFloat64ToInt32Opcode(const RoundMode roundMode)
  149. {
  150. switch (roundMode)
  151. {
  152. case RoundModeTowardZero:
  153. return Js::OpCode::FCVTZ;
  154. case RoundModeTowardInteger:
  155. return Js::OpCode::Nop;
  156. case RoundModeHalfToEven:
  157. return Js::OpCode::FCVTN;
  158. default:
  159. AssertMsg(0, "RoundMode has no MD mapping.");
  160. return Js::OpCode::Nop;
  161. }
  162. }
  163. // GenerateMemRef: Return an opnd that can be used to access the given address.
  164. // ARM can't encode direct accesses to physical addresses, so put the address in a register
  165. // and return an indir. (This facilitates re-use of the loaded address without having to re-load it.)
  166. IR::Opnd *
  167. LowererMD::GenerateMemRef(intptr_t addr, IRType type, IR::Instr *instr, bool dontEncode)
  168. {
  169. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  170. IR::AddrOpnd *addrOpnd = IR::AddrOpnd::New(addr, IR::AddrOpndKindDynamicMisc, this->m_func, dontEncode);
  171. Lowerer::InsertMove(baseOpnd, addrOpnd, instr);
  172. return IR::IndirOpnd::New(baseOpnd, 0, type, this->m_func);
  173. }
  174. void
  175. LowererMD::FlipHelperCallArgsOrder()
  176. {
  177. int left = 0;
  178. int right = helperCallArgsCount - 1;
  179. while (left < right)
  180. {
  181. IR::Opnd *tempOpnd = helperCallArgs[left];
  182. helperCallArgs[left] = helperCallArgs[right];
  183. helperCallArgs[right] = tempOpnd;
  184. left++;
  185. right--;
  186. }
  187. }
// Lower a helper call whose arguments arrive as a linked ArgOut/ExtendArg chain
// hanging off src2. Registers each argument with the helper-call bookkeeping,
// removes consumed ArgOut instructions, then converts the call itself.
IR::Instr *
LowererMD::LowerCallHelper(IR::Instr *instrCall)
{
    IR::Opnd *argOpnd = instrCall->UnlinkSrc2();
    IR::Instr *prevInstr = instrCall;
    IR::JnHelperMethod helperMethod = instrCall->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
    instrCall->FreeSrc1();

    while (argOpnd)
    {
        Assert(argOpnd->IsRegOpnd());
        IR::RegOpnd *regArg = argOpnd->AsRegOpnd();

        Assert(regArg->m_sym->m_isSingleDef);
        IR::Instr *instrArg = regArg->m_sym->m_instrDef;

        // Only ArgOut_A is expected, except for the few helpers that build their
        // argument lists with ExtendArg_A.
        Assert(instrArg->m_opcode == Js::OpCode::ArgOut_A ||
            (helperMethod == IR::JnHelperMethod::HelperOP_InitCachedScope && instrArg->m_opcode == Js::OpCode::ExtendArg_A) ||
            (helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj && instrArg->m_opcode == Js::OpCode::ExtendArg_A) ||
            (helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj && instrArg->m_opcode == Js::OpCode::ExtendArg_A));
        prevInstr = this->LoadHelperArgument(prevInstr, instrArg->GetSrc1());

        // Advance to the next link before possibly removing this instruction.
        argOpnd = instrArg->GetSrc2();

        if (instrArg->m_opcode == Js::OpCode::ArgOut_A)
        {
            // The ArgOut's value has been registered; unlink its operands and delete it.
            instrArg->UnlinkSrc1();
            if (argOpnd)
            {
                instrArg->UnlinkSrc2();
            }
            regArg->Free(this->m_func);
            instrArg->Remove();
        }
        else if (instrArg->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // ExtendArg instructions are left in place; keep their register sources
            // live across loop back edges.
            if (instrArg->GetSrc1()->IsRegOpnd())
            {
                m_lowerer->addToLiveOnBackEdgeSyms->Set(instrArg->GetSrc1()->AsRegOpnd()->GetStackSym()->m_id);
            }
        }
    }

    // Most helpers take the script context as an additional trailing argument;
    // the home-object helpers are the exception.
    switch (helperMethod)
    {
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj:
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj:
        break;

    default:
        prevInstr = m_lowerer->LoadScriptContext(prevInstr);
        break;
    }

    // Arguments were gathered last-to-first; flip them into call order.
    this->FlipHelperCallArgsOrder();
    return this->ChangeToHelperCall(instrCall, helperMethod);
}
// Lower a call: May be either helper or native JS call. Just set the opcode, and
// put the result into the return register. (No stack adjustment required.)
IR::Instr *
LowererMD::LowerCall(IR::Instr * callInstr, Js::ArgSlot argCount)
{
    IR::Instr *retInstr = callInstr;
    IR::Opnd *targetOpnd = callInstr->GetSrc1();
    AssertMsg(targetOpnd, "Call without a target?");

    // This is required here due to calls created during lowering
    callInstr->m_func->SetHasCallsOnSelfAndParents();

    if (targetOpnd->IsRegOpnd())
    {
        // Indirect call
        callInstr->m_opcode = Js::OpCode::BLR;
    }
    else
    {
        AssertMsg(targetOpnd->IsHelperCallOpnd(), "Why haven't we loaded the call target?");

        // Direct call
        //
        // Load the helper address into a register (LR, which the call clobbers anyway)
        // because the immediate form of BL cannot reach an arbitrary constant address.
        // Non helper call methods will already be accessed indirectly.
        //
        // Skip this for bailout calls. The register allocator will lower that as appropriate, without affecting spill choices.
        if (!callInstr->HasBailOutInfo())
        {
            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, RegLR, TyMachPtr, this->m_func);
            IR::Instr *movInstr = IR::Instr::New(Js::OpCode::LDIMM, regOpnd, callInstr->UnlinkSrc1(), this->m_func);
            regOpnd->m_isCallArg = true;
            callInstr->SetSrc1(regOpnd);
            callInstr->InsertBefore(movInstr);
        }
        callInstr->m_opcode = Js::OpCode::BLR;
    }

    // Move the return value from the machine return register into the IR dst, if any.
    IR::Opnd *dstOpnd = callInstr->GetDst();
    if (dstOpnd)
    {
        Js::OpCode assignOp;
        RegNum returnReg;

        if (dstOpnd->IsFloat64())
        {
            assignOp = Js::OpCode::FMOV;
            returnReg = RETURN_DBL_REG;
        }
        else
        {
            assignOp = Js::OpCode::MOV;
            returnReg = RETURN_REG;

            if (callInstr->GetSrc1()->IsHelperCallOpnd())
            {
                // Truncate the result of a conversion to 32-bit int, because the C++ code doesn't.
                IR::HelperCallOpnd *helperOpnd = callInstr->GetSrc1()->AsHelperCallOpnd();
                if (helperOpnd->m_fnHelper == IR::HelperConv_ToInt32 ||
                    helperOpnd->m_fnHelper == IR::HelperConv_ToInt32_Full ||
                    helperOpnd->m_fnHelper == IR::HelperConv_ToInt32Core ||
                    helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32 ||
                    helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32_Full ||
                    helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32Core)
                {
                    assignOp = Js::OpCode::MOV_TRUNC;
                }
            }
        }

        // Sink the dst into a separate move and pin both sides to the return register.
        IR::Instr * movInstr = callInstr->SinkDst(assignOp);
        callInstr->GetDst()->AsRegOpnd()->SetReg(returnReg);
        movInstr->GetSrc1()->AsRegOpnd()->SetReg(returnReg);
        retInstr = movInstr;
        Legalize(retInstr);
    }

    //
    // assign the arguments to appropriate positions
    //
    AssertMsg(this->helperCallArgsCount >= 0, "Fatal. helper call arguments ought to be positive");
    AssertMsg(this->helperCallArgsCount <= MaxArgumentsToHelper, "Too many helper call arguments");

    uint16 argsLeft = this->helperCallArgsCount;
    uint16 doubleArgsLeft = this->helperCallDoubleArgsCount;
    uint16 intArgsLeft = argsLeft - doubleArgsLeft;

    // Integer and double arguments are slotted independently: each kind counts
    // down through its own register sequence.
    while (argsLeft > 0)
    {
        IR::Opnd *helperArgOpnd = this->helperCallArgs[this->helperCallArgsCount - argsLeft];
        IR::Opnd * opndParam = nullptr;

        if (helperArgOpnd->IsFloat())
        {
            opndParam = this->GetOpndForArgSlot(doubleArgsLeft - 1, helperArgOpnd);
            AssertMsg(opndParam->IsRegOpnd(), "NYI for other kind of operands");
            --doubleArgsLeft;
        }
        else
        {
            opndParam = this->GetOpndForArgSlot(intArgsLeft - 1, helperArgOpnd);
            --intArgsLeft;
        }
        Lowerer::InsertMove(opndParam, helperArgOpnd, callInstr);
        --argsLeft;
    }
    Assert(doubleArgsLeft == 0 && intArgsLeft == 0 && argsLeft == 0);

    // We're done with the args (if any) now, so clear the param location state.
    this->FinishArgLowering();

    return retInstr;
}
  337. IR::Instr *
  338. LowererMD::LoadDynamicArgument(IR::Instr *instr, uint argNumber)
  339. {
  340. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  341. Assert(instr->GetSrc2() == nullptr);
  342. IR::Opnd* dst = GetOpndForArgSlot((Js::ArgSlot) (argNumber - 1));
  343. instr->SetDst(dst);
  344. instr->m_opcode = Js::OpCode::MOV;
  345. LegalizeMD::LegalizeInstr(instr);
  346. return instr;
  347. }
// Lower an ArgOut_A_Dynamic whose position comes from a runtime length value
// (src2): compute the stack index from the length and store the value there.
IR::Instr *
LowererMD::LoadDynamicArgumentUsingLength(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::RegOpnd* src2 = instr->UnlinkSrc2()->AsRegOpnd();

    // We register store the first INT_ARG_REG_COUNT - 3 parameters, since the first 3 register parameters are taken by function object, callinfo, and this pointer
    // NOTE(review): the local is named "add" but the opcode is SUB — it computes
    // length - (INT_ARG_REG_COUNT - 3), i.e. the stack-relative slot index.
    IR::Instr *add = IR::Instr::New(Js::OpCode::SUB, IR::RegOpnd::New(src2->GetType(), this->m_func), src2, IR::IntConstOpnd::New(INT_ARG_REG_COUNT - 3, TyInt8, this->m_func), this->m_func);
    instr->InsertBefore(add);
    LegalizeMD::LegalizeInstr(add);

    //We need store nth actuals, so stack location is after function object, callinfo & this pointer
    IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
    // [sp + index << DefaultIndirScale] addresses the actual's outgoing slot.
    IR::IndirOpnd *actualsLocation = IR::IndirOpnd::New(stackPointer, add->GetDst()->AsRegOpnd(), GetDefaultIndirScale(), TyMachReg, this->m_func);
    instr->SetDst(actualsLocation);
    instr->m_opcode = Js::OpCode::LDR;
    LegalizeMD::LegalizeInstr(instr);
    return instr;
}
  365. void
  366. LowererMD::SetMaxArgSlots(Js::ArgSlot actualCount /*including this*/)
  367. {
  368. Js::ArgSlot offset = 3;//For function object & callInfo & this
  369. if (this->m_func->m_argSlotsForFunctionsCalled < (uint32) (actualCount + offset))
  370. {
  371. this->m_func->m_argSlotsForFunctionsCalled = (uint32)(actualCount + offset);
  372. }
  373. return;
  374. }
  375. void
  376. LowererMD::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
  377. {
  378. m_lowerer->GenerateMemInit(opnd, offset, (uint32)value, insertBeforeInstr, isZeroed);
  379. }
// Lower a dynamic CallI (e.g. .apply-style call where the arg count is a runtime
// value): place "this", the callinfo, and the function object into their argument
// positions, then lower the call itself.
IR::Instr *
LowererMD::LowerCallIDynamic(IR::Instr *callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr, 3); //this pointer is the 3rd argument

    //callInfo
    if (callInstr->m_func->IsInlinee())
    {
        // Inlinee: the actual count is a compile-time constant.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
        this->SetMaxArgSlots((Js::ArgSlot)callInstr->m_func->actualCount);
    }
    else
    {
        // Runtime count: add 1 to include the "this" argument, and reserve the
        // maximum possible argout space.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyInt8, this->m_func), this->m_func));
        this->SetMaxArgSlots(Js::InlineeCallInfo::MaxInlineeArgoutCount);
    }
    // Arg slot 1 receives the callinfo (the adjusted length).
    Lowerer::InsertMove( this->GetOpndForArgSlot(1), argsLength, callInstr);

    IR::RegOpnd *funcObjOpnd = callInstr->UnlinkSrc1()->AsRegOpnd();
    GeneratePreCall(callInstr, funcObjOpnd, insertBeforeInstrForCFG);

    // functionOpnd is the first argument.
    IR::Opnd * opndParam = this->GetOpndForArgSlot(0);
    Lowerer::InsertMove(opndParam, funcObjOpnd, callInstr);
    return this->LowerCall(callInstr, 0);
}
// Emit a guard before the call that raises JSERR_NeedFunction when the call
// target is a tagged value (i.e. not an object). No code is emitted when the
// operand is already known not to be tagged.
void
LowererMD::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");

    // Need check and error if we are calling a tagged int.
    if (!functionObjOpnd->IsNotTaggedValue())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        // GenerateObjectTest branches to helperLabel on a non-object; labels are
        // only emitted when it reports that a test was actually generated.
        if (this->GenerateObjectTest(functionObjOpnd, callInstr, helperLabel))
        {
            // Layout before the call:  B callLabel ; helperLabel: <error> ; callLabel:
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            IR::Instr * instr = IR::BranchInstr::New(Js::OpCode::B, callLabel, this->m_func);
            callInstr->InsertBefore(instr);

            callInstr->InsertBefore(helperLabel);
            callInstr->InsertBefore(callLabel);

            this->m_lowerer->GenerateRuntimeError(callLabel, JSERR_NeedFunction);

            if (continueAfterExLabel)
            {
                // Under debugger the RuntimeError (exception) can be ignored, generate branch to jmp to safe place
                // (which would normally be debugger bailout check).
                IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
                callLabel->InsertBefore(continueAfterEx);
            }
        }
    }
}
// Load the callee's entry point (via its type) and install it as the call target,
// optionally guarded by a CFG check. Returns the instruction before which sunk
// stack-parameter stores should be inserted.
IR::Instr*
LowererMD::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd, IR::Instr * insertBeforeInstrForCFGCheck)
{
    if (insertBeforeInstrForCFGCheck == nullptr)
    {
        insertBeforeInstrForCFGCheck = callInstr;
    }

    IR::RegOpnd * functionTypeRegOpnd = nullptr;
    IR::IndirOpnd * entryPointIndirOpnd = nullptr;

    // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
    // For other calls, we need to load it from the function object stored in a register operand.
    if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
    {
        functionTypeRegOpnd = this->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(insertBeforeInstrForCFGCheck, functionObjOpnd);
    }
    else if (functionObjOpnd->IsRegOpnd())
    {
        AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be stackSym");

        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        IR::IndirOpnd* functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        Lowerer::InsertMove(functionTypeRegOpnd, functionTypeIndirOpnd, insertBeforeInstrForCFGCheck);
    }
    else
    {
        AnalysisAssertMsg(false, "Unexpected call target operand type.");
    }

    // Load the entry point out of the type; the type register is reused to hold it.
    entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfEntryPoint(), TyMachPtr, m_func);

    IR::RegOpnd *entryPointRegOpnd = functionTypeRegOpnd;
    entryPointRegOpnd->m_isCallArg = true;

    IR::Instr * stackParamInsert = Lowerer::InsertMove(entryPointRegOpnd, entryPointIndirOpnd, insertBeforeInstrForCFGCheck);

    // entryPointRegOpnd is the address we'll call.
    callInstr->SetSrc1(entryPointRegOpnd);

#if defined(_CONTROL_FLOW_GUARD)
    // verify that the call target is valid (CFG Check)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->GenerateCFGCheck(entryPointRegOpnd, insertBeforeInstrForCFGCheck);
    }
#endif
    return stackParamInsert;
}
// Lower an indirect JS-convention call: guard the target (unless fixed/new),
// load its entry point, lower the ArgOut chain and callinfo, then the call
// itself, and finally hook up SimpleJit call profiling if requested.
IR::Instr *
LowererMD::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    // Indirect call using JS calling convention:
    // R0 = callee func object
    // R1 = callinfo
    // R2 = arg0 ("this")
    // R3 = arg1
    // [sp] = arg2
    // etc.

    // First load the target address. Note that we want to wind up with this:
    // ...
    // [sp+4] = arg3
    // [sp] = arg2
    // load target addr from func obj
    // R3 = arg1
    // ...
    // R0 = func obj
    // BLX target addr

    // This way the register containing the target addr interferes with the param regs
    // only, not the regs we use to store params to the stack.
    // We're sinking the stores of stack params so that the call sequence is contiguous.
    // This is required by nested calls, since each call will re-use the same stack slots.
    // But if there is no nesting, stack params can be stored as soon as they're computed.

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    IR::Instr * insertBeforeInstrForCFGCheck = callInstr;

    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again.
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        Assert(functionObjOpnd->IsRegOpnd());
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }
    else if (insertBeforeInstrForCFG != nullptr)
    {
        // RegNum dstReg = insertBeforeInstrForCFG->GetDst()->AsRegOpnd()->GetReg();
        // AssertMsg(dstReg == RegArg2 || dstReg == RegArg3, "NewScObject should insert the first Argument in RegArg2/RegArg3 only based on Spread call or not.");
        insertBeforeInstrForCFGCheck = insertBeforeInstrForCFG;
    }

    IR::Instr * stackParamInsert = GeneratePreCall(callInstr, functionObjOpnd, insertBeforeInstrForCFGCheck);

    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo;
    int32 argCount = this->LowerCallArgs(callInstr, stackParamInsert, callFlags, 1, &callInfo);

    // functionObjOpnd is the first argument.
    IR::Opnd * opndParam = this->GetOpndForArgSlot(0);
    Lowerer::InsertMove(opndParam, functionObjOpnd, callInstr);

    IR::Opnd *const finalDst = callInstr->GetDst();

    // Finally, lower the call instruction itself.
    IR::Instr* ret = this->LowerCall(callInstr, (Js::ArgSlot)argCount);

    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!CONFIG_FLAG(NewSimpleJit));

        if (finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }

        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
// Walk the ArgOut chain off callInstr's src2, assigning each user argument to its
// argument slot (register args stay by the call, stack args sink to
// stackParamInsert), lower the terminating StartCall, and emit the callinfo.
// Returns the total slot count: user args + callinfo + extraParams.
int32
LowererMD::LowerCallArgs(IR::Instr *callInstr, IR::Instr *stackParamInsert, ushort callFlags, Js::ArgSlot extraParams, IR::IntConstOpnd **callInfoOpndRef)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");

    uint32 argCount = 0;
    IR::Opnd* opndParam;

    // Now walk the user arguments and remember the arg count.
    IR::Instr * argInstr = callInstr;
    IR::Opnd *src2Opnd = callInstr->UnlinkSrc2();
    while (src2Opnd->IsSymOpnd())
    {
        // Get the arg instr
        IR::SymOpnd * argLinkOpnd = src2Opnd->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);

        argInstr = argLinkSym->m_instrDef;

        // The arg sym isn't assigned a constant directly anymore
        argLinkSym->m_isConst = false;
        argLinkSym->m_isIntConst = false;
        argLinkSym->m_isTaggableIntConst = false;

        // The arg slot nums are 1-based, so subtract 1. Then add 1 for the non-user args (callinfo).
        auto argSlotNum = argLinkSym->GetArgSlotNum();
        if (argSlotNum + extraParams < argSlotNum)
        {
            // Slot arithmetic overflowed.
            Js::Throw::OutOfMemory();
        }
        opndParam = this->GetOpndForArgSlot(argSlotNum + extraParams);

        src2Opnd = argInstr->UnlinkSrc2();
        argInstr->ReplaceDst(opndParam);
        argInstr->Unlink();

        // Register args stay adjacent to the call; stack args sink to the
        // stackParamInsert point so the call sequence stays contiguous.
        if (opndParam->IsRegOpnd())
        {
            callInstr->InsertBefore(argInstr);
        }
        else
        {
            stackParamInsert->InsertBefore(argInstr);
        }
        this->ChangeToAssign(argInstr);
        argCount++;
    }

    // The final (reg) link of the chain is the StartCall.
    IR::RegOpnd * argLinkOpnd = src2Opnd->AsRegOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall || startCallInstr->m_opcode == Js::OpCode::LoweredStartCall, "Problem with arg chain.");
    AssertMsg(startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount,
        "ArgCount doesn't match StartCall count");
    // Deal with the SC.
    this->LowerStartCall(startCallInstr);

    // Second argument is the callinfo.
    IR::IntConstOpnd *opndCallInfo = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
    if (callInfoOpndRef)
    {
        opndCallInfo->Use(m_func);
        *callInfoOpndRef = opndCallInfo;
    }
    opndParam = this->GetOpndForArgSlot(extraParams);
    Lowerer::InsertMove(opndParam, opndCallInfo, callInstr);

    return argCount + 1 + extraParams; // + 1 for the callinfo
}
  614. IR::Instr *
  615. LowererMD::LowerStartCall(IR::Instr * instr)
  616. {
  617. // StartCall doesn't need to generate a stack adjustment. Just delete it.
  618. instr->m_opcode = Js::OpCode::LoweredStartCall;
  619. return instr;
  620. }
  621. IR::Instr *
  622. LowererMD::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArgValue)
  623. {
  624. // Load the given parameter into the appropriate location.
  625. // We update the current param state so we can do this work without making the caller
  626. // do the work.
  627. Assert(this->helperCallArgsCount < LowererMD::MaxArgumentsToHelper);
  628. __analysis_assume(this->helperCallArgsCount < MaxArgumentsToHelper);
  629. helperCallArgs[helperCallArgsCount++] = opndArgValue;
  630. if (opndArgValue->GetType() == TyMachDouble)
  631. {
  632. this->helperCallDoubleArgsCount++;
  633. }
  634. return instr;
  635. }
  636. void
  637. LowererMD::FinishArgLowering()
  638. {
  639. this->helperCallArgsCount = 0;
  640. this->helperCallDoubleArgsCount = 0;
  641. }
// Map a 0-based argument slot to its machine location: an integer argument
// register, a double argument register, or an outgoing stack slot. argOpnd (may
// be null) supplies the operand type and selects int vs. double numbering.
IR::Opnd *
LowererMD::GetOpndForArgSlot(Js::ArgSlot argSlot, IR::Opnd * argOpnd)
{
    IR::Opnd * opndParam = nullptr;
    IRType type = argOpnd ? argOpnd->GetType() : TyMachReg;

    if (argOpnd == nullptr || !argOpnd->IsFloat())
    {
        if (argSlot < NUM_INT_ARG_REGS)
        {
            // Return an instance of the next arg register.
            IR::RegOpnd *regOpnd;
            regOpnd = IR::RegOpnd::New(nullptr, (RegNum)(argSlot + FIRST_INT_ARG_REG), type, this->m_func);
            regOpnd->m_isCallArg = true;

            opndParam = regOpnd;
        }
        else
        {
            // Create a stack slot reference and bump up the size of this function's outgoing param area,
            // if necessary.
            argSlot = argSlot - NUM_INT_ARG_REGS;
            IntConstType offset = argSlot * MachRegInt;
            IR::RegOpnd * spBase = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
            opndParam = IR::IndirOpnd::New(spBase, int32(offset), type, this->m_func);

            if (this->m_func->m_argSlotsForFunctionsCalled < (uint32)(argSlot + 1))
            {
                this->m_func->m_argSlotsForFunctionsCalled = argSlot + 1;
            }
        }
    }
    else
    {
        // Float argument: always passed in a double argument register.
        if (argSlot < MaxDoubleArgumentsToHelper)
        {
            // Return an instance of the next arg register.
            IR::RegOpnd *regOpnd;
            regOpnd = IR::RegOpnd::New(nullptr, (RegNum)(argSlot + FIRST_DOUBLE_ARG_REG), type, this->m_func);
            regOpnd->m_isCallArg = true;
            opndParam = regOpnd;
        }
        else
        {
            AssertMsg(false,"More than 8 double parameter passing disallowed");
        }
    }
    return opndParam;
}
  688. IR::Instr *
  689. LowererMD::LoadDoubleHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  690. {
  691. // Load the given parameter into the appropriate location.
  692. // We update the current param state so we can do this work without making the caller
  693. // do the work.
  694. Assert(opndArg->GetType() == TyMachDouble);
  695. return this->LoadHelperArgument(instr, opndArg);
  696. }
// Emit the stack-overflow probe for the prolog: compare SP against the script
// stack limit (plus this frame's size) and, on failure, call
// ThreadContext::ProbeCurrentStack, which must throw.
void
LowererMD::GenerateStackProbe(IR::Instr *insertInstr, bool afterProlog)
{
    //
    // Generate a stack overflow check. This can be as simple as a cmp esp, const
    // because this function is guaranteed to be called on its base thread only.
    // If the check fails call ThreadContext::ProbeCurrentStack which will check again and must throw.
    //
    // LDIMM r17, ThreadContext::scriptStackLimit + frameSize //Load to register first, as this can be more than 12 bit supported in CMP
    // CMP sp, r17
    // BHI done
    // begin:
    // LDIMM r0, frameSize
    // LDIMM r1, scriptContext
    // LDIMM r2, ThreadContext::ProbeCurrentStack //MUST THROW
    // BLX r2 //BX r2 if the stackprobe is before prolog
    // done:
    //
    // For thread context with script interrupt enabled:
    // LDIMM r17, &ThreadContext::scriptStackLimitForCurrentThread
    // LDR r17, [r17]
    // MOV r15, frameSize
    // ADDS r17, r17, r15
    // BVS $helper
    // CMP sp, r17
    // BHI done
    // $helper:
    // LDIMM r0, frameSize
    // LDIMM r1, scriptContext
    // LDIMM r2, ThreadContext::ProbeCurrentStack //MUST THROW
    // BLX r2 //BX r2 if the stackprobe is before prolog
    // done:
    //

    //m_localStackHeight for ARM contains (m_argSlotsForFunctionsCalled * MachPtr)
    uint32 frameSize = this->m_func->m_localStackHeight + Js::Constants::MinStackJIT;

    IR::RegOpnd *scratchOpnd = IR::RegOpnd::New(nullptr, SCRATCH_REG, TyMachReg, this->m_func);
    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, afterProlog);
    IR::Instr *instr;
    bool doInterruptProbe = m_func->GetJITFunctionBody()->DoInterruptProbe();

    if (doInterruptProbe || !m_func->GetThreadContextInfo()->IsThreadBound())
    {
        // The limit is mutable (script interrupt / non-thread-bound), so it must be
        // loaded from memory at runtime rather than baked in as a constant.

        // LDIMM r17, &ThreadContext::scriptStackLimitForCurrentThread
        intptr_t pLimit = m_func->GetThreadContextInfo()->GetThreadStackLimitAddr();
        Lowerer::InsertMove(scratchOpnd, IR::AddrOpnd::New(pLimit, IR::AddrOpndKindDynamicMisc, this->m_func), insertInstr);

        // LDR r17, [r17, #0]
        Lowerer::InsertMove(scratchOpnd, IR::IndirOpnd::New(scratchOpnd, 0, TyMachReg, this->m_func), insertInstr);

        AssertMsg(!IS_CONST_00000FFF(frameSize), "For small size we can just add frameSize to r17");

        // MOV r15, frameSize
        IR::Opnd* spAllocRegOpnd = IR::RegOpnd::New(nullptr, SP_ALLOC_SCRATCH_REG, TyMachReg, this->m_func);
        Lowerer::InsertMove(spAllocRegOpnd, IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), insertInstr);

        // ADDS r17, r17, r15
        instr = IR::Instr::New(Js::OpCode::ADDS, scratchOpnd, scratchOpnd, spAllocRegOpnd, this->m_func);
        insertInstr->InsertBefore(instr);

        // If this add overflows, we have to call the helper.
        instr = IR::BranchInstr::New(Js::OpCode::BVS, helperLabel, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    else
    {
        // Fixed limit: fold limit + frameSize into one immediate.

        // MOV r17, frameSize + scriptStackLimit
        uint64 scriptStackLimit = m_func->GetThreadContextInfo()->GetScriptStackLimit();
        IR::Opnd *stackLimitOpnd = IR::IntConstOpnd::New(frameSize + scriptStackLimit, TyMachReg, this->m_func);
        Lowerer::InsertMove(scratchOpnd, stackLimitOpnd, insertInstr);
    }

    IR::LabelInstr *doneLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
    if (!IS_FAULTINJECT_STACK_PROBE_ON) // Do stack check fastpath only if not doing StackProbe fault injection
    {
        // CMP sp, r17
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func));
        instr->SetSrc2(scratchOpnd);
        insertInstr->InsertBefore(instr);
        LegalizeMD::LegalizeInstr(instr);

        // BHI done
        instr = IR::BranchInstr::New(Js::OpCode::BHI, doneLabelInstr, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    // ToDo (SaAgarwa): Make sure all SP offsets are correct
    // Zero out the pointer to the list of stack nested funcs, since the functions won't be initialized on this path.
    /*
    scratchOpnd = IR::RegOpnd::New(nullptr, RegR0, TyMachReg, m_func);
    IR::RegOpnd *frameReg = IR::RegOpnd::New(nullptr, GetRegFramePointer(), TyMachReg, m_func);
    Lowerer::InsertMove(scratchOpnd, IR::IntConstOpnd::New(0, TyMachReg, m_func), insertInstr);
    IR::Opnd *indirOpnd = IR::IndirOpnd::New(
        frameReg, -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachReg, m_func);
    Lowerer::InsertMove(indirOpnd, scratchOpnd, insertInstr);
    */

    // Helper call arguments: frame size, script context, and the probe routine.
    IR::RegOpnd *r0Opnd = IR::RegOpnd::New(nullptr, RegR0, TyMachReg, this->m_func);
    Lowerer::InsertMove(r0Opnd, IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func, true), insertInstr);

    IR::RegOpnd *r1Opnd = IR::RegOpnd::New(nullptr, RegR1, TyMachReg, this->m_func);
    Lowerer::InsertMove(r1Opnd, this->m_lowerer->LoadScriptContextOpnd(insertInstr), insertInstr);

    IR::RegOpnd *r2Opnd = IR::RegOpnd::New(nullptr, RegR2, TyMachReg, m_func);
    Lowerer::InsertMove(r2Opnd, IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack, this->m_func), insertInstr);

    // Before the prolog LR is still live, so use a plain BR (no link) instead of BLR.
    instr = IR::Instr::New(afterProlog? Js::OpCode::BLR : Js::OpCode::BR, this->m_func);
    instr->SetSrc1(r2Opnd);
    insertInstr->InsertBefore(instr);

    insertInstr->InsertBefore(doneLabelInstr);
}
//
// Emits the code to allocate 'allocSize' bytes of space on the stack. When the probe size is
// small (fits under the guard page threshold) this emits a single "SUB SP, SP, #allocSize";
// otherwise it calls __chkstk so that every page of the new allocation is touched in order.
//
// Returns true iff the scratch registers used for the __chkstk sequence were trashed.
//
bool
LowererMD::GenerateStackAllocation(IR::Instr *instr, uint32 allocSize, uint32 probeSize)
{
    IR::RegOpnd* spOpnd = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);

    if (IsSmallStack(probeSize))
    {
        // Small allocation: a single immediate-form SUB suffices, no probing required.
        AssertMsg(!(allocSize & 0xFFFFF000), "Must fit in 12 bits");
        AssertMsg(allocSize % MachStackAlignment == 0, "Must be aligned");
        // Generate SUB SP, SP, stackSize
        IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(allocSize, TyMachReg, this->m_func, true);
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, spOpnd, spOpnd, stackSizeOpnd, this->m_func);
        instr->InsertBefore(subInstr);
        // No scratch registers were used on this path.
        return false;
    }

    // __chkstk is a leaf function and hence alignment is not required.
    // Generate the __chkstk call sequence:
    //     LDIMM SCRATCH_REG, HelperCRT_chkstk
    //     LDIMM SP_ALLOC_SCRATCH_REG, allocSize / 16
    //     BLR   SCRATCH_REG
    //     SUB   SP, SP, SP_ALLOC_SCRATCH_REG, lsl #4
    // __chkstk expects the stack size argument, in 16-byte units, in SP_ALLOC_SCRATCH_REG (x15).
    IR::RegOpnd *spAllocOpnd = IR::RegOpnd::New(nullptr, SP_ALLOC_SCRATCH_REG, TyMachReg, this->m_func);
    IR::RegOpnd *targetOpnd = IR::RegOpnd::New(nullptr, SCRATCH_REG, TyMachReg, this->m_func);
    IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New((allocSize / MachStackAlignment), TyMachReg, this->m_func, true);

    IR::Instr *movHelperAddrInstr = IR::Instr::New(Js::OpCode::LDIMM, targetOpnd, IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func), this->m_func);
    instr->InsertBefore(movHelperAddrInstr);

    IR::Instr *movInstr = IR::Instr::New(Js::OpCode::LDIMM, spAllocOpnd, stackSizeOpnd, this->m_func);
    instr->InsertBefore(movInstr);

    IR::Instr * callInstr = IR::Instr::New(Js::OpCode::BLR, spAllocOpnd, targetOpnd, this->m_func);
    instr->InsertBefore(callInstr);

    // __chkstk succeeded; adjust SP by allocSize. x15 contains allocSize/16, so left shift x15 by 4
    // to recover allocSize.
    // Generate SUB SP, SP, x15, lsl #4
    IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB_LSL4, spOpnd, spOpnd, spAllocOpnd, this->m_func);
    instr->InsertBefore(subInstr);

    // return true to imply scratch register is trashed
    return true;
}
  837. void
  838. LowererMD::GenerateStackDeallocation(IR::Instr *instr, uint32 allocSize)
  839. {
  840. IR::RegOpnd * spOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  841. IR::Instr * spAdjustInstr = IR::Instr::New(Js::OpCode::ADD,
  842. spOpnd,
  843. spOpnd,
  844. IR::IntConstOpnd::New(allocSize, TyMachReg, this->m_func, true), this->m_func);
  845. instr->InsertBefore(spAdjustInstr);
  846. LegalizeMD::LegalizeInstr(spAdjustInstr);
  847. }
// Computes (but does not emit) the canonical ARM64 prolog/epilog stack frame layout for a
// function. All offsets below are expressed relative to the final (fully-allocated) SP.
class ARM64StackLayout
{
    //
    // Canonical ARM64 prolog/epilog stack layout (stack grows downward):
    //
    // +-------------------------------------+
    // | caller-allocated parameters         |
    // +=====================================+-----> SP at time of call
    // | homed parameters (x0-x7)            |
    // +-------------------------------------+
    // | frame pointer + link register       |
    // +-------------------------------------+-----> updated FP points here
    // | arguments slot + StackFunctionList  |
    // +-------------------------------------+
    // | callee-saved registers (x19-x28)    |
    // +-------------------------------------+
    // | callee-saved FP regs (d8-d15)       |
    // +-------------------------------------+-----> == regOffset
    // | locals area                         |
    // +-------------------------------------+-----> locals pointer if not SP
    // | callee-allocated outgoing arguments |
    // +=====================================+-----> SP points here when done
    //
public:
    ARM64StackLayout(Func* func);

    // Getters
    bool HasCalls() const { return m_hasCalls; }
    bool HasTry() const { return m_hasTry; }
    ULONG ArgSlotCount() const { return m_argSlotCount; }
    BitVector HomedParams() const { return m_homedParams; }
    BitVector SavedRegisters() const { return m_savedRegisters; }
    BitVector SavedDoubles() const { return m_savedDoubles; }

    // Locals area sits right after the space allocated for outgoing arguments
    ULONG LocalsOffset() const { return this->m_argSlotCount * MachRegInt; }
    ULONG LocalsSize() const { return this->m_localsArea; }

    // Saved non-volatile double registers sit past the locals area
    ULONG SavedDoublesOffset() const { return this->LocalsOffset() + this->LocalsSize(); }
    ULONG SavedDoublesSize() const { return this->m_savedDoubles.Count() * MachRegDouble; }

    // Saved non-volatile integer registers sit after the saved doubles
    ULONG SavedRegistersOffset() const { return this->SavedDoublesOffset() + this->SavedDoublesSize(); }
    ULONG SavedRegistersSize() const { return this->m_savedRegisters.Count() * MachRegInt; }

    // The argument slot and StackFunctionList entry come after the saved integer registers
    // (only present when the function makes calls)
    ULONG ArgSlotOffset() const { return this->SavedRegistersOffset() + this->SavedRegistersSize(); }
    ULONG ArgSlotSize() const { return this->m_hasCalls ? (2 * MachRegInt) : 0; }

    // Next comes the frame chain (FP/LR pair, only present when the function makes calls)
    ULONG FpLrOffset() const { return this->ArgSlotOffset() + this->ArgSlotSize(); }
    ULONG FpLrSize() const { return this->m_hasCalls ? (2 * MachRegInt) : 0; }

    // Followed by any homed parameters
    ULONG HomedParamsOffset() const { return this->FpLrOffset() + this->FpLrSize(); }
    ULONG HomedParamsSize() const { return this->m_homedParams.Count() * MachRegInt; }

    // And that's the total stack allocation
    ULONG TotalStackSize() const { return this->HomedParamsOffset() + this->HomedParamsSize(); }

    // The register area is the area at the far end that doesn't include locals or arg slots
    ULONG RegisterAreaOffset() const { return this->SavedDoublesOffset(); }
    ULONG RegisterAreaSize() const { return this->TotalStackSize() - this->RegisterAreaOffset(); }

private:
    bool m_hasCalls;              // true if the function makes calls (forces FP/LR + arg slot)
    bool m_hasTry;                // true if the function contains a try region
    ULONG m_argSlotCount;         // outgoing argument slots (in MachRegInt units)
    ULONG m_localsArea;           // size of the locals area in bytes
    BitVector m_homedParams;      // which incoming param registers (x0-x7) get homed
    BitVector m_savedRegisters;   // which callee-saved GP registers get saved
    BitVector m_savedDoubles;     // which callee-saved double registers get saved
};
// Populates the layout from the function's register usage, call behavior and locals height.
// Precondition: the locals area and outgoing arg slot count are already 16-byte / pair aligned.
ARM64StackLayout::ARM64StackLayout(Func* func)
    : m_hasCalls(false),
    m_hasTry(func->HasTry()),
    m_argSlotCount(func->m_argSlotsForFunctionsCalled),
    m_localsArea(func->m_localStackHeight)
{
    Assert(m_localsArea % 16 == 0);
    Assert(m_argSlotCount % 2 == 0);

    // If there is a try, behave specially because the try/catch/finally helpers assume a
    // fully-populated stack layout: save every callee-saved GP and double register and home
    // all integer argument registers.
    if (this->m_hasTry)
    {
        this->m_hasCalls = true;
        this->m_savedRegisters.SetRange(FIRST_CALLEE_SAVED_GP_REG, CALLEE_SAVED_GP_REG_COUNT);
        this->m_savedDoubles.SetRange(FIRST_CALLEE_SAVED_DBL_REG, CALLEE_SAVED_DOUBLE_REG_COUNT);
        this->m_homedParams.SetRange(0, NUM_INT_ARG_REGS);
    }
    // Otherwise, be more selective
    else
    {
        // Determine integer register saves. Since registers are always saved in pairs (STP),
        // mark both registers in each pair as being saved even if only one is actually used.
        for (RegNum curReg = FIRST_CALLEE_SAVED_GP_REG; curReg <= LAST_CALLEE_SAVED_GP_REG; curReg = RegNum(curReg + 2))
        {
            Assert(LinearScan::IsCalleeSaved(curReg));
            RegNum nextReg = RegNum(curReg + 1);
            Assert(LinearScan::IsCalleeSaved(nextReg));
            if (func->m_regsUsed.Test(curReg) || func->m_regsUsed.Test(nextReg))
            {
                this->m_savedRegisters.SetRange(curReg, 2);
            }
        }

        // Determine double register saves. Since registers are always saved in pairs (STP),
        // mark both registers in each pair as being saved even if only one is actually used.
        for (RegNum curReg = FIRST_CALLEE_SAVED_DBL_REG; curReg <= LAST_CALLEE_SAVED_DBL_REG; curReg = RegNum(curReg + 2))
        {
            Assert(LinearScan::IsCalleeSaved(curReg));
            RegNum nextReg = RegNum(curReg + 1);
            Assert(LinearScan::IsCalleeSaved(nextReg));
            if (func->m_regsUsed.Test(curReg) || func->m_regsUsed.Test(nextReg))
            {
                this->m_savedDoubles.SetRange(curReg, 2);
            }
        }

        // Determine if there are nested calls. Also treat a function as "has calls" when it
        // has stack nested funcs, needs arg saves, or its frame is too big for a probe-free
        // allocation (the stack probe path itself makes a helper call).
        //
        // TODO-STACK-NESTED-FUNC: May be use a different arm64_CallEhFrame for when we have
        // stack nested func?
        //
        // Note that this->TotalStackSize() will not include the homed parameters yet, so we add in
        // the worst case assumption (homing all NUM_INT_ARG_REGS).
        this->m_hasCalls = func->GetHasCalls() ||
            func->HasAnyStackNestedFunc() ||
            !LowererMD::IsSmallStack(this->TotalStackSize() + NUM_INT_ARG_REGS * MachRegInt) ||
            Lowerer::IsArgSaveRequired(func);

        // Home the params. This is done to enable on-the-fly creation of the arguments object,
        // Dyno bailout code, etc. For non-global functions, that means homing all the param registers
        // (since we have to assume they all have valid parameters). For the global function,
        // just home x0 (function object) and x1 (callinfo), which the runtime can't get by any other means.
        int homedParams = MIN_HOMED_PARAM_REGS;
        if (func->IsLoopBody())
        {
            // Jitted loop body takes only one "user" param: the pointer to the local slots.
            homedParams += 1;
        }
        else if (!this->m_hasCalls)
        {
            // A leaf function (no calls of any kind, including helpers) may still need its params, or, if it
            // has none, may still need the function object and call info.
            homedParams += func->GetInParamsCount();
        }
        else
        {
            // Non-leaf: home everything.
            homedParams = NUM_INT_ARG_REGS;
        }

        // Round up to an even number to keep stack alignment (params are stored with STP)
        if (homedParams % 2 != 0)
        {
            homedParams += 1;
        }
        this->m_homedParams.SetRange(0, (homedParams < NUM_INT_ARG_REGS) ? homedParams : NUM_INT_ARG_REGS);
    }
}
  998. IR::Instr *
  999. LowererMD::LowerEntryInstr(IR::EntryInstr * entryInstr)
  1000. {
  1001. IR::Instr *insertInstr = entryInstr->m_next;
  1002. // Begin recording info for later pdata/xdata emission.
  1003. this->m_func->m_unwindInfo.Init(this->m_func);
  1004. // Ensure there are an even number of slots for called functions
  1005. if (this->m_func->m_argSlotsForFunctionsCalled % 2 != 0)
  1006. {
  1007. this->m_func->m_argSlotsForFunctionsCalled += 1;
  1008. }
  1009. if (this->m_func->HasInlinee())
  1010. {
  1011. // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
  1012. // we can unconditionally clear the first slot past the current frame.
  1013. this->m_func->m_localStackHeight += this->m_func->GetInlineeArgumentStackSize();
  1014. }
  1015. // Ensure the locals area is 16 byte aligned.
  1016. this->m_func->m_localStackHeight = Math::Align<int32>(this->m_func->m_localStackHeight, MachStackAlignment);
  1017. // Now that the localStackHeight has been adjusted, compute the final layout
  1018. ARM64StackLayout layout(this->m_func);
  1019. Assert(layout.TotalStackSize() % 16 == 0);
  1020. // Set the arguments offset relative to the end of the locals area
  1021. this->m_func->m_ArgumentsOffset = layout.HomedParamsOffset() - (layout.LocalsOffset() + layout.LocalsSize());
  1022. // Set the frame height if inlinee arguments are needed
  1023. if (m_func->GetMaxInlineeArgOutSize() != 0)
  1024. {
  1025. // subtracting 2 for frame pointer & return address
  1026. this->m_func->GetJITOutput()->SetFrameHeight(this->m_func->m_localStackHeight + this->m_func->m_ArgumentsOffset - 2 * MachRegInt);
  1027. }
  1028. // Two situations to handle:
  1029. //
  1030. // 1. If total stack allocation < 512, we can do a single allocation up front
  1031. // 2. Otherwise, we allocate the register area first, save regs, then allocate locals
  1032. //
  1033. // Breaking this down, there are two stack allocations
  1034. //
  1035. // Allocation 1 = situation1 ? TotalStackSize : RegisterAreaSize
  1036. // Allocation 2 = TotalStackSize - Allocation 1
  1037. //
  1038. // <probe>
  1039. // prologStart:
  1040. // sub sp, sp, #allocation1
  1041. // stp d8-d15, [sp, #savedDoublesOffset - allocation2]
  1042. // stp x19-x28, [sp, #savedRegistersOffset - allocation2]
  1043. // stp fp, lr, [sp, #fpLrOffset - allocation2]
  1044. // add fp, sp, #fpLrOffset - allocation2
  1045. // sub sp, sp, #allocation2 (might be call to _chkstk)
  1046. // prologEnd:
  1047. // stp zr, zr, [fp, #argSlotOffset - fpLrOffset]
  1048. // stp x0-x7, [fp, #paramSaveOffset - fpLrOffset]
  1049. // add localsptr, sp, #localsOffset
  1050. // sub ehsave, fp, #fpLrOffset - registerAreaOffset
  1051. //
  1052. // Determine the 1 or 2 stack allocation sizes
  1053. ULONG stackAllocation1 = (layout.TotalStackSize() < 512) ? layout.TotalStackSize() : layout.RegisterAreaSize();
  1054. ULONG stackAllocation2 = layout.TotalStackSize() - stackAllocation1;
  1055. // this->GenerateDebugBreak(insertInstr);
  1056. // Generate a stack probe for large stacks first even before register push
  1057. bool fStackProbeAfterProlog = IsSmallStack(layout.TotalStackSize());
  1058. if (!fStackProbeAfterProlog)
  1059. {
  1060. GenerateStackProbe(insertInstr, false);
  1061. }
  1062. // Create the prologStart label
  1063. IR::LabelInstr *prologStartLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  1064. insertInstr->InsertBefore(prologStartLabel);
  1065. this->m_func->m_unwindInfo.SetFunctionOffsetLabel(UnwindPrologStart, prologStartLabel);
  1066. // Perform the initial stack allocation (guaranteed to be small)
  1067. IR::RegOpnd *spOpnd = IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func);
  1068. if (stackAllocation1 > 0)
  1069. {
  1070. IR::Instr * instrSub = IR::Instr::New(Js::OpCode::SUB, spOpnd, spOpnd, IR::IntConstOpnd::New(stackAllocation1, TyMachReg, this->m_func), this->m_func);
  1071. insertInstr->InsertBefore(instrSub);
  1072. }
  1073. // Save doubles in pairs
  1074. if (!layout.SavedDoubles().IsEmpty())
  1075. {
  1076. ULONG curOffset = layout.SavedDoublesOffset() - stackAllocation2;
  1077. for (RegNum curReg = FIRST_CALLEE_SAVED_DBL_REG; curReg <= LAST_CALLEE_SAVED_DBL_REG; curReg = RegNum(curReg + 2))
  1078. {
  1079. if (layout.SavedDoubles().Test(curReg))
  1080. {
  1081. RegNum nextReg = RegNum(curReg + 1);
  1082. IR::Instr * instrStp = IR::Instr::New(Js::OpCode::FSTP,
  1083. IR::IndirOpnd::New(spOpnd, curOffset, TyMachReg, this->m_func),
  1084. IR::RegOpnd::New(curReg, TyMachDouble, this->m_func),
  1085. IR::RegOpnd::New(nextReg, TyMachDouble, this->m_func), this->m_func);
  1086. insertInstr->InsertBefore(instrStp);
  1087. curOffset += 2 * MachRegDouble;
  1088. }
  1089. }
  1090. }
  1091. // Save integer registers in pairs
  1092. if (!layout.SavedRegisters().IsEmpty())
  1093. {
  1094. ULONG curOffset = layout.SavedRegistersOffset() - stackAllocation2;
  1095. for (RegNum curReg = FIRST_CALLEE_SAVED_GP_REG; curReg <= LAST_CALLEE_SAVED_GP_REG; curReg = RegNum(curReg + 2))
  1096. {
  1097. if (layout.SavedRegisters().Test(curReg))
  1098. {
  1099. RegNum nextReg = RegNum(curReg + 1);
  1100. IR::Instr * instrStp = IR::Instr::New(Js::OpCode::STP,
  1101. IR::IndirOpnd::New(spOpnd, curOffset, TyMachReg, this->m_func),
  1102. IR::RegOpnd::New(curReg, TyMachReg, this->m_func),
  1103. IR::RegOpnd::New(nextReg, TyMachReg, this->m_func), this->m_func);
  1104. insertInstr->InsertBefore(instrStp);
  1105. curOffset += 2 * MachRegInt;
  1106. }
  1107. }
  1108. }
  1109. // Save FP/LR and compute FP
  1110. IR::RegOpnd *fpOpnd = fpOpnd = IR::RegOpnd::New(nullptr, RegFP, TyMachReg, this->m_func);
  1111. if (layout.HasCalls())
  1112. {
  1113. // STP fp, lr, [sp, #offs]
  1114. ULONG fpOffset = layout.FpLrOffset() - stackAllocation2;
  1115. IR::Instr * instrStp = IR::Instr::New(Js::OpCode::STP,
  1116. IR::IndirOpnd::New(spOpnd, fpOffset, TyMachReg, this->m_func),
  1117. fpOpnd, IR::RegOpnd::New(RegLR, TyMachReg, this->m_func), this->m_func);
  1118. insertInstr->InsertBefore(instrStp);
  1119. // ADD fp, sp, #offs
  1120. // For exception handling, do this part AFTER the prolog to allow for proper unwinding
  1121. if (!layout.HasTry())
  1122. {
  1123. Lowerer::InsertAdd(false, fpOpnd, spOpnd, IR::IntConstOpnd::New(fpOffset, TyMachReg, this->m_func), insertInstr);
  1124. }
  1125. }
  1126. // Perform the second (potentially large) stack allocation
  1127. if (stackAllocation2 > 0)
  1128. {
  1129. // TODO: is the probeSize parameter correct here?
  1130. this->GenerateStackAllocation(insertInstr, stackAllocation2, stackAllocation1 + stackAllocation2);
  1131. }
  1132. // Future work in the register area should be done FP-relative if it is set up
  1133. IR::RegOpnd *regAreaBaseOpnd = layout.HasCalls() ? fpOpnd : spOpnd;
  1134. ULONG regAreaBaseOffset = layout.HasCalls() ? layout.FpLrOffset() : 0;
  1135. // This marks the end of the formal prolog (for EH purposes); create and register a label
  1136. IR::LabelInstr *prologEndLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  1137. insertInstr->InsertBefore(prologEndLabel);
  1138. this->m_func->m_unwindInfo.SetFunctionOffsetLabel(UnwindPrologEnd, prologEndLabel);
  1139. // Compute the FP now if there is a try present
  1140. if (layout.HasTry())
  1141. {
  1142. Lowerer::InsertAdd(false, fpOpnd, spOpnd, IR::IntConstOpnd::New(layout.FpLrOffset(), TyMachReg, this->m_func), insertInstr);
  1143. }
  1144. // Zero the argument slot if present
  1145. IR::RegOpnd *zrOpnd = IR::RegOpnd::New(nullptr, RegZR, TyMachReg, this->m_func);
  1146. if (layout.ArgSlotSize() > 0)
  1147. {
  1148. IR::Instr * instrStp = IR::Instr::New(Js::OpCode::STP,
  1149. IR::IndirOpnd::New(regAreaBaseOpnd, layout.ArgSlotOffset() - regAreaBaseOffset, TyMachReg, this->m_func),
  1150. zrOpnd, zrOpnd, this->m_func);
  1151. insertInstr->InsertBefore(instrStp);
  1152. }
  1153. // Home parameter registers in pairs
  1154. if (!layout.HomedParams().IsEmpty())
  1155. {
  1156. ULONG curOffset = layout.HomedParamsOffset() - regAreaBaseOffset;
  1157. for (RegNum curReg = FIRST_INT_ARG_REG; curReg <= LAST_INT_ARG_REG; curReg = RegNum(curReg + 2))
  1158. {
  1159. if (layout.HomedParams().Test(curReg))
  1160. {
  1161. RegNum nextReg = RegNum(curReg + 1);
  1162. IR::Instr * instrStp = IR::Instr::New(Js::OpCode::STP,
  1163. IR::IndirOpnd::New(regAreaBaseOpnd, curOffset, TyMachReg, this->m_func),
  1164. IR::RegOpnd::New(curReg, TyMachReg, this->m_func),
  1165. IR::RegOpnd::New(nextReg, TyMachReg, this->m_func), this->m_func);
  1166. insertInstr->InsertBefore(instrStp);
  1167. curOffset += 2 * MachRegInt;
  1168. }
  1169. }
  1170. }
  1171. // Compute the locals pointer if needed
  1172. RegNum localsReg = this->m_func->GetLocalsPointer();
  1173. if (localsReg != RegSP)
  1174. {
  1175. IR::RegOpnd* localsOpnd = IR::RegOpnd::New(nullptr, localsReg, TyMachReg, this->m_func);
  1176. Lowerer::InsertAdd(false, localsOpnd, spOpnd, IR::IntConstOpnd::New(layout.LocalsOffset(), TyMachReg, this->m_func), insertInstr);
  1177. }
  1178. // Zero initialize the first inlinee frames argc.
  1179. if (this->m_func->GetMaxInlineeArgOutSize() != 0)
  1180. {
  1181. // STR argc, zr
  1182. StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot) - 1);
  1183. sym->m_isInlinedArgSlot = true;
  1184. sym->m_offset = 0;
  1185. IR::Instr * instrStr = IR::Instr::New(Js::OpCode::STR, IR::SymOpnd::New(sym, 0, TyMachReg, this->m_func), zrOpnd, this->m_func);
  1186. insertInstr->InsertBefore(instrStr);
  1187. }
  1188. // Now do the stack probe for small stacks
  1189. // hasCalls catches the recursion case
  1190. if (layout.HasCalls() && fStackProbeAfterProlog)
  1191. {
  1192. GenerateStackProbe(insertInstr, true); //stack is already aligned in this case
  1193. }
  1194. return entryInstr;
  1195. }
// Emits the ARM64 epilog: deallocate the locals area, restore FP/LR and callee-saved
// registers (in the reverse of the prolog's save order), deallocate the register area,
// and return. Must mirror the layout computed in LowerEntryInstr.
IR::Instr *
LowererMD::LowerExitInstr(IR::ExitInstr * exitInstr)
{
    // Compute the final layout (should match the prolog)
    ARM64StackLayout layout(this->m_func);
    Assert(layout.TotalStackSize() % 16 == 0);

    // Determine the 1 or 2 stack allocation sizes
    // Note that on exit, if there is a try, we always do a 2-step deallocation because the
    // epilog is re-used by the try/catch/finally code
    ULONG stackAllocation1 = (layout.TotalStackSize() < 512 && !layout.HasTry()) ? layout.TotalStackSize() : layout.RegisterAreaSize();
    ULONG stackAllocation2 = layout.TotalStackSize() - stackAllocation1;

    // Mark the start of the epilog
    IR::LabelInstr *epilogStartLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    exitInstr->InsertBefore(epilogStartLabel);
    this->m_func->m_unwindInfo.SetFunctionOffsetLabel(UnwindEpilogStart, epilogStartLabel);

    IR::RegOpnd *spOpnd = IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func);
    IR::RegOpnd *fpOpnd = IR::RegOpnd::New(nullptr, RegFP, TyMachReg, this->m_func);

    // Exception handling regions exit via the same epilog; relocate the shared label here
    IR::LabelInstr* ehEpilogLabel = this->m_func->m_epilogLabel;
    if (ehEpilogLabel != nullptr)
    {
        ehEpilogLabel->Unlink();
        exitInstr->InsertBefore(ehEpilogLabel);
    }

    // Undo the last stack allocation (the locals/arg-slot area)
    if (stackAllocation2 > 0)
    {
        GenerateStackDeallocation(exitInstr, stackAllocation2);
    }

    // Recover FP and LR
    if (layout.HasCalls())
    {
        // LDP fp, lr, [sp, #offs]
        ULONG fpOffset = layout.FpLrOffset() - stackAllocation2;
        IR::Instr * instrLdp = IR::Instr::New(Js::OpCode::LDP, fpOpnd,
            IR::IndirOpnd::New(spOpnd, fpOffset, TyMachReg, this->m_func),
            IR::RegOpnd::New(RegLR, TyMachReg, this->m_func), this->m_func);
        exitInstr->InsertBefore(instrLdp);
    }

    // Recover integer registers in pairs, walking pair-base registers from the highest
    // pair downward (reverse of the prolog's save order; offsets count down from the
    // end of the saved-register area)
    if (!layout.SavedRegisters().IsEmpty())
    {
        ULONG curOffset = layout.SavedRegistersOffset() - stackAllocation2 + layout.SavedRegistersSize();
        for (RegNum curReg = RegNum(LAST_CALLEE_SAVED_GP_REG - 1); curReg >= FIRST_CALLEE_SAVED_GP_REG; curReg = RegNum(curReg - 2))
        {
            if (layout.SavedRegisters().Test(curReg))
            {
                curOffset -= 2 * MachRegInt;
                RegNum nextReg = RegNum(curReg + 1);
                IR::Instr * instrLdp = IR::Instr::New(Js::OpCode::LDP,
                    IR::RegOpnd::New(curReg, TyMachReg, this->m_func),
                    IR::IndirOpnd::New(spOpnd, curOffset, TyMachReg, this->m_func),
                    IR::RegOpnd::New(nextReg, TyMachReg, this->m_func), this->m_func);
                exitInstr->InsertBefore(instrLdp);
            }
        }
    }

    // Recover doubles in pairs (same reverse-order walk as the integer registers)
    if (!layout.SavedDoubles().IsEmpty())
    {
        ULONG curOffset = layout.SavedDoublesOffset() - stackAllocation2 + layout.SavedDoublesSize();
        for (RegNum curReg = RegNum(LAST_CALLEE_SAVED_DBL_REG - 1); curReg >= FIRST_CALLEE_SAVED_DBL_REG; curReg = RegNum(curReg - 2))
        {
            if (layout.SavedDoubles().Test(curReg))
            {
                curOffset -= 2 * MachRegDouble;
                RegNum nextReg = RegNum(curReg + 1);
                IR::Instr * instrLdp = IR::Instr::New(Js::OpCode::FLDP,
                    IR::RegOpnd::New(curReg, TyMachDouble, this->m_func),
                    IR::IndirOpnd::New(spOpnd, curOffset, TyMachReg, this->m_func),
                    IR::RegOpnd::New(nextReg, TyMachDouble, this->m_func), this->m_func);
                exitInstr->InsertBefore(instrLdp);
            }
        }
    }

    // Final stack deallocation (the register area, or the whole frame for small frames)
    if (stackAllocation1 > 0)
    {
        GenerateStackDeallocation(exitInstr, stackAllocation1);
    }

    // Return
    IR::Instr * instrRet = IR::Instr::New(Js::OpCode::RET, nullptr, IR::RegOpnd::New(nullptr, RegLR, TyMachReg, this->m_func), this->m_func);
    exitInstr->InsertBefore(instrRet);

    // Label the end for unwind-info emission
    IR::LabelInstr *epilogEndLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    exitInstr->InsertBefore(epilogEndLabel);
    this->m_func->m_unwindInfo.SetFunctionOffsetLabel(UnwindEpilogEnd, epilogEndLabel);

    return exitInstr;
}
  1285. IR::Instr *
  1286. LowererMD::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * argSrc, ushort extraArgs)
  1287. {
  1288. // Spread moves down the argument slot by one.
  1289. // LowerCallArgs will handle the extraArgs. We only need to specify the argument number
  1290. // i.e 1 and not + extraArgs as done in AMD64
  1291. IR::SymOpnd *argOpnd = IR::SymOpnd::New(this->m_func->m_symTable->GetArgSlotSym(1), TyVar, this->m_func);
  1292. IR::Instr *argInstr = IR::Instr::New(Js::OpCode::ArgOut_A, argOpnd, argSrc, this->m_func);
  1293. instr->InsertBefore(argInstr);
  1294. // Insert the argument into the arg chain.
  1295. if (m_lowerer->IsSpreadCall(instr))
  1296. {
  1297. // Spread calls need LdSpreadIndices as the last arg in the arg chain.
  1298. instr = m_lowerer->GetLdSpreadIndicesInstr(instr);
  1299. }
  1300. IR::Opnd *linkOpnd = instr->UnlinkSrc2();
  1301. argInstr->SetSrc2(linkOpnd);
  1302. instr->SetSrc2(argOpnd);
  1303. return argInstr;
  1304. }
// Lowers a try instruction into a call to the EH helper (arm64_CallEhFrame flavor selected
// by 'helperMethod'). Arguments are pushed in reverse order (Arg 7 first, Arg 1 last) via
// LoadHelperArgument; the helper returns a continuation address that we branch to.
IR::Instr *
LowererMD::LowerTry(IR::Instr * tryInstr, IR::JnHelperMethod helperMethod)
{
    // Mark the entry to the try
    IR::Instr * instr = tryInstr->GetNextRealInstrOrLabel();
    AssertMsg(instr->IsLabelInstr(), "No label at the entry to a try?");
    IR::LabelInstr * tryAddr = instr->AsLabelInstr();

    // Arg 7: ScriptContext
    this->m_lowerer->LoadScriptContext(tryAddr);

    if (tryInstr->m_opcode == Js::OpCode::TryCatch || this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout))
    {
        // Arg 6 : hasBailedOutOffset
        IR::Opnd * hasBailedOutOffset = IR::IntConstOpnd::New(this->m_func->m_hasBailedOutSym->m_offset + tryInstr->m_func->GetInlineeArgumentStackSize(), TyInt32, this->m_func);
        this->LoadHelperArgument(tryAddr, hasBailedOutOffset);
    }

    // Arg 5: arg out size
    IR::RegOpnd * argOutSize = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LDARGOUTSZ, argOutSize, this->m_func);
    tryAddr->InsertBefore(instr);
    this->LoadHelperArgument(tryAddr, argOutSize);

    // Arg 4: locals pointer
    IR::RegOpnd * localsPtr = IR::RegOpnd::New(nullptr, this->m_func->GetLocalsPointer(), TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, localsPtr);

    // Arg 3: frame pointer
    IR::RegOpnd * framePtr = IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, framePtr);

    // Arg 2: helper address (the catch/finally handler label)
    IR::LabelInstr * helperAddr = tryInstr->AsBranchInstr()->GetTarget();
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(helperAddr, this->m_func));

    // Arg 1: try address
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(tryAddr, this->m_func));

    // Call the helper; it returns the continuation address in the return register
    IR::RegOpnd *continuationAddr =
        IR::RegOpnd::New(StackSym::New(TyMachReg,this->m_func), RETURN_REG, TyMachReg, this->m_func);
    IR::Instr * callInstr = IR::Instr::New(
        Js::OpCode::Call, continuationAddr, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    tryAddr->InsertBefore(callInstr);
    this->LowerCall(callInstr, 0);

    // Jump to the continuation address supplied by the helper
    IR::BranchInstr *branchInstr = IR::MultiBranchInstr::New(Js::OpCode::BR, continuationAddr, this->m_func);
    tryAddr->InsertBefore(branchInstr);

    return tryInstr->m_prev;
}
  1348. IR::Instr *
  1349. LowererMD::LowerLeaveNull(IR::Instr * leaveInstr)
  1350. {
  1351. IR::Instr * instrPrev = leaveInstr->m_prev;
  1352. // Return a NULL continuation address to the caller to indicate that the finally did not seize the flow.
  1353. this->LowerEHRegionReturn(leaveInstr, IR::IntConstOpnd::New(0, TyMachReg, this->m_func));
  1354. leaveInstr->Remove();
  1355. return instrPrev;
  1356. }
  1357. IR::Instr *
  1358. LowererMD::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  1359. {
  1360. IR::RegOpnd *retReg = IR::RegOpnd::New(nullptr, RETURN_REG, TyMachReg, this->m_func);
  1361. // Load the continuation address into the return register.
  1362. Lowerer::InsertMove(retReg, targetOpnd, insertBeforeInstr);
  1363. IR::LabelInstr *epilogLabel = this->EnsureEHEpilogLabel();
  1364. IR::BranchInstr *jmpInstr = IR::BranchInstr::New(Js::OpCode::B, epilogLabel, this->m_func);
  1365. insertBeforeInstr->InsertBefore(jmpInstr);
  1366. // return the last instruction inserted
  1367. return jmpInstr;
  1368. }
  1369. ///----------------------------------------------------------------------------
  1370. ///
  1371. /// LowererMD::Init
  1372. ///
  1373. ///----------------------------------------------------------------------------
  1374. void
  1375. LowererMD::Init(Lowerer *lowerer)
  1376. {
  1377. m_lowerer = lowerer;
  1378. // The arg slot count computed by an earlier phase (e.g., IRBuilder) doesn't work for
  1379. // ARM if it accounts for nesting. Clear it here and let Lower compute its own value.
  1380. this->m_func->m_argSlotsForFunctionsCalled = 0;
  1381. }
  1382. ///----------------------------------------------------------------------------
  1383. ///
  1384. /// LowererMD::LoadInputParamPtr
  1385. ///
  1386. /// Load the address of the start of the passed-in parameters not including
  1387. /// the this parameter.
  1388. ///
  1389. ///----------------------------------------------------------------------------
  1390. IR::Instr *
  1391. LowererMD::LoadInputParamPtr(IR::Instr * instrInsert, IR::RegOpnd * optionalDstOpnd /* = nullptr */)
  1392. {
  1393. if (this->m_func->GetJITFunctionBody()->IsCoroutine())
  1394. {
  1395. IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
  1396. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
  1397. IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
  1398. return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
  1399. }
  1400. else
  1401. {
  1402. StackSym * paramSym = GetImplicitParamSlotSym(3);
  1403. return this->m_lowerer->InsertLoadStackAddress(paramSym, instrInsert);
  1404. }
  1405. }
///----------------------------------------------------------------------------
///
/// LowererMD::LoadInputParamCount
///
/// Load the passed-in parameter count from the appropriate slot, optionally
/// applying 'adjust' and setting flags (via the final SUB) when 'needFlags'.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LoadInputParamCount(IR::Instr * instrInsert, int adjust, bool needFlags)
{
    // Emitted sequence:
    //   LDR  Rz, CallInfo
    //   UBFX Rz, Rz, #0, #ksizeofCount    // extract the argument-count bitfield
    //   SUB  Rz, Rz, #(-adjust)           // apply the requested adjustment
    IR::SymOpnd * srcOpnd = Lowerer::LoadCallInfo(instrInsert);
    IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

    IR::Instr *instr = IR::Instr::New(Js::OpCode::LDR, dstOpnd, srcOpnd, this->m_func);
    instrInsert->InsertBefore(instr);

    // Get the actual call count. On ARM64 top 32 bits are unused
    instr = IR::Instr::New(Js::OpCode::UBFX, dstOpnd, dstOpnd, IR::IntConstOpnd::New(BITFIELD(0, Js::CallInfo::ksizeofCount), TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);

    // Subtracting -adjust adds the adjustment; the SUB also produces flags when requested.
    return Lowerer::InsertSub(needFlags, dstOpnd, dstOpnd, IR::IntConstOpnd::New(-adjust, TyUint32, this->m_func), instrInsert);
}
IR::Instr *
LowererMD::LoadStackArgPtr(IR::Instr * instr)
{
    // Rewrites 'instr' in place into an ADD computing the address of the first
    // user argument (the one after "this"), then legalizes it. Returns the
    // instruction preceding the rewritten one so lowering resumes there.
    if (this->m_func->IsLoopBody())
    {
        // Get the first user param from the interpreter frame instance that was passed in.
        // These args don't include the func object and callinfo; we just need to advance past "this".
        //     t1  = LDR [prm1 + m_inParams]
        //     dst = ADD t1, sizeof(var)
        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInParams();
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        Lowerer::InsertMove(tmpOpnd, indirOpnd, instr);

        instr->SetSrc1(tmpOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(sizeof(Js::Var), TyMachReg, this->m_func));
    }
    else if (this->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        // Coroutine args live off-stack: delegate to LoadInputParamPtr (handing
        // it our unlinked dst) and discard the original instruction entirely.
        IR::Instr *instr2 = LoadInputParamPtr(instr, instr->UnlinkDst()->AsRegOpnd());
        instr->Remove();
        instr = instr2;
    }
    else
    {
        // Get the args pointer relative to fp. We assume that fp is set up, since we'll only be looking
        // for the stack arg pointer in a non-leaf.
        //     dst = ADD fp, "this" offset + sizeof(var)
        instr->SetSrc1(IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func));
        instr->SetSrc2(IR::IntConstOpnd::New((ArgOffsetFromFramePtr + Js::JavascriptFunctionArgIndex_SecondScriptArg) * sizeof(Js::Var), TyMachReg, this->m_func));
    }

    instr->m_opcode = Js::OpCode::ADD;
    Legalize(instr);

    return instr->m_prev;
}
  1465. IR::Instr *
  1466. LowererMD::LoadArgumentsFromFrame(IR::Instr * instr)
  1467. {
  1468. IR::RegOpnd *baseOpnd;
  1469. int32 offset;
  1470. if (this->m_func->IsLoopBody())
  1471. {
  1472. // Get the arguments ptr from the interpreter frame instance that was passed in.
  1473. Assert(this->m_func->m_loopParamSym);
  1474. baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  1475. offset = Js::InterpreterStackFrame::GetOffsetOfArguments();
  1476. }
  1477. else
  1478. {
  1479. // Get the arguments relative to the frame pointer.
  1480. baseOpnd = IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func);
  1481. offset = -MachArgsSlotOffset;
  1482. }
  1483. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyMachReg, this->m_func));
  1484. this->ChangeToAssign(instr);
  1485. return instr->m_prev;
  1486. }
  1487. // load argument count as I4
  1488. IR::Instr *
  1489. LowererMD::LoadArgumentCount(IR::Instr * instr)
  1490. {
  1491. IR::RegOpnd *baseOpnd;
  1492. int32 offset;
  1493. if (this->m_func->IsLoopBody())
  1494. {
  1495. // Pull the arg count from the interpreter frame instance that was passed in.
  1496. // (The callinfo in the loop body's frame just shows the single parameter, the interpreter frame.)
  1497. Assert(this->m_func->m_loopParamSym);
  1498. baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  1499. offset = Js::InterpreterStackFrame::GetOffsetOfInSlotsCount();
  1500. }
  1501. else
  1502. {
  1503. baseOpnd = IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func);
  1504. offset = (ArgOffsetFromFramePtr + Js::JavascriptFunctionArgIndex_CallInfo) * sizeof(Js::Var);
  1505. }
  1506. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyInt32, this->m_func));
  1507. this->ChangeToAssign(instr);
  1508. return instr->m_prev;
  1509. }
  1510. ///----------------------------------------------------------------------------
  1511. ///
  1512. /// LowererMD::LoadHeapArguments
  1513. ///
  1514. /// Load the arguments object
  1515. /// NOTE: The same caveat regarding arguments passed on the stack applies here
  1516. /// as in LoadInputParamCount above.
  1517. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LoadHeapArguments(IR::Instr * instrArgs)
{
    // Lower Ld(Let)HeapArguments. With the stack-args optimization enabled the
    // heap arguments object is never created here (the slot is seeded with
    // null); otherwise emit a call to the LoadHeapArguments helper and store
    // its result into the dedicated arguments-object stack slot.
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;

    IR::Instr * instrPrev = instrArgs->m_prev;
    if (func->IsStackArgsEnabled())
    {
        // The initial args slot value is zero.
        instrArgs->m_opcode = Js::OpCode::LDIMM;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // Helper arguments are queued in reverse (s7 first, s1 last):
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)

        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));

        // s6 = memory context
        this->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = array of property ID's (falls back to the script context's null
        // address when the body recorded no formals array)
        intptr_t formalsPropIdArray = instrArgs->m_func->GetJITFunctionBody()->GetFormalsPropIdArrayAddr();
        if (!formalsPropIdArray)
        {
            formalsPropIdArray = instrArgs->m_func->GetScriptContextInfo()->GetNullAddr();
        }
        IR::Opnd * argArray = IR::AddrOpnd::New(formalsPropIdArray, IR::AddrOpndKindDynamicMisc, m_func);
        this->LoadHelperArgument(instrArgs, argArray);

        // s4 = local frame instance
        IR::Opnd * frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            // s3 = address of first actual argument (after "this").
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->m_lowerer->InsertLoadStackAddress(firstRealArgSlotSym, instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this").
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func));

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            Lowerer::InsertMove(argObjSlotOpnd, instrArgs->GetDst(), instrArgs->m_next);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            // Stack looks like (function object)+0, (arg count)+4, (this)+8, actual args
            IR::Instr * instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this")
            instr = this->LoadInputParamCount(instrArgs, -1);
            IR::Opnd * opndInputParamCount = instr->GetDst();
            this->LoadHelperArgument(instrArgs, opndInputParamCount);

            // s1 = current function (implicit param slot 0)
            StackSym * paramSym = GetImplicitParamSlotSym(0);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            Lowerer::InsertMove(CreateStackArgumentsSlotOpnd(), instrArgs->GetDst(), instrArgs->m_next);
        }

        this->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
  1596. ///----------------------------------------------------------------------------
  1597. ///
  1598. /// LowererMD::LoadHeapArgsCached
  1599. ///
  1600. /// Load the heap-based arguments object using a cached scope
  1601. ///
  1602. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LoadHeapArgsCached(IR::Instr * instrArgs)
{
    // Lower Ld(Let)HeapArgsCached: like LoadHeapArguments, but the helper also
    // receives the formal count so it can use the cached scope. With stack-args
    // enabled, no object is created and the slot is seeded with null.
    Assert(!this->m_func->GetJITFunctionBody()->IsGenerator());
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr * instrPrev = instrArgs->m_prev;

    if (instrArgs->m_func->IsStackArgsEnabled())
    {
        instrArgs->m_opcode = Js::OpCode::LDIMM;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));

        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // Helper arguments are queued in reverse (s7 first, s1 last):
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = local frame instance
        // s4 = address of first actual argument (after "this")
        // s3 = formal argument count
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArgsCached(s1, s2, s3, s4, s5, s6, s7)

        // s7 = formals are let decls
        IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
        this->LoadHelperArgument(instrArgs, formalsAreLetDecls);

        // s6 = memory context
        this->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = local frame instance
        IR::Opnd * frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            // s4 = address of first actual argument (after "this").
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->m_lowerer->InsertLoadStackAddress(firstRealArgSlotSym, instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this").
            uint32 formalsCount = func->GetJITFunctionBody()->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyUint32, func));

            // s2 = actual argument count (without counting "this").
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func));

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            Lowerer::InsertMove(argObjSlotOpnd, instrArgs->GetDst(), instrArgs->m_next);
        }
        else
        {
            // s4 = address of first actual argument (after "this")
            IR::Instr * instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this")
            uint32 formalsCount = func->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));

            // s2 = actual argument count (without counting "this")
            instr = this->LoadInputParamCount(instrArgs, -1);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function (implicit param slot 0)
            StackSym * paramSym = GetImplicitParamSlotSym(0);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            Lowerer::InsertMove(CreateStackArgumentsSlotOpnd(), instrArgs->GetDst(), instrArgs->m_next);
        }

        this->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    }
    return instrPrev;
}
  1678. ///----------------------------------------------------------------------------
  1679. ///
  1680. /// LowererMD::ChangeToHelperCall
  1681. ///
  1682. /// Change the current instruction to a call to the given helper.
  1683. ///
  1684. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::ChangeToHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod, IR::LabelInstr *labelBailOut,
                              IR::Opnd *opndInstance, IR::PropertySymOpnd *propSymOpnd, bool isHelperContinuation)
{
    // Turn 'callInstr' into a call to 'helperMethod', assuming its arguments
    // have already been queued via LoadHelperArgument. If the instruction
    // carries bailout info, the bailout check is split off and lowered after
    // the call is lowered (see the comment below). Returns LowerCall's result.
#if DBG
    this->m_lowerer->ReconcileWithLowererStateOnHelperCall(callInstr, helperMethod);
#endif
    IR::Instr * bailOutInstr = callInstr;
    if (callInstr->HasBailOutInfo())
    {
        if (callInstr->GetBailOutKind() == IR::BailOutOnNotPrimitive)
        {
            // Move the call onto a fresh instruction placed just before the
            // original; the original instruction is repurposed as the
            // BailOnNotPrimitive check on 'opndInstance'.
            callInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
            bailOutInstr->TransferTo(callInstr);
            bailOutInstr->InsertBefore(callInstr);

            bailOutInstr->m_opcode = Js::OpCode::BailOnNotPrimitive;
            bailOutInstr->SetSrc1(opndInstance);
        }
        else
        {
            bailOutInstr = this->m_lowerer->SplitBailOnImplicitCall(callInstr);
        }
    }

    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->GetHelperArgsCount(), m_func);
    if (helperCallOpnd->IsDiagHelperCallOpnd())
    {
        // Load arguments for the wrapper: the original helper address plus the
        // script context.
        this->LoadHelperArgument(callInstr, IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKindDynamicMisc, m_func));
        this->m_lowerer->LoadScriptContext(callInstr);
    }
    callInstr->SetSrc1(helperCallOpnd);

    IR::Instr * instrRet = this->LowerCall(callInstr, 0);

    if (bailOutInstr != callInstr)
    {
        // The bailout needs to be lowered after we lower the helper call because the helper argument
        // has already been loaded. We need to drain them on AMD64 before starting another helper call
        if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotObject)
        {
            this->m_lowerer->LowerBailOnNotObject(bailOutInstr, nullptr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotPrimitive)
        {
            this->m_lowerer->LowerBailOnTrue(bailOutInstr, labelBailOut);
        }
        else
        {
            this->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr, nullptr, labelBailOut, propSymOpnd, isHelperContinuation);
        }
    }

    return instrRet;
}
  1736. IR::Instr* LowererMD::ChangeToHelperCallMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  1737. {
  1738. this->m_lowerer->LoadScriptContext(instr);
  1739. return this->ChangeToHelperCall(instr, helperMethod);
  1740. }
  1741. ///----------------------------------------------------------------------------
  1742. ///
  1743. /// LowererMD::ChangeToAssign
  1744. ///
  1745. /// Change to a copy. Handle riscification of operands.
  1746. ///
  1747. ///----------------------------------------------------------------------------
// TODO (SaAgarwa): Copied from ARM32 to compile. Validate that this is correct.
  1749. IR::Instr *
  1750. LowererMD::ChangeToAssignNoBarrierCheck(IR::Instr * instr)
  1751. {
  1752. return ChangeToAssign(instr, instr->GetDst()->GetType());
  1753. }
  1754. IR::Instr *
  1755. LowererMD::ChangeToAssign(IR::Instr * instr)
  1756. {
  1757. return ChangeToAssign(instr, instr->GetDst()->GetType());
  1758. }
IR::Instr *
LowererMD::ChangeToAssign(IR::Instr * instr, IRType destType)
{
    // Rewrite 'instr' in place as a machine copy, choosing the opcode by
    // operand kinds:
    //   - immediate/label source         -> LDIMM
    //   - float32 register destination   -> FLDR, operands widened to TyFloat64
    //   - widening int reg-to-reg move   -> SBFX / UBFX (sign / zero extend)
    //   - everything else                -> MOV or FMOV
    // The instruction is legalized before returning.
    Assert(!instr->HasBailOutInfo() || instr->GetBailOutKind() == IR::BailOutExpectingInteger
        || instr->GetBailOutKind() == IR::BailOutExpectingString);

    IR::Opnd *src = instr->GetSrc1();
    IRType srcType = src->GetType();
    if (src->IsImmediateOpnd() || src->IsLabelOpnd())
    {
        instr->m_opcode = Js::OpCode::LDIMM;
    }
    else if(destType == TyFloat32 && instr->GetDst()->IsRegOpnd())
    {
        Assert(instr->GetSrc1()->IsFloat32());
        instr->m_opcode = Js::OpCode::FLDR;

        // Note that we allocate double register for single precision floats as well, as the register allocator currently
        // does not support 32-bit float registers
        instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyFloat64, instr->m_func));
        if(instr->GetSrc1()->IsRegOpnd())
        {
            instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyFloat64, instr->m_func));
        }
    }
    else if (!src->IsIndirOpnd() && TySize[destType] > TySize[srcType] && (IRType_IsSignedInt(destType) || IRType_IsUnsignedInt(destType)))
    {
        // If we're moving between different lengths of registers, we need to use the
        // right operator - sign extend if the source is int, zero extend if uint.
        // The bitfield src2 selects bits [0, srcSize*8) of the source.
        if (IRType_IsSignedInt(srcType))
        {
            instr->ReplaceSrc1(src->UseWithNewType(IRType_EnsureSigned(destType), instr->m_func));
            instr->SetSrc2(IR::IntConstOpnd::New(BITFIELD(0, TySize[srcType] * MachBits), TyMachReg, instr->m_func, true));
            instr->m_opcode = Js::OpCode::SBFX;
        }
        else if (IRType_IsUnsignedInt(srcType))
        {
            instr->ReplaceSrc1(src->UseWithNewType(IRType_EnsureUnsigned(destType), instr->m_func));
            instr->SetSrc2(IR::IntConstOpnd::New(BITFIELD(0, TySize[srcType] * MachBits), TyMachReg, instr->m_func, true));
            instr->m_opcode = Js::OpCode::UBFX;
        }
        else
        {
            AssertMsg(false, "argument size mismatch for mov instruction, with non int/uint types!");
        }
    }
    else
    {
        instr->m_opcode = IRType_IsFloat(destType) ? Js::OpCode::FMOV : Js::OpCode::MOV;
    }

    // NOTE(review): presumably switches the legalization mode for this scope
    // and restores it on return -- confirm against AutoRestoreLegalize's ctor.
    AutoRestoreLegalize restore(instr->m_func, false);
    LegalizeMD::LegalizeInstr(instr);
    return instr;
}
  1811. IR::Instr *
  1812. LowererMD::ChangeToWriteBarrierAssign(IR::Instr * assignInstr, const Func* func)
  1813. {
  1814. #ifdef RECYCLER_WRITE_BARRIER_JIT
  1815. // WriteBarrier-TODO- Implement ARM JIT
  1816. #endif
  1817. return ChangeToAssignNoBarrierCheck(assignInstr);
  1818. }
  1819. ///----------------------------------------------------------------------------
  1820. ///
  1821. /// LowererMD::LowerRet
  1822. ///
  1823. /// Lower Ret to "MOV EAX, src"
  1824. /// The real RET is inserted at the exit of the function when emitting the
  1825. /// epilog.
  1826. ///
  1827. ///----------------------------------------------------------------------------
  1828. IR::Instr *
  1829. LowererMD::LowerRet(IR::Instr * retInstr)
  1830. {
  1831. IR::RegOpnd *retReg = IR::RegOpnd::New(TyMachReg, m_func);
  1832. retReg->SetReg(RETURN_REG);
  1833. Lowerer::InsertMove(retReg, retInstr->UnlinkSrc1(), retInstr);
  1834. retInstr->SetSrc1(retReg);
  1835. return retInstr;
  1836. }
  1837. ///----------------------------------------------------------------------------
  1838. ///
  1839. /// LowererMD::MDBranchOpcode
  1840. ///
  1841. /// Map HIR branch opcode to machine-dependent equivalent.
  1842. ///
  1843. ///----------------------------------------------------------------------------
Js::OpCode
LowererMD::MDBranchOpcode(Js::OpCode opcode)
{
    // Map a HIR branch opcode to its signed machine condition:
    //   EQ/NE for (in)equality, LT/LE/GT/GE for signed relational compares,
    //   HI/CS/CC/LS for the explicitly-unsigned Br.Un* forms.
    switch (opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrAddr_A:
        return Js::OpCode::BEQ;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotAddr_A:
        return Js::OpCode::BNE;

    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrNotGe_A:
        return Js::OpCode::BLT;

    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotGt_A:
        return Js::OpCode::BLE;

    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrNotLe_A:
        return Js::OpCode::BGT;

    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrNotLt_A:
        return Js::OpCode::BGE;

    // Unsigned compares: HI = above, CS = above-or-same, CC = below, LS = below-or-same.
    case Js::OpCode::BrUnGt_A:
        return Js::OpCode::BHI;

    case Js::OpCode::BrUnGe_A:
        return Js::OpCode::BCS;

    case Js::OpCode::BrUnLt_A:
        return Js::OpCode::BCC;

    case Js::OpCode::BrUnLe_A:
        return Js::OpCode::BLS;

    default:
        AssertMsg(0, "NYI");
        return opcode;
    }
}
Js::OpCode
LowererMD::MDUnsignedBranchOpcode(Js::OpCode opcode)
{
    // Like MDBranchOpcode, but the relational compares map to unsigned
    // condition codes (equality is sign-agnostic and maps the same way).
    switch (opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrAddr_A:
        return Js::OpCode::BEQ;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotAddr_A:
        return Js::OpCode::BNE;

    // CC = below, LS = below-or-same, HI = above, CS = above-or-same.
    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrNotGe_A:
        return Js::OpCode::BCC;

    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotGt_A:
        return Js::OpCode::BLS;

    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrNotLe_A:
        return Js::OpCode::BHI;

    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrNotLt_A:
        return Js::OpCode::BCS;

    default:
        AssertMsg(0, "NYI");
        return opcode;
    }
}
  1920. Js::OpCode LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode opcode)
  1921. {
  1922. Assert(opcode == Js::OpCode::BrLt_A || opcode == Js::OpCode::BrGe_A);
  1923. return opcode == Js::OpCode::BrLt_A ? Js::OpCode::BMI : Js::OpCode::BPL;
  1924. }
  1925. void LowererMD::ChangeToAdd(IR::Instr *const instr, const bool needFlags)
  1926. {
  1927. Assert(instr);
  1928. Assert(instr->GetDst());
  1929. Assert(instr->GetSrc1());
  1930. Assert(instr->GetSrc2());
  1931. if(instr->GetDst()->IsFloat64())
  1932. {
  1933. Assert(instr->GetSrc1()->IsFloat64());
  1934. Assert(instr->GetSrc2()->IsFloat64());
  1935. Assert(!needFlags);
  1936. instr->m_opcode = Js::OpCode::FADD;
  1937. return;
  1938. }
  1939. instr->m_opcode = needFlags ? Js::OpCode::ADDS : Js::OpCode::ADD;
  1940. }
  1941. void LowererMD::ChangeToSub(IR::Instr *const instr, const bool needFlags)
  1942. {
  1943. Assert(instr);
  1944. Assert(instr->GetDst());
  1945. Assert(instr->GetSrc1());
  1946. Assert(instr->GetSrc2());
  1947. if(instr->GetDst()->IsFloat64())
  1948. {
  1949. Assert(instr->GetSrc1()->IsFloat64());
  1950. Assert(instr->GetSrc2()->IsFloat64());
  1951. Assert(!needFlags);
  1952. instr->m_opcode = Js::OpCode::FSUB;
  1953. return;
  1954. }
  1955. instr->m_opcode = needFlags ? Js::OpCode::SUBS : Js::OpCode::SUB;
  1956. }
void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
{
    // Rewrite a HIR shift as LSL/ASR/LSR and apply JavaScript's shift-count
    // masking: the count is taken modulo the operand width (& 31 for 32-bit
    // destinations, & 63 for 64-bit ones).
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    Func *const func = instr->m_func;

    switch(instr->m_opcode)
    {
    case Js::OpCode::Shl_A:
    case Js::OpCode::Shl_I4:
        Assert(!needFlags); // not implemented
        instr->m_opcode = Js::OpCode::LSL;
        break;

    case Js::OpCode::Shr_A:
    case Js::OpCode::Shr_I4:
        Assert(!needFlags); // not implemented
        instr->m_opcode = Js::OpCode::ASR;
        break;

    case Js::OpCode::ShrU_A:
    case Js::OpCode::ShrU_I4:
        Assert(!needFlags); // not implemented
        instr->m_opcode = Js::OpCode::LSR;
        break;

    default:
        Assert(false);
        __assume(false);
    }

    // Javascript requires the shift count to be masked to the operand width:
    // bottom 5 bits for 32-bit destinations, bottom 6 bits for 64-bit ones.
    uint8 mask = TySize[instr->GetDst()->GetType()] == 8 ? 63 : 31;
    if (instr->GetSrc2()->IsIntConstOpnd())
    {
        // In the constant case, do the mask manually.
        IntConstType immed = instr->GetSrc2()->AsIntConstOpnd()->GetValue() & mask;
        if (immed == 0)
        {
            // Shift by zero is just a move, and the shift-right instructions
            // don't permit encoding of a zero shift amount.
            instr->m_opcode = Js::OpCode::MOV;
            instr->FreeSrc2();
        }
        else
        {
            instr->GetSrc2()->AsIntConstOpnd()->SetValue(immed);
        }
    }
    else
    {
        // In the variable case, generate code to do the mask (AND into a temp
        // register that becomes the new shift-count operand).
        IR::Opnd *const src2 = instr->UnlinkSrc2();
        instr->SetSrc2(IR::RegOpnd::New(src2->GetType(), func));
        IR::Instr *const newInstr = IR::Instr::New(
            Js::OpCode::AND, instr->GetSrc2(), src2, IR::IntConstOpnd::New(mask, TyInt8, func), func);
        instr->InsertBefore(newInstr);
    }
}
  2013. const uint16
  2014. LowererMD::GetFormalParamOffset()
  2015. {
  2016. //In ARM formal params are offset into the param area.
  2017. //So we only count the non-user params (Function object & CallInfo and let the encoder account for the saved R11 and LR
  2018. return 2;
  2019. }
  2020. ///----------------------------------------------------------------------------
  2021. ///
  2022. /// LowererMD::LowerCondBranch
  2023. ///
  2024. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerCondBranch(IR::Instr * instr)
{
    // Lower a HIR conditional branch to a compare (CMP/FCMP) followed by the
    // matching conditional branch. Boolean-style branches compare src1 with
    // zero; two-operand branches compare src1 against src2.
    AssertMsg(instr->GetSrc1() != nullptr, "Expected src opnds on conditional branch");

    IR::Opnd *  opndSrc1 = instr->UnlinkSrc1();
    IR::Instr * instrPrev = nullptr;

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrOnNotEmpty:
    case Js::OpCode::BrNotNull_A:
    case Js::OpCode::BrOnObject_A:
    case Js::OpCode::BrOnClassConstructor:
    case Js::OpCode::BrOnBaseConstructorKind:
        // Branch taken when src1 != 0.
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(opndSrc1->IsRegOpnd(),"NYI for other operands");
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instr->InsertBefore(instrPrev);
        LegalizeMD::LegalizeInstr(instrPrev);
        instr->m_opcode = Js::OpCode::BNE;
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrOnEmpty:
        // Branch taken when src1 == 0.
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(opndSrc1->IsRegOpnd(),"NYI for other operands");
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instr->InsertBefore(instrPrev);
        LegalizeMD::LegalizeInstr(instrPrev);
        instr->m_opcode = Js::OpCode::BEQ;
        break;

    default:
        IR::Opnd * opndSrc2 = instr->UnlinkSrc2();
        AssertMsg(opndSrc2 != nullptr, "Expected 2 src's on non-boolean branch");

        if (opndSrc1->IsFloat64())
        {
            AssertMsg(opndSrc1->IsRegOpnd(),"NYI for other operands");
            Assert(opndSrc2->IsFloat64());
            Assert(opndSrc2->IsRegOpnd() && opndSrc1->IsRegOpnd());
            // This comparison updates the FPSCR - floating point status control register
            instrPrev = IR::Instr::New(Js::OpCode::FCMP, this->m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
            LegalizeMD::LegalizeInstr(instrPrev);
            instr->m_opcode = LowererMD::MDBranchOpcode(instr->m_opcode);
        }
        else
        {
            AssertMsg(opndSrc2->IsRegOpnd() || opndSrc2->IsIntConstOpnd() || (opndSrc2->IsAddrOpnd()), "NYI for other operands");
            instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
            LegalizeMD::LegalizeInstr(instrPrev);
            instr->m_opcode = MDBranchOpcode(instr->m_opcode);
        }
        break;
    }

    return instr;
}
  2091. ///----------------------------------------------------------------------------
  2092. ///
  2093. /// LowererMD::ForceDstToReg
  2094. ///
  2095. ///----------------------------------------------------------------------------
  2096. IR::Instr*
  2097. LowererMD::ForceDstToReg(IR::Instr *instr)
  2098. {
  2099. IR::Opnd * dst = instr->GetDst();
  2100. if (dst->IsRegOpnd())
  2101. {
  2102. return instr;
  2103. }
  2104. IR::Instr * newInstr = instr->SinkDst(Js::OpCode::Ld_A);
  2105. LowererMD::ChangeToAssign(newInstr);
  2106. return newInstr;
  2107. }
  2108. IR::Instr *
  2109. LowererMD::LoadFunctionObjectOpnd(IR::Instr *instr, IR::Opnd *&functionObjOpnd)
  2110. {
  2111. IR::Opnd * src1 = instr->GetSrc1();
  2112. IR::Instr * instrPrev = instr->m_prev;
  2113. if (src1 == nullptr)
  2114. {
  2115. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  2116. //function object is first argument and mark it as IsParamSlotSym.
  2117. StackSym *paramSym = GetImplicitParamSlotSym(0);
  2118. IR::SymOpnd *paramOpnd = IR::SymOpnd::New(paramSym, TyMachPtr, m_func);
  2119. instrPrev = Lowerer::InsertMove(regOpnd, paramOpnd, instr);
  2120. functionObjOpnd = instrPrev->GetDst();
  2121. }
  2122. else
  2123. {
  2124. // Inlinee LdHomeObj, use the function object opnd on the instruction
  2125. functionObjOpnd = instr->UnlinkSrc1();
  2126. if (!functionObjOpnd->IsRegOpnd())
  2127. {
  2128. Assert(functionObjOpnd->IsAddrOpnd());
  2129. }
  2130. }
  2131. return instrPrev;
  2132. }
  2133. bool
  2134. LowererMD::GenerateFastDivAndRem(IR::Instr *instrDiv, IR::LabelInstr* bailOutLabel)
  2135. {
  2136. return false;
  2137. }
void
LowererMD::GenerateFastDivByPow2(IR::Instr *instrDiv)
{
    // Fast path for dividing a tagged int by a power-of-2 constant.
    // The implementation below is intentionally disabled (commented out) on
    // this target; the function is a no-op and the caller emits the general
    // helper-call sequence instead. The dead code is retained as a sketch of
    // the intended sequence should it be enabled/ported later.
    //// Given:
    //// dst = Div_A src1, src2
    //// where src2 == power of 2
    ////
    //// Generate:
    //// (observation: positive q divides by p equally, where p = power of 2, if q's binary representation
    //// has all zeroes to the right of p's power 2 bit, try to see if that is the case)
    //// s1 = AND src1, 0x80000001 | ((src2Value - 1) << 1)
    //// CMP s1, 1
    //// BNE $doesntDivideEqually
    //// s1 = ASR src1, log2(src2Value) -- do the equal divide
    //// dst = EOR s1, 1 -- restore tagged int bit
    //// B $done
    //// $doesntDivideEqually:
    //// (now check if it divides with the remainder of 1, for which we can do integer divide and accommodate with +0.5
    //// note that we need only the part that is to the left of p's power 2 bit)
    //// s1 = AND s1, 0x80000001 | (src2Value - 1)
    //// CMP s1, 1
    //// BNE $helper
    //// s1 = ASR src1, log2(src2Value) + 1 -- do the integer divide and also shift out the tagged int bit
    //// PUSH 0xXXXXXXXX (ScriptContext)
    //// PUSH s1
    //// dst = CALL Op_FinishOddDivByPow2 -- input: actual value, scriptContext; output: JavascriptNumber with 0.5 added to the input
    //// JMP $done
    //// $helper:
    //// ...
    //// $done:
    //if (instrDiv->GetSrc1()->IsRegOpnd() && instrDiv->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
    //{
    //    return;
    //}
    //IR::Opnd *dst = instrDiv->GetDst();
    //IR::Opnd *src1 = instrDiv->GetSrc1();
    //IR::AddrOpnd *src2 = instrDiv->GetSrc2()->IsAddrOpnd() ? instrDiv->GetSrc2()->AsAddrOpnd() : nullptr;
    //IR::LabelInstr *doesntDivideEqually = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    //IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    //IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    //IR::RegOpnd *s1 = IR::RegOpnd::New(TyVar, m_func);
    //IR::Instr *instr;
    //Assert(src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address) && (Math::IsPow2(Js::TaggedInt::ToInt32(src2->m_address))));
    //int32 src2Value = Js::TaggedInt::ToInt32(src2->m_address);
    //// s1 = AND src1, 0x80000001 | ((src2Value - 1) << 1)
    //instr = IR::Instr::New(Js::OpCode::AND, s1, src1, IR::IntConstOpnd::New((0x80000001 | ((src2Value - 1) << 1)), TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr);
    //// CMP s1, 1
    //instr = IR::Instr::New(Js::OpCode::CMP, m_func);
    //instr->SetSrc1(s1);
    //instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, m_func));
    //instrDiv->InsertBefore(instr);
    //// BNE $doesntDivideEqually
    //instr = IR::BranchInstr::New(Js::OpCode::BNE, doesntDivideEqually, m_func);
    //instrDiv->InsertBefore(instr);
    //// s1 = ASR src1, log2(src2Value) -- do the equal divide
    //instr = IR::Instr::New(Js::OpCode::ASR, s1, src1, IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr);
    //// dst = ORR s1, 1 -- restore tagged int bit
    //instr = IR::Instr::New(Js::OpCode::ORR, dst, s1, IR::IntConstOpnd::New(1, TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr);
    //
    //// B $done
    //instr = IR::BranchInstr::New(Js::OpCode::B, done, m_func);
    //instrDiv->InsertBefore(instr);
    //// $doesntDivideEqually:
    //instrDiv->InsertBefore(doesntDivideEqually);
    //// s1 = AND s1, 0x80000001 | (src2Value - 1)
    //instr = IR::Instr::New(Js::OpCode::AND, s1, s1, IR::IntConstOpnd::New((0x80000001 | (src2Value - 1)), TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //// CMP s1, 1
    //instr = IR::Instr::New(Js::OpCode::CMP, m_func);
    //instr->SetSrc1(s1);
    //instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, m_func));
    //instrDiv->InsertBefore(instr);
    //// BNE $helper
    //instrDiv->InsertBefore(IR::BranchInstr::New(Js::OpCode::BNE, helper, m_func));
    //// s1 = ASR src1, log2(src2Value) + 1 -- do the integer divide and also shift out the tagged int bit
    //instr = IR::Instr::New(Js::OpCode::ASR, s1, src1, IR::IntConstOpnd::New(Math::Log2(src2Value) + 1, TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr);
    //// Arg2: scriptContext
    //IR::JnHelperMethod helperMethod;
    //if (instrDiv->dstIsTempNumber)
    //{
    //    // Var JavascriptMath::FinishOddDivByPow2_InPlace(uint32 value, ScriptContext *scriptContext, __out JavascriptNumber* result)
    //    helperMethod = IR::HelperOp_FinishOddDivByPow2InPlace;
    //    Assert(dst->IsRegOpnd());
    //    StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
    //    instr = this->m_lowerer->InsertLoadStackAddress(tempNumberSym, instrDiv);
    //    LegalizeMD::LegalizeInstr(instr);
    //    this->LoadHelperArgument(instrDiv, instr->GetDst());
    //}
    //else
    //{
    //    // Var JavascriptMath::FinishOddDivByPow2(uint32 value, ScriptContext *scriptContext)
    //    helperMethod = IR::HelperOp_FinishOddDivByPow2;
    //}
    //this->m_lowerer->LoadScriptContext(instrDiv);
    //// Arg1: value
    //this->LoadHelperArgument(instrDiv, s1);
    //// dst = CALL Op_FinishOddDivByPow2 -- input: actual value, output: JavascriptNumber with 0.5 added to the input
    //instr = IR::Instr::New(Js::OpCode::Call, dst, IR::HelperCallOpnd::New(helperMethod, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //this->LowerCall(instr, 0);
    //// JMP $done
    //instrDiv->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, done, m_func));
    //// $helper:
    //instrDiv->InsertBefore(helper);
    //// $done:
    //instrDiv->InsertAfter(done);
    return;
}
  2254. ///----------------------------------------------------------------------------
  2255. ///
  2256. /// LowererMD::GenerateFastCmSrEqConst
  2257. ///
  2258. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastCmSrEqConst(IR::Instr *instr)
{
    // Fast path for strict-equality compare against a known constant
    // ('null', 'true', or 'false'). Not implemented on this target: after
    // validating the precondition we return false so the caller emits the
    // general lowering. The intended sequence would be:
    //
    // Given:
    // s1 = CmSrEq_A s2, s3
    // where either s2 or s3 is 'null', 'true' or 'false'
    //
    // Generate:
    //
    // CMP s2, s3
    // JEQ $mov_true
    // MOV s1, Library.GetFalse()
    // JMP $done
    // $mov_true:
    // MOV s1, Library.GetTrue()
    // $done:
    //
    Assert(m_lowerer->IsConstRegOpnd(instr->GetSrc2()->AsRegOpnd()));
    return false;
}
bool LowererMD::GenerateFastCmXxI4(IR::Instr *instr)
{
    // Int32 compares share the tagged-int compare fast path.
    return this->GenerateFastCmXxTaggedInt(instr);
}
  2284. ///----------------------------------------------------------------------------
  2285. ///
  2286. /// LowererMD::GenerateFastCmXxTaggedInt
  2287. ///
  2288. ///----------------------------------------------------------------------------
bool LowererMD::GenerateFastCmXxTaggedInt(IR::Instr *instr, bool isInHelper /* = false */)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints (i.e., are vars with the low bit set).
    //
    // Given:
    //
    //      Cmxx_A dst, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    //      LDIMM dst, trueResult
    //      CMP   src1, src2
    //      BEQ   $fallthru
    //      LDIMM dst, falseResult
    //      B     $fallthru
    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:
    //
    // Returns true if the compare was fully lowered inline (instr removed);
    // false if the caller must still emit the helper call after $helper.
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Opnd * dst = instr->GetDst();
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func, isInHelper);
    Assert(src1 && src2 && dst);
    // Not tagged ints? Give up on the fast path entirely if either source is
    // known to never be a number.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym->m_isNotNumber)
    {
        return false;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym->m_isNotNumber)
    {
        return false;
    }
    // Map the compare opcode to the branch condition used for the "true" case.
    Js::OpCode opcode = Js::OpCode::InvalidOpCode;
    switch ( instr->m_opcode)
    {
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmEq_I4:
        opcode = Js::OpCode::BEQ;
        break;
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmNeq_I4:
        opcode = Js::OpCode::BNE;
        break;
    case Js::OpCode::CmGt_A:
    case Js::OpCode::CmGt_I4:
        opcode = Js::OpCode::BGT;
        break;
    case Js::OpCode::CmGe_A:
    case Js::OpCode::CmGe_I4:
        opcode = Js::OpCode::BGE;
        break;
    case Js::OpCode::CmLt_A:
    case Js::OpCode::CmLt_I4:
        opcode = Js::OpCode::BLT;
        break;
    case Js::OpCode::CmLe_A:
    case Js::OpCode::CmLe_I4:
        opcode = Js::OpCode::BLE;
        break;
    case Js::OpCode::CmUnGt_A:
    case Js::OpCode::CmUnGt_I4:
        opcode = Js::OpCode::BHI;     // unsigned higher
        break;
    case Js::OpCode::CmUnGe_A:
    case Js::OpCode::CmUnGe_I4:
        opcode = Js::OpCode::BCS;     // unsigned >= (carry set)
        break;
    case Js::OpCode::CmUnLt_A:
    case Js::OpCode::CmUnLt_I4:
        opcode = Js::OpCode::BCC;     // unsigned < (carry clear)
        break;
    case Js::OpCode::CmUnLe_A:
    case Js::OpCode::CmUnLe_I4:
        opcode = Js::OpCode::BLS;     // unsigned lower-or-same
        break;
    default: Assert(false);
    }
    // Tagged ints? If both sources are statically known ints, we can skip the
    // runtime tagged-int pair test and emit the compare unconditionally.
    bool isTaggedInts = false;
    if (src1->IsTaggedInt() || src1->IsInt32())
    {
        if (src2->IsTaggedInt() || src2->IsInt32())
        {
            isTaggedInts = true;
        }
    }
    if (!isTaggedInts)
    {
        this->GenerateSmIntPairTest(instr, src1, src2, helper);
    }
    // The LDIMM of the "true" result writes dst before the CMP consumes the
    // sources, so if dst aliases a source, copy that source to a fresh register
    // first to avoid clobbering it.
    if (dst->IsEqual(src1))
    {
        IR::RegOpnd *newSrc1 = IR::RegOpnd::New(TyMachReg, m_func);
        Lowerer::InsertMove(newSrc1, src1, instr);
        src1 = newSrc1;
    }
    if (dst->IsEqual(src2))
    {
        IR::RegOpnd *newSrc2 = IR::RegOpnd::New(TyMachReg, m_func);
        Lowerer::InsertMove(newSrc2, src2, instr);
        src2 = newSrc2;
    }
    // Int32 destinations get raw 1/0; var destinations get the library's
    // canonical true/false objects.
    IR::Opnd *opndTrue, *opndFalse;
    if (dst->IsInt32())
    {
        opndTrue = IR::IntConstOpnd::New(1, TyMachReg, this->m_func);
        opndFalse = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
    }
    else
    {
        opndTrue = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue);
        opndFalse = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
    }
    // LDIMM dst, trueResult
    // CMP src1, src2
    // BEQ $fallthru
    // LDIMM dst, falseResult
    // B $fallthru
    //
    // Compare as 32-bit values (tagged-int payload).
    src1 = src1->UseWithNewType(TyInt32, m_func);
    src2 = src2->UseWithNewType(TyInt32, m_func);
    instr->InsertBefore(IR::Instr::New(Js::OpCode::LDIMM, dst, opndTrue, m_func));
    IR::Instr *instrCmp = IR::Instr::New(Js::OpCode::CMP, m_func);
    instrCmp->SetSrc1(src1);
    instrCmp->SetSrc2(src2);
    instr->InsertBefore(instrCmp);
    LegalizeMD::LegalizeInstr(instrCmp);
    instr->InsertBefore(IR::BranchInstr::New(opcode, fallthru, m_func));
    instr->InsertBefore(IR::Instr::New(Js::OpCode::LDIMM, dst, opndFalse, m_func));
    if (isTaggedInts)
    {
        // Fully handled inline: no helper path needed, remove the original instr.
        instr->InsertAfter(fallthru);
        instr->Remove();
        return true;
    }
    // B $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, fallthru, m_func));
    instr->InsertBefore(helper);
    instr->InsertAfter(fallthru);
    return false;
}
  2434. IR::Instr * LowererMD::GenerateConvBool(IR::Instr *instr)
  2435. {
  2436. // dst = LDIMM true
  2437. // TST src1, src2
  2438. // BNE fallthrough
  2439. // dst = LDIMM false
  2440. // fallthrough:
  2441. IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
  2442. IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
  2443. IR::Opnd *opndTrue = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue);
  2444. IR::Opnd *opndFalse = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
  2445. IR::LabelInstr *fallthru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  2446. // dst = LDIMM true
  2447. IR::Instr *instrFirst = IR::Instr::New(Js::OpCode::LDIMM, dst, opndTrue, m_func);
  2448. instr->InsertBefore(instrFirst);
  2449. // TST src1, src2
  2450. IR::Instr *instrTst = IR::Instr::New(Js::OpCode::TST, m_func);
  2451. instrTst->SetSrc1(src1);
  2452. instrTst->SetSrc2(src1);
  2453. instr->InsertBefore(instrTst);
  2454. LegalizeMD::LegalizeInstr(instrTst);
  2455. // BNE fallthrough
  2456. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BNE, fallthru, m_func));
  2457. // dst = LDIMM false
  2458. instr->InsertBefore(IR::Instr::New(Js::OpCode::LDIMM, dst, opndFalse, m_func));
  2459. // fallthrough:
  2460. instr->InsertAfter(fallthru);
  2461. instr->Remove();
  2462. return instrFirst;
  2463. }
  2464. ///----------------------------------------------------------------------------
  2465. ///
  2466. /// LowererMD::GenerateFastAdd
  2467. ///
  2468. /// NOTE: We assume that only the sum of two Int31's will have 0x2 set. This
  2469. /// is only true until we have a var type with tag == 0x2.
  2470. ///
  2471. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastAdd(IR::Instr * instrAdd)
{
    // Given:
    //
    //      dst = Add src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    //      s1  = MOV  src1
    //      s1  = ADDS s1, src2  -- try an inline add
    //            BVS  $helper   -- bail if the add overflowed
    //      s1  = ORR  s1, AtomTag_IntPtr
    //      dst = MOV  s1
    //            B    $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    //
    // Returns true if a fast path was emitted (caller still appends the
    // helper call at $helper); false if no fast path applies.
    IR::Instr * instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd * opndReg;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    opndSrc1 = instrAdd->GetSrc1();
    opndSrc2 = instrAdd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");
    // Generate fastpath for Incr_A anyway -
    // Incrementing strings representing integers can be inter-mixed with integers e.g. "1"++ -> converts 1 to an int and thereafter, integer increment is expected.
    if (opndSrc1->IsRegOpnd() && (opndSrc1->AsRegOpnd()->IsNotInt() || opndSrc1->GetValueType().IsString()
        || (instrAdd->m_opcode != Js::OpCode::Incr_A && opndSrc1->GetValueType().IsLikelyString())))
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && (opndSrc2->AsRegOpnd()->IsNotInt() ||
        opndSrc2->GetValueType().IsLikelyString()))
    {
        return false;
    }
    // Tagged ints? If both sources are statically known tagged ints, the
    // runtime pair test can be skipped.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrAdd, opndSrc1, opndSrc2, labelHelper);
    }
    if (opndSrc1->IsImmediateOpnd())
    {
        // If opnd1 is a constant, just swap them (add is commutative; keeps
        // the register operand as the move source below).
        IR::Opnd *opndTmp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = opndTmp;
    }
    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on ARM64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    Lowerer::InsertMove(opndReg, opndSrc1, instrAdd);
    // s1 = ADDS s1, src2  -- flag-setting add so overflow can be detected
    instr = IR::Instr::New(Js::OpCode::ADDS, opndReg, opndReg, opndSrc2, this->m_func);
    instrAdd->InsertBefore(instr);
    Legalize(instr);
    // BVS $helper  -- overflow means the result doesn't fit an Int31
    instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
    instrAdd->InsertBefore(instr);
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }
    // s1 = ORR s1, AtomTag_IntPtr  -- re-tag the raw int as a var
    GenerateInt32ToVarConversion(opndReg, instrAdd);
    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrAdd->GetDst(), opndReg, this->m_func);
    instrAdd->InsertBefore(instr);
    // B $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrAdd->InsertBefore(instr);
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrAdd->InsertBefore(labelHelper);
    instrAdd->InsertAfter(labelFallThru);
    return true;
}
  2573. ///----------------------------------------------------------------------------
  2574. ///
  2575. /// LowererMD::GenerateFastSub
  2576. ///
  2577. ///
  2578. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastSub(IR::Instr * instrSub)
{
    // Given:
    //
    //      dst = Sub src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    //      s1  = MOV  src1
    //      s1  = SUBS s1, src2  -- try an inline sub
    //            BVS  $helper   -- bail if the subtract overflowed
    //      s1  = ORR  s1, AtomTag_IntPtr
    //      dst = MOV  s1
    //            B    $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    //
    // Returns true if a fast path was emitted (caller still appends the
    // helper call at $helper); false if no fast path applies.
    IR::Instr * instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd * opndReg;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    opndSrc1 = instrSub->GetSrc1();
    opndSrc2 = instrSub->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Sub instruction");
    // Not tagged ints? No fast path if either source is known non-int.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    // Tagged ints? If both sources are statically known tagged ints, the
    // runtime pair test can be skipped.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrSub, opndSrc1, opndSrc2, labelHelper);
    }
    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on ARM64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    Lowerer::InsertMove(opndReg, opndSrc1, instrSub);
    // s1 = SUBS s1, src2  -- flag-setting subtract so overflow can be detected
    instr = IR::Instr::New(Js::OpCode::SUBS, opndReg, opndReg, opndSrc2, this->m_func);
    instrSub->InsertBefore(instr);
    Legalize(instr);
    // BVS $helper  -- overflow means the result doesn't fit an Int31
    instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
    instrSub->InsertBefore(instr);
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }
    // s1 = ORR s1, AtomTag_IntPtr  -- re-tag the raw int as a var
    GenerateInt32ToVarConversion(opndReg, instrSub);
    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrSub->GetDst(), opndReg, this->m_func);
    instrSub->InsertBefore(instr);
    // B $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrSub->InsertBefore(instr);
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrSub->InsertBefore(labelHelper);
    instrSub->InsertAfter(labelFallThru);
    return true;
}
  2671. ///----------------------------------------------------------------------------
  2672. ///
  2673. /// LowererMD::GenerateFastMul
  2674. ///
  2675. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastMul(IR::Instr * instrMul)
{
    // Given:
    //
    //      dst = Mul src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    //      s1 = MOV src1
    //      s2 = MOV src2
    //      s3 = SMULL s1, s2 -- do the signed mul into 64 bits
    //      CMP s3, s3 SXTW   -- result fits in 32 bits iff sign-extension is identity
    //      BNE $helper       -- bail if the result overflowed
    //      CBZ s3, $zero     -- Check result is 0. might be -0. Result is -0 when a negative number is multiplied with 0.
    //      B $nonzero
    // $zero:                 -- result of mul was 0. try to check for -0
    //      s2 = ADDS s2, src1 -- check for same sign
    //      BGE $nonzero       -- positive 0 if signs are equal
    //      dst = ToVar(-0.0)  -- load negative 0
    //      B $fallthru
    // $nonzero:
    //      s3 = ORR s3, AtomTag_IntPtr
    //      dst = MOV s3
    //      B $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    //
    // NOTE(review): on known-non-int sources this returns true (unlike
    // GenerateFastAdd/GenerateFastSub which return false) — confirm the
    // caller's contract for the return value before changing.
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelNonZero;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::RegOpnd * s3;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    opndSrc1 = instrMul->GetSrc1();
    opndSrc2 = instrMul->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on mul instruction");
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    // (If not 2 Int31's, jump to $helper.)
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    labelNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    this->GenerateSmIntPairTest(instrMul, opndSrc1, opndSrc2, labelHelper);
    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on ARM64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    if (opndSrc1->IsImmediateOpnd())
    {
        // Keep any immediate in src2 (mul is commutative).
        IR::Opnd * temp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = temp;
    }
    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    Lowerer::InsertMove(opndReg1, opndSrc1, instrMul);
    // s2 = MOV src2
    opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
    Lowerer::InsertMove(opndReg2, opndSrc2, instrMul);
    // s3 = SMULL s1, s2  -- full 64-bit product of the 32-bit operands
    s3 = IR::RegOpnd::New(TyInt64, this->m_func);
    instr = IR::Instr::New(Js::OpCode::SMULL, s3, opndReg1, opndReg2, this->m_func);
    instrMul->InsertBefore(instr);
    // CMP s3, s3 SXTW  -- compare the product with its own low 32 bits
    // sign-extended; unequal means the product overflowed int32.
    instr = IR::Instr::New(Js::OpCode::CMP_SXTW, this->m_func);
    instr->SetSrc1(s3);
    instr->SetSrc2(s3);
    instrMul->InsertBefore(instr);
    // BNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
    instrMul->InsertBefore(instr);
    // CBZ s3, $zero -- Check result is 0. might be -0. Result is -0 when a negative number is multiplied with 0.
    IR::LabelInstr *labelZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::CBZ, labelZero, this->m_func);
    instr->SetSrc1(s3);
    instrMul->InsertBefore(instr);
    // B $nonzero
    instr = IR::BranchInstr::New(Js::OpCode::B, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);
    // $zero:
    instrMul->InsertBefore(labelZero);
    // s2 = ADDS s2, s1 -- sum's sign distinguishes +0 (operands' signs differ
    // only when one is negative) from -0
    instr = IR::Instr::New(Js::OpCode::ADDS, opndReg2, opndReg2, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);
    Legalize(instr);
    // BGE $nonzero
    instr = IR::BranchInstr::New(Js::OpCode::BGE, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);
    // dst = ToVar(-0.0) -- load negative 0
    instr = IR::Instr::New(Js::OpCode::LDIMM, instrMul->GetDst(), m_lowerer->LoadLibraryValueOpnd(instrMul, LibraryValue::ValueNegativeZero), this->m_func);
    instrMul->InsertBefore(instr);
    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);
    // $nonzero:
    instrMul->InsertBefore(labelNonZero);
    // dst = MOV_TRUNC s3 -- take the low 32 bits of the product
    instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, instrMul->GetDst()->UseWithNewType(TyInt32,this->m_func), s3->UseWithNewType(TyInt32, this->m_func), this->m_func);
    instrMul->InsertBefore(instr);
    // dst = OR dst, AtomTag_IntPtr -- re-tag the raw int as a var
    GenerateInt32ToVarConversion(instrMul->GetDst(), instrMul);
    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrMul->InsertBefore(labelHelper);
    instrMul->InsertAfter(labelFallThru);
    return true;
}
  2800. ///----------------------------------------------------------------------------
  2801. ///
  2802. /// LowererMD::GenerateFastAnd
  2803. ///
  2804. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastAnd(IR::Instr * instrAnd)
{
    // No inline fast path for bitwise AND on this target; the caller's
    // general lowering handles it.
    // Left empty to match AMD64; assuming this is not performance critical
    return true;
}
  2811. ///----------------------------------------------------------------------------
  2812. ///
  2813. /// LowererMD::GenerateFastOr
  2814. ///
  2815. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastOr(IR::Instr * instrOr)
{
    // No inline fast path for bitwise OR on this target; the caller's
    // general lowering handles it.
    // Left empty to match AMD64; assuming this is not performance critical
    return true;
}
  2822. ///----------------------------------------------------------------------------
  2823. ///
  2824. /// LowererMD::GenerateFastXor
  2825. ///
  2826. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastXor(IR::Instr * instrXor)
{
    // No inline fast path for bitwise XOR on this target; the caller's
    // general lowering handles it.
    // Left empty to match AMD64; assuming this is not performance critical
    return true;
}
  2833. //----------------------------------------------------------------------------
  2834. //
  2835. // LowererMD::GenerateFastNot
  2836. //
  2837. //----------------------------------------------------------------------------
bool
LowererMD::GenerateFastNot(IR::Instr * instrNot)
{
    // No inline fast path for bitwise NOT on this target; the caller's
    // general lowering handles it.
    // Left empty to match AMD64; assuming this is not performance critical
    return true;
}
  2844. //
  2845. // If value is zero in tagged int representation, jump to $labelHelper.
  2846. //
  2847. void
  2848. LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  2849. {
  2850. // Cast the var to 32 bit integer.
  2851. if(opndSrc->GetSize() != 4)
  2852. {
  2853. opndSrc = opndSrc->UseWithNewType(TyUint32, this->m_func);
  2854. }
  2855. AssertMsg(TySize[opndSrc->GetType()] == 4, "This technique works only on the 32-bit version");
  2856. if(labelHelper != nullptr)
  2857. {
  2858. // CBZ src1, $labelHelper
  2859. IR::Instr* instr = IR::BranchInstr::New(Js::OpCode::CBZ, labelHelper, this->m_func);
  2860. instr->SetSrc1(opndSrc);
  2861. insertInstr->InsertBefore(instr);
  2862. }
  2863. else
  2864. {
  2865. // TST src1, src1
  2866. IR::Instr* instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
  2867. instr->SetSrc1(opndSrc);
  2868. instr->SetSrc2(opndSrc);
  2869. insertInstr->InsertBefore(instr);
  2870. LegalizeMD::LegalizeInstr(instr);
  2871. }
  2872. }
bool
LowererMD::GenerateFastNeg(IR::Instr * instrNeg)
{
    // Given:
    //
    //      dst = Neg src
    //
    // Generate:
    //
    //      if not int, jump $helper
    //      if src == 0          -- test for zero (must be handled by the runtime to preserve
    //          BEQ $helper      --  difference between +0 and -0)
    //      dst = SUBS zr, src   -- do an inline NEG
    //      BVS $helper          -- bail if the subtract overflowed (INT_MIN)
    //      dst = OR dst, tag    -- restore the var tag on the result
    //      B $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    //
    // Returns true if a fast path was emitted; false if the negate was folded
    // to a constant assignment and the helper call should be skipped entirely.
    IR::Instr * instr;
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru = nullptr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndDst;
    bool usingNewDst = false;
    opndSrc1 = instrNeg->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Neg instruction");
    // Constant-fold: negating a known int constant needs no runtime code.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        IR::Opnd *newOpnd;
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();
        if (value == 0)
        {
            // If the negate operand is zero, the result is -0.0, which is a Number rather than an Int31.
            newOpnd = m_lowerer->LoadLibraryValueOpnd(instrNeg, LibraryValue::ValueNegativeZero);
        }
        else
        {
            // negation below can overflow because max negative int32 value > max positive value by 1.
            newOpnd = IR::AddrOpnd::NewFromNumber(-(int64)value, m_func);
        }
        instrNeg->ClearBailOutInfo();
        instrNeg->FreeSrc1();
        instrNeg->SetSrc1(newOpnd);
        instrNeg = this->ChangeToAssign(instrNeg);
        // Skip lowering call to helper
        return false;
    }
    bool isInt = (opndSrc1->IsTaggedInt());
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!isInt)
    {
        // Not statically known to be an int: emit the runtime tagged-int test.
        GenerateSmIntTest(opndSrc1, instrNeg, labelHelper);
    }
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits.
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    // Zero must go to the helper: -0 is a Number, not a tagged int.
    GenerateTaggedZeroTest(opndSrc1, instrNeg, labelHelper);
    // If dst aliases src, negate into a temp so the helper path (reached after
    // the tests above) still sees the original source value.
    if (opndSrc1->IsEqual(instrNeg->GetDst()))
    {
        usingNewDst = true;
        opndDst = IR::RegOpnd::New(TyInt32, this->m_func);
    }
    else
    {
        opndDst = instrNeg->GetDst()->UseWithNewType(TyInt32, this->m_func);
    }
    // dst = SUBS zr, src  -- negate as (0 - src) with flags for overflow detection
    instr = IR::Instr::New(Js::OpCode::SUBS, opndDst, IR::RegOpnd::New(nullptr, RegZR, TyInt32, this->m_func), opndSrc1, this->m_func);
    instrNeg->InsertBefore(instr);
    // BVS $helper  -- negating INT_MIN overflows
    instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
    instrNeg->InsertBefore(instr);
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if (TyMachReg != opndDst->GetType())
    {
        opndDst = opndDst->UseWithNewType(TyMachPtr, this->m_func);
    }
    // Re-tag the raw int as a var.
    GenerateInt32ToVarConversion(opndDst, instrNeg);
    if (usingNewDst)
    {
        Lowerer::InsertMove(instrNeg->GetDst(), opndDst, instrNeg);
    }
    // B $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrNeg->InsertBefore(instr);
    // $helper:
    // (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNeg->InsertBefore(labelHelper);
    instrNeg->InsertAfter(labelFallThru);
    return true;
}
  2974. ///----------------------------------------------------------------------------
  2975. ///
  2976. /// LowererMD::GenerateFastShiftLeft
  2977. ///
  2978. ///----------------------------------------------------------------------------
  2979. bool
  2980. LowererMD::GenerateFastShiftLeft(IR::Instr * instrShift)
  2981. {
  2982. // Left empty to match AMD64; assuming this is not performance critical
  2983. return true;
  2984. }
  2985. ///----------------------------------------------------------------------------
  2986. ///
  2987. /// LowererMD::GenerateFastShiftRight
  2988. ///
  2989. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastShiftRight(IR::Instr * instrShift)
{
    // Emits the tagged-int fast path for Shr_A / ShrU_A ahead of instrShift.
    // Returns true when the caller should still generate the helper call
    // (either as the slow path or because the fast path was skipped entirely).
    //
    // Given:
    //
    // dst = Shr/ShrU src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s2 = MOV src2
    //      AND s2, 0x1F [unsigned only] // Bail if unsigned and not shifting,
    //      CBZ s2, $helper [unsigned only] // as we may not end up with a taggable int
    // s1 = ASR/LSR s1, s2
    //      ORR s1, 1 << VarTag_Shift
    // dst = MOV s1
    //      B $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    IR::Instr * instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd * opndReg;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    Assert(instrShift->m_opcode == Js::OpCode::ShrU_A || instrShift->m_opcode == Js::OpCode::Shr_A);
    bool isUnsigned = (instrShift->m_opcode == Js::OpCode::ShrU_A);
    opndSrc1 = instrShift->GetSrc1();
    opndSrc2 = instrShift->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");
    // If either source is statically known not to be an int, the fast path can
    // never be taken; let the caller emit just the helper call.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    // When both sources are statically tagged ints, the runtime pair test
    // below can be skipped.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }
    IntConstType s2Value = 0;
    bool src2IsIntConst = false;
    if (isUnsigned)
    {
        // For unsigned shifts, try to resolve the shift count at JIT time so
        // the shift-by-zero case can be rejected without emitting a check.
        if (opndSrc2->IsRegOpnd())
        {
            src2IsIntConst = opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst();
            if (src2IsIntConst)
            {
                s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
            }
        }
        else
        {
            AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
                "Expect src2 of shift right to be reg or Var.");
            src2IsIntConst = true;
            s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
        }
        // 32-bit Shifts only uses the bottom 5 bits.
        s2Value &= 0x1F;
        // Unsigned shift by 0 could yield a value not encodable as a tagged int.
        if (isUnsigned && src2IsIntConst && s2Value == 0)
        {
            return true;
        }
    }
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
    }
    // For 32-bit arithmetic, view src1 as a 32-bit value.
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    if (src2IsIntConst)
    {
        // The (masked) constant shift count can be encoded directly.
        opndSrc2 = IR::IntConstOpnd::New(s2Value, TyInt32, this->m_func);
    }
    else
    {
        // s2 = MOV src2
        opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
        Lowerer::InsertMove(opndReg, opndSrc2, instrShift);
        opndSrc2 = opndReg;
    }
    if (!src2IsIntConst && isUnsigned)
    {
        // s2 = AND s2, 0x1F [unsigned only] // Bail if unsigned and not shifting,
        instr = IR::Instr::New(Js::OpCode::AND, opndSrc2, opndSrc2, IR::IntConstOpnd::New(0x1F, TyInt32, this->m_func), this->m_func);
        instrShift->InsertBefore(instr);
        // CBZ s2, $helper [unsigned only] // as we may not end up with a taggable int
        instr = IR::BranchInstr::New(Js::OpCode::CBZ, labelHelper, this->m_func);
        instr->SetSrc1(opndSrc2);
        instrShift->InsertBefore(instr);
    }
    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    Lowerer::InsertMove(opndReg, opndSrc1, instrShift);
    // s1 = ASR/LSR s1, s2 -- arithmetic shift for signed, logical for unsigned
    instr = IR::Instr::New(isUnsigned ? Js::OpCode::LSR : Js::OpCode::ASR, opndReg, opndReg, opndSrc2, this->m_func);
    instrShift->InsertBefore(instr);
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }
    // ORR s1, 1 << VarTag_Shift -- retag the raw int32 result as a Var
    this->GenerateInt32ToVarConversion(opndReg, instrShift);
    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg, this->m_func);
    instrShift->InsertBefore(instr);
    // B $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrShift->InsertBefore(instr);
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrShift->InsertBefore(labelHelper);
    instrShift->InsertAfter(labelFallThru);
    return true;
}
  3124. void
  3125. LowererMD::GenerateFastBrS(IR::BranchInstr *brInstr)
  3126. {
  3127. IR::Opnd *src1 = brInstr->UnlinkSrc1();
  3128. Assert(src1->IsIntConstOpnd() || src1->IsAddrOpnd() || src1->IsRegOpnd());
  3129. m_lowerer->InsertTest(
  3130. m_lowerer->LoadOptimizationOverridesValueOpnd(
  3131. brInstr, OptimizationOverridesValue::OptimizationOverridesSideEffects),
  3132. src1,
  3133. brInstr);
  3134. Js::OpCode opcode;
  3135. switch(brInstr->m_opcode)
  3136. {
  3137. case Js::OpCode::BrHasSideEffects:
  3138. opcode = Js::OpCode::BNE;
  3139. break;
  3140. case Js::OpCode::BrNotHasSideEffects:
  3141. opcode = Js::OpCode::BEQ;
  3142. break;
  3143. default:
  3144. Assert(UNREACHED);
  3145. __assume(false);
  3146. }
  3147. brInstr->m_opcode = opcode;
  3148. }
  3149. ///----------------------------------------------------------------------------
  3150. ///
  3151. /// LowererMD::GenerateSmIntPairTest
  3152. ///
  3153. /// Generate code to test whether the given operands are both Int31 vars
  3154. /// and branch to the given label if not.
  3155. ///
  3156. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    // Generates code before instrInsert that branches to labelFail unless both
    // operands are tagged ints (Int31 vars). Returns the instruction that
    // preceded the insertion point so the caller can resume lowering there.
    IR::Opnd * opndReg;
    IR::Instr * instrPrev = instrInsert->m_prev;
    IR::Instr * instr;
    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);
    // If src1 is statically a tagged int, swap so the statically-known operand
    // is src2; then only the other operand may need a runtime test.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }
    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically tagged: no runtime test needed.
            return instrPrev;
        }
        // Only src1 needs a runtime tag test.
        GenerateSmIntTest(opndSrc1, instrInsert, labelFail);
        return instrPrev;
    }
    opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    // Pack the tag bits of both operands into one register and compare against
    // the expected tag pair with a single branch:
    // s1 = MOV src1
    // s1 = UBFX s1, VarTagShift - 16, 64 - (VarTag_Shift - 16)
    // s2 = MOV src2
    // s1 = BFXIL s2, VarTagShift, 64 - VarTag_Shift
    // s1 = EOR s1, AtomTag_Pair ------ compare the tags together to the expected tag pair
    // CBNZ s1, $fail
    // s1 = MOV src1
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrInsert->InsertBefore(instr);
    // s1 = UBFX s1, VarTagShift - 16, 64 - (VarTag_Shift - 16)
    instr = IR::Instr::New(Js::OpCode::UBFX, opndReg, opndReg, IR::IntConstOpnd::New(BITFIELD(Js::VarTag_Shift - 16, 64 - (Js::VarTag_Shift - 16)), TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    // s2 = MOV src2
    IR::Opnd * opndReg1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc2, this->m_func);
    instrInsert->InsertBefore(instr);
    // s1 = BFXIL s2, VarTagShift, 64 - VarTag_Shift -- insert src2's tag bits below src1's
    instr = IR::Instr::New(Js::OpCode::BFXIL, opndReg, opndReg1, IR::IntConstOpnd::New(BITFIELD(Js::VarTag_Shift, 64 - Js::VarTag_Shift), TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    opndReg = opndReg->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();
    // s1 = EOR s1, AtomTag_Pair -- result is zero iff both tags match
    instr = IR::Instr::New(Js::OpCode::EOR, opndReg, opndReg, IR::IntConstOpnd::New(Js::AtomTag_Pair, TyInt32, this->m_func, true), this->m_func);
    instrInsert->InsertBefore(instr);
    // CBNZ s1, $fail
    instr = IR::BranchInstr::New(Js::OpCode::CBNZ, labelFail, this->m_func);
    instr->SetSrc1(opndReg);
    instrInsert->InsertBefore(instr);
    return instrPrev;
}
  3214. bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  3215. {
  3216. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  3217. if (opndSrc->IsTaggedValue() && fContinueLabel)
  3218. {
  3219. // Insert delete branch opcode to tell the dbChecks not to assert on the helper label we may fall through into
  3220. IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
  3221. insertInstr->InsertBefore(fakeBr);
  3222. return false;
  3223. }
  3224. else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
  3225. {
  3226. return false;
  3227. }
  3228. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  3229. // s1 = MOV src1 - Move to a temporary
  3230. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  3231. insertInstr->InsertBefore(instr);
  3232. // s1 = UBFX s1, s1, #VarTag_Shift, #64 - VarTag_Shift
  3233. instr = IR::Instr::New(Js::OpCode::UBFX, opndReg, opndReg, IR::IntConstOpnd::New(BITFIELD(Js::VarTag_Shift, 64 - Js::VarTag_Shift), TyMachReg, this->m_func), this->m_func);
  3234. insertInstr->InsertBefore(instr);
  3235. if (fContinueLabel)
  3236. {
  3237. // CBZ s1, $labelHelper
  3238. instr = IR::BranchInstr::New(Js::OpCode::CBZ, labelTarget, this->m_func);
  3239. instr->SetSrc1(opndReg);
  3240. insertInstr->InsertBefore(instr);
  3241. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  3242. insertInstr->InsertBefore(labelHelper);
  3243. }
  3244. else
  3245. {
  3246. // CBNZ s1, $labelHelper
  3247. instr = IR::BranchInstr::New(Js::OpCode::CBNZ, labelTarget, this->m_func);
  3248. instr->SetSrc1(opndReg);
  3249. insertInstr->InsertBefore(instr);
  3250. }
  3251. return true;
  3252. }
  3253. void
  3254. LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndTaggedType)
  3255. {
  3256. // taggedType = OR type, InlineCacheAuxSlotTypeTag
  3257. IR::IntConstOpnd * opndAuxSlotTag = IR::IntConstOpnd::New(InlineCacheAuxSlotTypeTag, TyInt8, instrLdSt->m_func);
  3258. IR::Instr * instr = IR::Instr::New(Js::OpCode::ORR, opndTaggedType, opndType, opndAuxSlotTag, instrLdSt->m_func);
  3259. instrLdSt->InsertBefore(instr);
  3260. }
  3261. void
  3262. LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
  3263. {
  3264. // Generate
  3265. //
  3266. // LDR r1, type
  3267. // LSR r1, r1, #PolymorphicInlineCacheShift
  3268. // AND r1, r1, #(size - 1)
  3269. // LSL r1, r1, #log2(sizeof(Js::InlineCache))
  3270. // ADD inlineCache, inlineCache, r1
  3271. // MOV r1, type
  3272. IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, instrLdSt->m_func);
  3273. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndOffset, opndType, instrLdSt->m_func);
  3274. instrLdSt->InsertBefore(instr);
  3275. IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
  3276. IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
  3277. // instead of generating
  3278. // LSR r1, r1, #PolymorphicInlineCacheShift
  3279. // AND r1, r1, #(size - 1)
  3280. // LSL r1, r1, #log2(sizeof(Js::InlineCache))
  3281. //
  3282. // we can generate:
  3283. // LSR r1, r1, #(PolymorphicInlineCacheShift - log2(sizeof(Js::InlineCache))
  3284. // AND r1, r1, #(size - 1) << log2(sizeof(Js::InlineCache))
  3285. Assert(rightShiftAmount > leftShiftAmount);
  3286. instr = IR::Instr::New(Js::OpCode::LSR, opndOffset, opndOffset, IR::IntConstOpnd::New(rightShiftAmount - leftShiftAmount, TyUint8, instrLdSt->m_func, true), instrLdSt->m_func);
  3287. instrLdSt->InsertBefore(instr);
  3288. Lowerer::InsertAnd(opndOffset, opndOffset, IR::IntConstOpnd::New(((IntConstType)(polymorphicInlineCacheSize - 1)) << leftShiftAmount, TyMachPtr, instrLdSt->m_func, true), instrLdSt);
  3289. // ADD inlineCache, inlineCache, r1
  3290. Lowerer::InsertAdd(false, opndInlineCache, opndInlineCache, opndOffset, instrLdSt);
  3291. }
  3292. //----------------------------------------------------------------------------
  3293. //
  3294. // LowererMD::GenerateFastScopedFldLookup
  3295. //
  3296. // This is a helper call which generates asm for both
  3297. // ScopedLdFld & ScopedStFld
  3298. //
  3299. //----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateFastScopedFld(IR::Instr * instrScopedFld, bool isLoad)
{
    // Shared fast-path generator for ScopedLdFld (isLoad == true) and
    // ScopedStFld (isLoad == false). Emits the sequence below ahead of
    // instrScopedFld and returns the instruction preceding the insertion point.
    //
    // LDR s1, [base, offset(length)]
    // CMP s1, 1 -- get the length on array and test if it is 1.
    // BNE $helper
    // LDR s2, [base, offset(scopes)] -- load the first scope
    // LDR s3, [s2, offset(type)]
    // LDIMM s4, inlineCache
    // LDR s5, [s4, offset(u.local.type)]
    // CMP s3, s5 -- check type
    // BNE $helper
    // LDR s6, [s2, offset(slots)] -- load the slots array
    // LDR s7 , [s4, offset(u.local.slotIndex)] -- load the cached slot index
    //
    // if (load) {
    // LDR dst, [s6, s7, LSL #2] -- load the value from the slot
    // }
    // else {
    // STR src, [s6, s7, LSL #2]
    // }
    // B $done
    //$helper:
    // dst = BLX PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
    //$done:
    IR::Instr * instr;
    IR::Instr * instrPrev = instrScopedFld->m_prev;
    IR::RegOpnd * opndBase;
    IR::RegOpnd * opndReg1; //s1
    IR::RegOpnd * opndReg2; //s2
    IR::RegOpnd * opndInlineCache; //s4
    IR::IndirOpnd * indirOpnd;
    IR::Opnd * propertyBase;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    // The property sym operand is the src of a load, the dst of a store.
    if (isLoad)
    {
        propertyBase = instrScopedFld->GetSrc1();
    }
    else
    {
        propertyBase = instrScopedFld->GetDst();
    }
    AssertMsg(propertyBase->IsSymOpnd() && propertyBase->AsSymOpnd()->IsPropertySymOpnd() && propertyBase->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected property sym operand of ScopedLdFld or ScopedStFld");
    IR::PropertySymOpnd * propertySymOpnd = propertyBase->AsPropertySymOpnd();
    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    const IR::AutoReuseOpnd holdAfterLegalization(opndBase, m_func);
    AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    // LDR s1, [base, offset(length)] -- get the length on array and test if it is 1.
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    Lowerer::InsertMove(opndReg1, indirOpnd, instrScopedFld);
    // CMP s1, 1 -- the fast path only handles a frame display of exactly one scope.
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndReg1);
    instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
    instrScopedFld->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr);
    // BNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
    instrScopedFld->InsertBefore(instr);
    // LDR s2, [base, offset(scopes)] -- load the first scope
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg,this->m_func);
    opndReg2 = IR::RegOpnd::New(TyMachReg, this->m_func);
    Lowerer::InsertMove(opndReg2, indirOpnd, instrScopedFld);
    // LDR s3, [s2, offset(type)]
    // LDIMM s4, inlineCache
    // LDR s5, [s4, offset(u.local.type)]
    // CMP s3, s5 -- check type
    // BNE $helper
    opndInlineCache = IR::RegOpnd::New(TyMachReg, this->m_func);
    opndReg2->m_sym->m_isNotNumber = true;
    IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrScopedFld, opndReg2, opndType, labelHelper);
    Lowerer::InsertMove(opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrScopedFld, propertySymOpnd), instrScopedFld);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Check the local cache with the tagged type
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrScopedFld, opndType, opndTaggedType);
    Lowerer::GenerateLocalInlineCacheCheck(instrScopedFld, opndTaggedType, opndInlineCache, labelHelper);
    // Cache hit: perform the actual slot access.
    if (isLoad)
    {
        IR::Opnd *opndDst = instrScopedFld->GetDst();
        Lowerer::GenerateLdFldFromLocalInlineCache(instrScopedFld, opndReg2, opndDst, opndInlineCache, labelFallThru, false);
    }
    else
    {
        IR::Opnd *opndSrc = instrScopedFld->GetSrc1();
        GenerateStFldFromLocalInlineCache(instrScopedFld, opndReg2, opndSrc, opndInlineCache, labelFallThru, false);
    }
    // $helper:
    // if (isLoad) {
    // dst = BLX PatchGetPropertyScoped(inlineCache, opndBase, propertyId, srcBase, scriptContext)
    // }
    // else {
    // BLX PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
    // }
    // $fallthru:
    instrScopedFld->InsertBefore(labelHelper);
    instrScopedFld->InsertAfter(labelFallThru);
    return instrPrev;
}
  3404. //----------------------------------------------------------------------------
  3405. //
  3406. // LowererMD::GenerateFastScopedLdFld
  3407. //
  3408. // Make use of the helper to cache the type and slot index used to do a ScopedLdFld
  3409. // when the scope is an array of length 1.
  3410. // Extract the only element from array and do an inline load from the appropriate slot
  3411. // if the type hasn't changed since the last time this ScopedLdFld was executed.
  3412. //
  3413. //----------------------------------------------------------------------------
  3414. IR::Instr *
  3415. LowererMD::GenerateFastScopedLdFld(IR::Instr * instrLdScopedFld)
  3416. {
  3417. //Helper GenerateFastScopedFldLookup generates following:
  3418. //
  3419. // LDR s1, [base, offset(length)]
  3420. // CMP s1, 1 -- get the length on array and test if it is 1.
  3421. // BNE $helper
  3422. // LDR s2, [base, offset(scopes)] -- load the first scope
  3423. // LDR s3, [s2, offset(type)]
  3424. // LDIMM s4, inlineCache
  3425. // LDR s5, [s4, offset(u.local.type)]
  3426. // CMP s3, s5 -- check type
  3427. // BNE $helper
  3428. // LDR s6, [s2, offset(slots)] -- load the slots array
  3429. // LDR s7 , [s4, offset(u.local.slotIndex)] -- load the cached slot index
  3430. // LDR dst, [s6, s7, LSL #2] -- load the value from the slot
  3431. // B $done
  3432. //$helper:
  3433. // dst = BLX PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
  3434. //$done:
  3435. return GenerateFastScopedFld(instrLdScopedFld, true);
  3436. }
  3437. //----------------------------------------------------------------------------
  3438. //
  3439. // LowererMD::GenerateFastScopedStFld
  3440. //
  3441. // Make use of the helper to cache the type and slot index used to do a ScopedStFld
  3442. // when the scope is an array of length 1.
  3443. // Extract the only element from array and do an inline load from the appropriate slot
  3444. // if the type hasn't changed since the last time this ScopedStFld was executed.
  3445. //
  3446. //----------------------------------------------------------------------------
  3447. IR::Instr *
  3448. LowererMD::GenerateFastScopedStFld(IR::Instr * instrStScopedFld)
  3449. {
  3450. // LDR s1, [base, offset(length)]
  3451. // CMP s1, 1 -- get the length on array and test if it is 1.
  3452. // BNE $helper
  3453. // LDR s2, [base, offset(scopes)] -- load the first scope
  3454. // LDR s3, [s2, offset(type)]
  3455. // LDIMM s4, inlineCache
  3456. // LDR s5, [s4, offset(u.local.type)]
  3457. // CMP s3, s5 -- check type
  3458. // BNE $helper
  3459. // LDR s6, [s2, offset(slots)] -- load the slots array
  3460. // LDR s7 , [s4, offset(u.local.slotIndex)] -- load the cached slot index
  3461. // STR src, [s6, s7, LSL #2] -- store the value directly at the slot
  3462. // B $done
  3463. //$helper:
  3464. // BLX PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
  3465. //$done:
  3466. return GenerateFastScopedFld(instrStScopedFld, false);
  3467. }
  3468. void
  3469. LowererMD::GenerateStFldFromLocalInlineCache(
  3470. IR::Instr * instrStFld,
  3471. IR::RegOpnd * opndBase,
  3472. IR::Opnd * opndSrc,
  3473. IR::RegOpnd * opndInlineCache,
  3474. IR::LabelInstr * labelFallThru,
  3475. bool isInlineSlot)
  3476. {
  3477. IR::RegOpnd * opndSlotArray = nullptr;
  3478. IR::IndirOpnd * opndIndir;
  3479. IR::Instr * instr;
  3480. if (!isInlineSlot)
  3481. {
  3482. // s2 = MOV base->slots -- load the slot array
  3483. opndSlotArray = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3484. opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrStFld->m_func);
  3485. Lowerer::InsertMove(opndSlotArray, opndIndir, instrStFld);
  3486. }
  3487. // LDR s5, [s2, offset(u.local.slotIndex)] -- load the cached slot index
  3488. IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyUint16, instrStFld->m_func);
  3489. opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrStFld->m_func);
  3490. Lowerer::InsertMove(opndSlotIndex, opndIndir, instrStFld);
  3491. if (isInlineSlot)
  3492. {
  3493. // STR src, [base, s5, LSL #2] -- store the value directly to the slot [s4 + s5 * 4] = src
  3494. opndIndir = IR::IndirOpnd::New(opndBase, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  3495. instr = IR::Instr::New(Js::OpCode::STR, opndIndir, opndSrc, instrStFld->m_func);
  3496. instrStFld->InsertBefore(instr);
  3497. LegalizeMD::LegalizeInstr(instr);
  3498. }
  3499. else
  3500. {
  3501. // STR src, [s4, s5, LSL #2] -- store the value directly to the slot [s4 + s5 * 4] = src
  3502. opndIndir = IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  3503. instr = IR::Instr::New(Js::OpCode::STR, opndIndir, opndSrc, instrStFld->m_func);
  3504. instrStFld->InsertBefore(instr);
  3505. LegalizeMD::LegalizeInstr(instr);
  3506. }
  3507. // B $done
  3508. instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, instrStFld->m_func);
  3509. instrStFld->InsertBefore(instr);
  3510. }
  3511. IR::Opnd *
  3512. LowererMD::CreateStackArgumentsSlotOpnd()
  3513. {
  3514. // Save the newly-created args object to its dedicated stack slot.
  3515. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, FRAME_REG , TyMachReg, m_func),
  3516. -MachArgsSlotOffset, TyMachPtr, m_func);
  3517. return indirOpnd;
  3518. }
  3519. //
  3520. // jump to $labelHelper, based on the result of CMP
  3521. //
  3522. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst, bool fContinueLabel /* = false */)
  3523. {
  3524. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  3525. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  3526. // s1 = MOV src1 - Move to a temporary
  3527. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  3528. insertInstr->InsertBefore(instr);
  3529. if (instrFirst)
  3530. {
  3531. *instrFirst = instr;
  3532. }
  3533. // s1 = UBFX s1, VarTag_Shift, 64 - VarTag_Shift
  3534. instr = IR::Instr::New(Js::OpCode::UBFX, opndReg, opndReg, IR::IntConstOpnd::New(BITFIELD(Js::VarTag_Shift, 64 - Js::VarTag_Shift), TyMachReg, this->m_func), this->m_func);
  3535. insertInstr->InsertBefore(instr);
  3536. Legalize(instr);
  3537. // s1 = EOR s1, AtomTag
  3538. instr = IR::Instr::New(Js::OpCode::EOR, opndReg, opndReg, IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func, /* dontEncode = */ true), this->m_func);
  3539. insertInstr->InsertBefore(instr);
  3540. if(fContinueLabel)
  3541. {
  3542. // CBZ s1, $labelHelper
  3543. instr = IR::BranchInstr::New(Js::OpCode::CBZ, labelHelper, this->m_func);
  3544. }
  3545. else
  3546. {
  3547. // CBNZ s1, $labelHelper
  3548. instr = IR::BranchInstr::New(Js::OpCode::CBNZ, labelHelper, this->m_func);
  3549. }
  3550. instr->SetSrc1(opndReg);
  3551. insertInstr->InsertBefore(instr);
  3552. }
  3553. void LowererMD::GenerateInt32ToVarConversion(IR::Opnd * opndSrc, IR::Instr * insertInstr )
  3554. {
  3555. AssertMsg(opndSrc->IsRegOpnd(), "NYI for other types");
  3556. IR::Instr* instr = IR::Instr::New(Js::OpCode::ORR, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::AtomTag_IntPtr, TyMachReg, this->m_func), this->m_func);
  3557. insertInstr->InsertBefore(instr);
  3558. }
  3559. IR::RegOpnd *
  3560. LowererMD::GenerateUntagVar(IR::RegOpnd * src, IR::LabelInstr * labelFail, IR::Instr * assignInstr, bool generateTagCheck)
  3561. {
  3562. Assert(src->IsVar());
  3563. // MOV valueOpnd, index
  3564. IR::RegOpnd *valueOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  3565. //
  3566. // Convert Index to 32 bits.
  3567. //
  3568. if (generateTagCheck)
  3569. {
  3570. IR::Opnd * opnd = src->UseWithNewType(TyMachReg, this->m_func);
  3571. Assert(!opnd->IsTaggedInt());
  3572. this->GenerateSmIntTest(opnd, assignInstr, labelFail);
  3573. }
  3574. // Doing a 32-bit MOV clears the tag bits on ARM64. Use MOV_TRUNC so it doesn't get peeped away.
  3575. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, valueOpnd, src->UseWithNewType(TyInt32, this->m_func), this->m_func);
  3576. assignInstr->InsertBefore(instr);
  3577. return valueOpnd;
  3578. }
IR::RegOpnd *LowererMD::LoadNonnegativeIndex(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notTaggedIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
    // Produces an int32 register holding the index value. If indexOpnd is a
    // Var, it is untagged first (branching to notTaggedIntLabel on failure, or
    // via the likely-float path when profiling says so). Unless
    // skipNegativeCheck is set, a negative value branches to negativeLabel.
    Assert(indexOpnd);
    Assert(indexOpnd->IsVar() || indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
    Assert(indexOpnd->GetType() != TyUint32 || skipNegativeCheck);
    Assert(!indexOpnd->IsVar() || notTaggedIntLabel);
    Assert(skipNegativeCheck || negativeLabel);
    Assert(insertBeforeInstr);
    if(indexOpnd->IsVar())
    {
        if (indexOpnd->GetValueType().IsLikelyFloat())
        {
            // Profile data says the index is probably a float; use the
            // float-specific conversion path instead of the tag test.
            return m_lowerer->LoadIndexFromLikelyFloat(indexOpnd, skipNegativeCheck, notTaggedIntLabel, negativeLabel, insertBeforeInstr);
        }
        // Untag the Var; the tag check is elided when statically tagged.
        indexOpnd = GenerateUntagVar(indexOpnd, notTaggedIntLabel, insertBeforeInstr, !indexOpnd->IsTaggedInt());
    }
    if(!skipNegativeCheck)
    {
        // TBNZ index, #31, $notTaggedIntOrNegative -- branch if the sign bit is set
        IR::Instr *instr = IR::BranchInstr::New(Js::OpCode::TBNZ, negativeLabel, this->m_func);
        instr->SetSrc1(indexOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(31, TyVar, this->m_func));
        insertBeforeInstr->InsertBefore(instr);
    }
    return indexOpnd;
}
  3610. // Inlines fast-path for int Mul/Add or int Mul/Sub. If not int, call MulAdd/MulSub helper
  3611. bool LowererMD::TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev)
  3612. {
  3613. IR::Instr *instrMul = instrAdd->GetPrevRealInstrOrLabel();
  3614. IR::Opnd *addSrc;
  3615. IR::RegOpnd *addCommonSrcOpnd;
  3616. Assert(instrAdd->m_opcode == Js::OpCode::Add_A || instrAdd->m_opcode == Js::OpCode::Sub_A);
  3617. bool isSub = (instrAdd->m_opcode == Js::OpCode::Sub_A) ? true : false;
  3618. // Mul needs to be a single def reg
  3619. if (instrMul->m_opcode != Js::OpCode::Mul_A || instrMul->GetDst()->IsRegOpnd() == false)
  3620. {
  3621. // Cannot generate MulAdd
  3622. return false;
  3623. }
  3624. if (instrMul->HasBailOutInfo())
  3625. {
  3626. // Bailout will be generated for the Add, but not the Mul.
  3627. // We could handle this, but this path isn't used that much anymore.
  3628. return false;
  3629. }
  3630. IR::RegOpnd *regMulDst = instrMul->GetDst()->AsRegOpnd();
  3631. if (regMulDst->m_sym->m_isSingleDef == false)
  3632. {
  3633. // Cannot generate MulAdd
  3634. return false;
  3635. }
  3636. // Only handle a * b + c, so dst of Mul needs to match left source of Add
  3637. if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc1()))
  3638. {
  3639. addCommonSrcOpnd = instrAdd->GetSrc1()->AsRegOpnd();
  3640. addSrc = instrAdd->GetSrc2();
  3641. }
  3642. else if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc2()))
  3643. {
  3644. addSrc = instrAdd->GetSrc1();
  3645. addCommonSrcOpnd = instrAdd->GetSrc2()->AsRegOpnd();
  3646. }
  3647. else
  3648. {
  3649. return false;
  3650. }
  3651. // Only handle a * b + c where c != a * b
  3652. if (instrAdd->GetSrc1()->IsEqual(instrAdd->GetSrc2()))
  3653. {
  3654. return false;
  3655. }
  3656. if (addCommonSrcOpnd->m_isTempLastUse == false)
  3657. {
  3658. return false;
  3659. }
  3660. IR::Opnd *mulSrc1 = instrMul->GetSrc1();
  3661. IR::Opnd *mulSrc2 = instrMul->GetSrc2();
  3662. if (mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsTaggedInt()
  3663. && mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsTaggedInt())
  3664. {
  3665. return false;
  3666. }
  3667. // Save prevInstr for the main lower loop
  3668. *pInstrPrev = instrMul->m_prev;
  3669. // Generate int31 fast-path for Mul, go to MulAdd helper if it fails, or one of the source is marked notInt
  3670. if (!(addSrc->IsRegOpnd() && addSrc->AsRegOpnd()->IsNotInt())
  3671. && !(mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsNotInt())
  3672. && !(mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsNotInt()))
  3673. {
  3674. this->GenerateFastMul(instrMul);
  3675. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  3676. IR::Instr *instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
  3677. instrMul->InsertBefore(instr);
  3678. // Generate int31 fast-path for Add
  3679. bool success;
  3680. if (isSub)
  3681. {
  3682. success = this->GenerateFastSub(instrAdd);
  3683. }
  3684. else
  3685. {
  3686. success = this->GenerateFastAdd(instrAdd);
  3687. }
  3688. if (!success)
  3689. {
  3690. labelHelper->isOpHelper = false;
  3691. }
  3692. // Generate MulAdd helper call
  3693. instrAdd->InsertBefore(labelHelper);
  3694. }
  3695. if (instrAdd->dstIsTempNumber)
  3696. {
  3697. m_lowerer->LoadHelperTemp(instrAdd, instrAdd);
  3698. }
  3699. else
  3700. {
  3701. IR::Opnd *tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  3702. this->LoadHelperArgument(instrAdd, tempOpnd);
  3703. }
  3704. this->m_lowerer->LoadScriptContext(instrAdd);
  3705. IR::JnHelperMethod helper;
  3706. if (addSrc == instrAdd->GetSrc2())
  3707. {
  3708. instrAdd->FreeSrc1();
  3709. IR::Opnd *addOpnd = instrAdd->UnlinkSrc2();
  3710. this->LoadHelperArgument(instrAdd, addOpnd);
  3711. helper = isSub ? IR::HelperOp_MulSubRight : IR::HelperOp_MulAddRight;
  3712. }
  3713. else
  3714. {
  3715. instrAdd->FreeSrc2();
  3716. IR::Opnd *addOpnd = instrAdd->UnlinkSrc1();
  3717. this->LoadHelperArgument(instrAdd, addOpnd);
  3718. helper = isSub ? IR::HelperOp_MulSubLeft : IR::HelperOp_MulAddLeft;
  3719. }
  3720. IR::Opnd *src2 = instrMul->UnlinkSrc2();
  3721. this->LoadHelperArgument(instrAdd, src2);
  3722. IR::Opnd *src1 = instrMul->UnlinkSrc1();
  3723. this->LoadHelperArgument(instrAdd, src1);
  3724. this->ChangeToHelperCall(instrAdd, helper);
  3725. instrMul->Remove();
  3726. return true;
  3727. }
// Emits code that loads the numeric value of a var operand into a float register,
// handling both tagged-int and tagged-float encodings inline and branching to
// labelHelper for anything else.
//
//   opndOrig    - var-typed source register
//   opndFloat   - destination float register
//   labelInline - target once opndFloat holds the value (int path branches here;
//                 the float path falls through to it)
//   labelHelper - target when opndOrig is neither tagged int nor tagged float
//   instrInsert - all code is inserted before this instruction
//   checkForNullInLoopBody - forwarded to GenerateFloatTest
//
// Returns the first instruction emitted (from GenerateSmIntTest), or nullptr if
// none was recorded.
IR::Instr *
LowererMD::LoadCheckedFloat(
    IR::RegOpnd *opndOrig,
    IR::RegOpnd *opndFloat,
    IR::LabelInstr *labelInline,
    IR::LabelInstr *labelHelper,
    IR::Instr *instrInsert,
    const bool checkForNullInLoopBody)
{
    //
    //   if (TaggedInt::Is(opndOrig))
    //       opndFloat = int-to-float convert of opndOrig_32
    //       B $labelInline
    //   else
    //       B $labelOpndIsNotInt
    //
    // $labelOpndIsNotInt:
    //   if (TaggedFloat::Is(opndOrig))
    //       s2 = MOV opndOrig
    //       s2 = EOR FloatTag_Value      ; strip the float tag, leaving raw double bits
    //       opndFloat = FMOV_GEN s2      ; bitwise move GPR -> FP register
    //   else
    //       B $labelHelper
    //
    // $labelInline:
    //
    IR::Instr *instrFirst = nullptr;

    IR::LabelInstr *labelOpndIsNotInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    GenerateSmIntTest(opndOrig, instrInsert, labelOpndIsNotInt, &instrFirst);

    if (opndOrig->GetValueType().IsLikelyFloat())
    {
        // Make this path helper if value is likely a float
        instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
    }

    // Tagged int: convert the low 32 bits to float.
    IR::Opnd *opndOrig_32 = opndOrig->UseWithNewType(TyInt32, this->m_func);
    EmitIntToFloat(opndFloat, opndOrig_32, instrInsert);

    IR::Instr *jmpInline = IR::BranchInstr::New(Js::OpCode::B, labelInline, this->m_func);
    instrInsert->InsertBefore(jmpInline);

    instrInsert->InsertBefore(labelOpndIsNotInt);

    // Not an int: bail to helper unless it's a tagged float.
    GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);

    IR::RegOpnd *s2 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, s2, opndOrig, this->m_func);
    instrInsert->InsertBefore(mov);

    // XOR away the float tag to recover the raw IEEE-754 bit pattern.
    IR::Instr *eorTag = IR::Instr::New(Js::OpCode::EOR,
                                       s2,
                                       s2,
                                       IR::IntConstOpnd::New(Js::FloatTag_Value,
                                                             TyMachReg,
                                                             this->m_func,
                                                             /* dontEncode = */ true),
                                       this->m_func);
    instrInsert->InsertBefore(eorTag);

    // Bitwise move of the untagged double bits into the float register.
    IR::Instr *movFloat = IR::Instr::New(Js::OpCode::FMOV_GEN, opndFloat, s2, this->m_func);
    instrInsert->InsertBefore(movFloat);

    return instrFirst;
}
// Lowers a FromVar that loads a float from a var known/expected to be a number.
// Emits the inline tagged-int/tagged-float paths via EmitLoadFloatCommon; if a
// helper path is needed, emits the conversion helper call and (when the
// instruction carries bailout info) splices in the bailout.
//
// insertInstr is the FromVar instruction itself; it is consumed (removed or
// converted into the bailout) by this function.
void
LowererMD::EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, insertInstr->HasBailOutInfo());
    if (labelDone == nullptr)
    {
        // Source was a float constant; the load was fully emitted inline. We're done.
        insertInstr->Remove();
        return;
    }

    // $Done note: insertAfter
    insertInstr->InsertAfter(labelDone);

    if (!insertInstr->HasBailOutInfo())
    {
        // $Done
        insertInstr->Remove();
        return;
    }

    IR::LabelInstr *labelNoBailOut = nullptr;
    IR::SymOpnd *tempSymOpnd = nullptr;

    if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
    {
        // For this bailout kind, first try a non-bailing primitive conversion into a
        // stack temp; only bail out if the helper reports failure.
        if (!this->m_func->tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
        }

        // LEA r3, tempSymDouble
        IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        tempSymOpnd = IR::SymOpnd::New(this->m_func->tempSymDouble, TyFloat64, this->m_func);
        Lowerer::InsertLea(reg3Opnd, tempSymOpnd, insertInstr);

        // regBoolResult = to_number_fromPrimitive(value, &dst, allowUndef, scriptContext);
        this->m_lowerer->LoadScriptContext(insertInstr);

        IR::IntConstOpnd *allowUndefOpnd;
        // NOTE(review): this inner check is always true here given the enclosing
        // condition; the else arm (BailOutNumberOnly) looks unreachable on this path.
        if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
        {
            allowUndefOpnd = IR::IntConstOpnd::New(true, TyInt32, this->m_func);
        }
        else
        {
            Assert(insertInstr->GetBailOutKind() == IR::BailOutNumberOnly);
            allowUndefOpnd = IR::IntConstOpnd::New(false, TyInt32, this->m_func);
        }
        this->LoadHelperArgument(insertInstr, allowUndefOpnd);
        this->LoadHelperArgument(insertInstr, reg3Opnd);
        this->LoadHelperArgument(insertInstr, src);

        IR::RegOpnd *regBoolResult = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::Call, regBoolResult, IR::HelperCallOpnd::New(IR::HelperOp_ConvNumber_FromPrimitive, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        this->LowerCall(instr, 0);

        // TEST regBoolResult, regBoolResult
        instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
        instr->SetSrc1(regBoolResult);
        instr->SetSrc2(regBoolResult);
        insertInstr->InsertBefore(instr);
        LegalizeMD::LegalizeInstr(instr);

        // BNE $noBailOut  -- conversion succeeded, skip the bailout
        labelNoBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::BNE, labelNoBailOut, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // Bailout code: the FromVar itself becomes the bailout instruction.
    Assert(insertInstr->m_opcode == Js::OpCode::FromVar);
    insertInstr->UnlinkDst();
    insertInstr->FreeSrc1();
    IR::Instr *bailoutInstr = insertInstr;
    insertInstr = bailoutInstr->m_next;
    this->m_lowerer->GenerateBailOut(bailoutInstr);

    // $noBailOut
    if (labelNoBailOut)
    {
        insertInstr->InsertBefore(labelNoBailOut);

        Assert(dst->IsRegOpnd());

        // VLDR dst, [pResult].f64  -- load the helper's result from the stack temp
        instr = IR::Instr::New(Js::OpCode::FLDR, dst, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
        LegalizeMD::LegalizeInstr(instr);
    }
}
  3865. IR::LabelInstr*
  3866. LowererMD::EmitLoadFloatCommon(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, bool needHelperLabel)
  3867. {
  3868. IR::Instr *instr;
  3869. Assert(src->GetType() == TyVar);
  3870. Assert(dst->GetType() == TyFloat64 || TyFloat32);
  3871. bool isFloatConst = false;
  3872. IR::RegOpnd *regFloatOpnd = nullptr;
  3873. if (src->IsRegOpnd() && src->AsRegOpnd()->m_sym->m_isFltConst)
  3874. {
  3875. IR::RegOpnd *regOpnd = src->AsRegOpnd();
  3876. Assert(regOpnd->m_sym->m_isSingleDef);
  3877. Js::Var value = regOpnd->m_sym->GetFloatConstValueAsVar_PostGlobOpt();
  3878. IR::MemRefOpnd *memRef = IR::MemRefOpnd::New((BYTE*)value + Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  3879. regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  3880. instr = IR::Instr::New(Js::OpCode::FLDR, regFloatOpnd, memRef, this->m_func);
  3881. insertInstr->InsertBefore(instr);
  3882. LegalizeMD::LegalizeInstr(instr);
  3883. isFloatConst = true;
  3884. }
  3885. // Src is constant?
  3886. if (src->IsImmediateOpnd() || src->IsFloatConstOpnd())
  3887. {
  3888. regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  3889. m_lowerer->LoadFloatFromNonReg(src, regFloatOpnd, insertInstr);
  3890. isFloatConst = true;
  3891. }
  3892. if (isFloatConst)
  3893. {
  3894. if (dst->GetType() == TyFloat32)
  3895. {
  3896. // FCVT.F32.F64 regOpnd32.f32, regOpnd.f64 -- Convert regOpnd from f64 to f32
  3897. IR::RegOpnd *regOpnd32 = regFloatOpnd->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
  3898. instr = IR::Instr::New(Js::OpCode::FCVT, regOpnd32, regFloatOpnd, this->m_func);
  3899. insertInstr->InsertBefore(instr);
  3900. // FMOV dst, regOpnd32
  3901. instr = IR::Instr::New(Js::OpCode::FMOV, dst, regOpnd32, this->m_func);
  3902. insertInstr->InsertBefore(instr);
  3903. }
  3904. else
  3905. {
  3906. instr = IR::Instr::New(Js::OpCode::FMOV, dst, regFloatOpnd, this->m_func);
  3907. insertInstr->InsertBefore(instr);
  3908. }
  3909. LegalizeMD::LegalizeInstr(instr);
  3910. return nullptr;
  3911. }
  3912. Assert(src->IsRegOpnd());
  3913. IR::LabelInstr *labelStore = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3914. IR::LabelInstr *labelHelper;
  3915. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3916. if (needHelperLabel)
  3917. {
  3918. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  3919. }
  3920. else
  3921. {
  3922. labelHelper = labelDone;
  3923. }
  3924. IR::RegOpnd *reg2 = IR::RegOpnd::New(TyMachDouble, this->m_func);
  3925. // Load the float value in reg2
  3926. this->LoadCheckedFloat(src->AsRegOpnd(), reg2, labelStore, labelHelper, insertInstr, needHelperLabel);
  3927. // $Store
  3928. insertInstr->InsertBefore(labelStore);
  3929. if (dst->GetType() == TyFloat32)
  3930. {
  3931. IR::RegOpnd *reg2_32 = reg2->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
  3932. // FCVT.F32.F64 r2_32.f32, r2.f64 -- Convert regOpnd from f64 to f32
  3933. instr = IR::Instr::New(Js::OpCode::FCVT, reg2_32, reg2, this->m_func);
  3934. insertInstr->InsertBefore(instr);
  3935. // FMOV dst, r2_32
  3936. instr = IR::Instr::New(Js::OpCode::FMOV, dst, reg2_32, this->m_func);
  3937. insertInstr->InsertBefore(instr);
  3938. }
  3939. else
  3940. {
  3941. // FMOV dst, r2
  3942. instr = IR::Instr::New(Js::OpCode::FMOV, dst, reg2, this->m_func);
  3943. insertInstr->InsertBefore(instr);
  3944. }
  3945. LegalizeMD::LegalizeInstr(instr);
  3946. // B $Done
  3947. instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
  3948. insertInstr->InsertBefore(instr);
  3949. if (needHelperLabel)
  3950. {
  3951. // $Helper
  3952. insertInstr->InsertBefore(labelHelper);
  3953. }
  3954. return labelDone;
  3955. }
  3956. void
  3957. LowererMD::EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
  3958. {
  3959. IR::LabelInstr *labelDone;
  3960. IR::Instr *instr;
  3961. Assert(src->GetType() == TyVar);
  3962. Assert(dst->GetType() == TyFloat64 || TyFloat32);
  3963. Assert(src->IsRegOpnd());
  3964. labelDone = EmitLoadFloatCommon(dst, src, insertInstr, true);
  3965. if (labelDone == nullptr)
  3966. {
  3967. // We're done
  3968. return;
  3969. }
  3970. IR::BailOutKind bailOutKind = instrBailOut && instrBailOut->HasBailOutInfo() ? instrBailOut->GetBailOutKind() : IR::BailOutInvalid;
  3971. if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  3972. {
  3973. // Bail out instead of making the helper call.
  3974. Assert(labelBailOut);
  3975. m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, insertInstr);
  3976. insertInstr->InsertBefore(labelDone);
  3977. return;
  3978. }
  3979. IR::Opnd *memAddress = dst;
  3980. if (dst->IsRegOpnd())
  3981. {
  3982. IR::SymOpnd *symOpnd = nullptr;
  3983. if (dst->GetType() == TyFloat32)
  3984. {
  3985. symOpnd = IR::SymOpnd::New(StackSym::New(TyFloat32, this->m_func), TyFloat32, this->m_func);
  3986. this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(float));
  3987. }
  3988. else
  3989. {
  3990. symOpnd = IR::SymOpnd::New(StackSym::New(TyFloat64,this->m_func), TyMachDouble, this->m_func);
  3991. this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(double));
  3992. }
  3993. memAddress = symOpnd;
  3994. }
  3995. // LEA r3, dst
  3996. IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  3997. Lowerer::InsertLea(reg3Opnd, memAddress, insertInstr);
  3998. // to_number_full(value, &dst, scriptContext);
  3999. // Create dummy binary op to convert into helper
  4000. instr = IR::Instr::New(Js::OpCode::Add_A, this->m_func);
  4001. instr->SetSrc1(src);
  4002. instr->SetSrc2(reg3Opnd);
  4003. insertInstr->InsertBefore(instr);
  4004. if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
  4005. {
  4006. _Analysis_assume_(instrBailOut != nullptr);
  4007. instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
  4008. if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
  4009. {
  4010. IR::Instr * instrShare = instrBailOut->ShareBailOut();
  4011. m_lowerer->LowerBailTarget(instrShare);
  4012. }
  4013. }
  4014. IR::JnHelperMethod helper;
  4015. if (dst->GetType() == TyFloat32)
  4016. {
  4017. helper = IR::HelperOp_ConvFloat_Helper;
  4018. }
  4019. else
  4020. {
  4021. helper = IR::HelperOp_ConvNumber_Helper;
  4022. }
  4023. this->m_lowerer->LowerBinaryHelperMem(instr, helper);
  4024. if (dst->IsRegOpnd())
  4025. {
  4026. instr = IR::Instr::New(Js::OpCode::FLDR, dst , memAddress, this->m_func);
  4027. insertInstr->InsertBefore(instr);
  4028. LegalizeMD::LegalizeInstr(instr);
  4029. }
  4030. // $Done
  4031. insertInstr->InsertBefore(labelDone);
  4032. }
// Emits the inline recycler bump-allocation fast path:
//
//   newObjDst = allocator->freeObjectList
//   nextMemBlock = newObjDst + allocSize
//   if (nextMemBlock > allocator->endAddress) goto $allocHelper
//   allocator->freeObjectList = nextMemBlock
//   goto $allocDone
//
// The caller provides the $allocHelper (slow path) and $allocDone labels and
// emits the slow-path call itself. All code is inserted before insertionPointInstr.
void
LowererMD::GenerateFastRecyclerAlloc(size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, IR::LabelInstr* allocHelperLabel, IR::LabelInstr* allocDoneLabel)
{
    ScriptContextInfo* scriptContext = this->m_func->GetScriptContextInfo();

    void* allocatorAddress;
    uint32 endAddressOffset;
    uint32 freeListOffset;
    size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);

    bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
    // Resolve the per-size-class allocator and the offsets of its end-address and
    // free-list fields for this (possibly OOP) JIT.
    Recycler::GetNormalHeapBlockAllocatorInfoForNativeAllocation((void*)scriptContext->GetRecyclerAddr(), alignedSize,
        allocatorAddress, endAddressOffset, freeListOffset,
        allowNativeCodeBumpAllocation, this->m_func->IsOOPJIT());

    IR::RegOpnd * allocatorAddressRegOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);

    // LDIMM allocatorAddressRegOpnd, allocator
    IR::AddrOpnd* allocatorAddressOpnd = IR::AddrOpnd::New(allocatorAddress, IR::AddrOpndKindDynamicMisc, this->m_func);
    IR::Instr * loadAllocatorAddressInstr = IR::Instr::New(Js::OpCode::LDIMM, allocatorAddressRegOpnd, allocatorAddressOpnd, this->m_func);
    insertionPointInstr->InsertBefore(loadAllocatorAddressInstr);

    IR::IndirOpnd * endAddressOpnd = IR::IndirOpnd::New(allocatorAddressRegOpnd, endAddressOffset, TyMachPtr, this->m_func);
    IR::IndirOpnd * freeObjectListOpnd = IR::IndirOpnd::New(allocatorAddressRegOpnd, freeListOffset, TyMachPtr, this->m_func);

    // LDR newObjDst, allocator->freeObjectList
    IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::LDR, newObjDst, freeObjectListOpnd, this->m_func);
    insertionPointInstr->InsertBefore(loadMemBlockInstr);
    LegalizeMD::LegalizeInstr(loadMemBlockInstr);

    // nextMemBlock = ADD newObjDst, allocSize
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IntConstOpnd* allocSizeOpnd = IR::IntConstOpnd::New((int32)allocSize, TyInt32, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::ADD, nextMemBlockOpnd, newObjDst, allocSizeOpnd, this->m_func);
    insertionPointInstr->InsertBefore(loadNextMemBlockInstr);
    LegalizeMD::LegalizeInstr(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    insertionPointInstr->InsertBefore(checkInstr);
    LegalizeMD::LegalizeInstr(checkInstr);

    // BHI $allocHelper  -- unsigned compare: block doesn't fit, take the slow path
    IR::BranchInstr * branchToAllocHelperInstr = IR::BranchInstr::New(Js::OpCode::BHI, allocHelperLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocHelperInstr);

    // LDR allocator->freeObjectList, nextMemBlock  -- commit the bump
    IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::LDR, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
    insertionPointInstr->InsertBefore(setFreeObjectListInstr);
    LegalizeMD::LegalizeInstr(setFreeObjectListInstr);

    // B $allocDone
    IR::BranchInstr * branchToAllocDoneInstr = IR::BranchInstr::New(Js::OpCode::B, allocDoneLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocDoneInstr);
}
// Lowers a count-leading-zeros IR instruction in place to the ARM64 CLZ opcode,
// then legalizes the resulting machine instruction. Operands are left untouched.
void
LowererMD::GenerateClz(IR::Instr * instr)
{
    Assert(instr->GetSrc1()->IsIntegral32());
    Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
    instr->m_opcode = Js::OpCode::CLZ;
    LegalizeMD::LegalizeInstr(instr);
}
// Boxes the f64 value in opndFloat into a tagged float var in dstOpnd (NaN-boxing:
// move the bits to a GPR, XOR in FloatTag_Value, store to dst). In asm.js mode the
// value is first canonicalized so that any NaN becomes the single k_Nan pattern.
// instrOrig is unused here; code is inserted before instrInsert; isHelper marks
// whether the emitted labels lie on a helper path.
void
LowererMD::SaveDoubleToVar(IR::RegOpnd * dstOpnd, IR::RegOpnd *opndFloat, IR::Instr *instrOrig, IR::Instr *instrInsert, bool isHelper)
{
    Assert(opndFloat->GetType() == TyFloat64);

    // Call JSNumber::ToVar to save the float operand to the result of the original (var) instruction
    // s1 = MOV opndFloat   (bitwise FP -> GPR move)
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::Instr *mov = IR::Instr::New(Js::OpCode::FMOV_GEN, s1, opndFloat, m_func);
    instrInsert->InsertBefore(mov);

    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        // NaN canonicalization: any NaN bit pattern is replaced with k_Nan so the
        // tagging XOR below can't collide with arbitrary NaN payloads.
        //
        //     s1 = FMOV_GEN src
        //     tmp = UBFX s1, #52, #11   ; extract exponent, bits 52-62
        //     cmp tmp, 0x7FF            ; all-ones exponent => NaN or Inf
        //     beq helper
        //     b done
        // helper:
        //     tmp2 = UBFX s1, #0, #52   ; extract mantissa, bits 0-51
        //     cbz tmp2, done            ; zero mantissa => Inf, leave as-is
        //     s1 = JavascriptNumber::k_Nan
        // done:
        IR::RegOpnd* tmp = IR::RegOpnd::New(TyMachReg, m_func);
        IR::Instr* newInstr = IR::Instr::New(Js::OpCode::UBFX, tmp, s1, IR::IntConstOpnd::New(BITFIELD(52, 11), TyMachReg, m_func, true), m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);

        newInstr = IR::Instr::New(Js::OpCode::CMP, tmp, IR::IntConstOpnd::New(0x7FF, TyMachReg, m_func, true), m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);

        IR::LabelInstr* helper = Lowerer::InsertLabel(true, instrInsert);
        IR::Instr* branch = IR::BranchInstr::New(Js::OpCode::BEQ, helper, m_func);
        helper->InsertBefore(branch);

        IR::LabelInstr* done = Lowerer::InsertLabel(isHelper, instrInsert);
        Lowerer::InsertBranch(Js::OpCode::Br, done, helper);

        IR::RegOpnd* tmp2 = IR::RegOpnd::New(TyMachReg, m_func);
        newInstr = IR::Instr::New(Js::OpCode::UBFX, tmp2, s1, IR::IntConstOpnd::New(BITFIELD(0, 52), TyMachReg, m_func, true), m_func);
        done->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);

        branch = IR::BranchInstr::New(Js::OpCode::CBZ, done, m_func);
        branch->SetSrc1(tmp2);
        done->InsertBefore(branch);

        IR::Opnd * opndNaN = IR::AddrOpnd::New((Js::Var)Js::JavascriptNumber::k_Nan, IR::AddrOpndKindConstantVar, m_func, true);
        Lowerer::InsertMove(s1, opndNaN, done);
    }

    // Apply the float tag and store:
    //   s1 = EOR s1, FloatTag_Value
    //   dst = s1
    IR::Instr* setTag = IR::Instr::New(Js::OpCode::EOR, s1, s1, IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value, IR::AddrOpndKindConstantVar, this->m_func, true), this->m_func);
    IR::Instr* movDst = IR::Instr::New(Js::OpCode::MOV, dstOpnd, s1, this->m_func);
    instrInsert->InsertBefore(setTag);
    instrInsert->InsertBefore(movDst);
    LowererMD::Legalize(setTag);
}
// Emits the inline fast paths for Math.abs:
//   - constant tagged-int source: folded at JIT time into a MOV of the absolute value
//   - tagged-int source: integer abs via CSNEGPL, bailing to $helper on INT_MIN-style
//     overflow (sign bit still set after negate)
//   - tagged-float source: clear the sign by ORing the sign bit into the tagged
//     value (the tag XOR removes it again when untagging)
//   - anything else: branch to labelHelper
// callInstr is unused on this path; code is inserted before insertInstr.
void
LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // if isFloat goto $float
    // s1 = MOV src
    //      CMP s1, #0
    // s1 = CSNEGPL s1, s1
    //      TBNZ s1, #31, $labelHelper
    // s1 = ORR s1, AtomTag_IntPtr
    //      JMP $done
    // $float
    //      CMP [src], JavascriptNumber.vtable
    //      JNE $helper
    //      MOVSD r1, [src + offsetof(value)]
    //      ANDPD r1, absDoubleCst
    //      dst = DoubleToVar(r1)

    IR::Instr *instr = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    bool isInt = false;
    bool isNotInt = false;

    if (src->IsRegOpnd())
    {
        if (src->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
        else if (src->AsRegOpnd()->IsNotInt())
        {
            isNotInt = true;
        }
    }
    else if (src->IsAddrOpnd())
    {
        // Constant tagged-int: compute |value| at JIT time when it still fits.
        IR::AddrOpnd *varOpnd = src->AsAddrOpnd();
        Assert(varOpnd->IsVar() && Js::TaggedInt::Is(varOpnd->m_address));

        int64 absValue = ::_abs64(Js::TaggedInt::ToInt32(varOpnd->m_address));

        if (!Js::TaggedInt::IsOverflow(absValue))
        {
            varOpnd->SetAddress(Js::TaggedInt::ToVarUnchecked((int32)absValue), IR::AddrOpndKindConstantVar);

            instr = IR::Instr::New(Js::OpCode::MOV, dst, varOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
            return;
        }
    }

    if (src->IsRegOpnd() == false)
    {
        // Non-register source (e.g. overflowing constant): materialize it in a register.
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, this->m_func);
        insertInstr->InsertBefore(instr);
        src = regOpnd;
    }

    bool emitFloatAbs = !isInt;

    if (!isNotInt)
    {
        // Integer fast path (skipped entirely when the source is known not-int).
        if (!isInt)
        {
            // Unknown type: test the tag; non-ints go to $float (or straight to helper).
            IR::LabelInstr *label = labelHelper;
            if (emitFloatAbs)
            {
                label = labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            }

            GenerateSmIntTest(src, insertInstr, label);
        }

        // s1 = MOV src
        IR::RegOpnd *regSrc = IR::RegOpnd::New(TyInt32, this->m_func);
        Lowerer::InsertMove(regSrc, src, insertInstr);

        // CMP s1, #0
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(regSrc);
        instr->SetSrc2(IR::IntConstOpnd::New(0, IRType::TyInt32, this->m_func));
        insertInstr->InsertBefore(instr);
        Legalize(instr);

        // s1 = CSNEGPL s1, s1   -- s1 = (s1 >= 0) ? s1 : -s1
        instr = IR::Instr::New(Js::OpCode::CSNEGPL, regSrc, regSrc, regSrc, this->m_func);
        insertInstr->InsertBefore(instr);
        Legalize(instr);

        // TBNZ s1, #31, $labelHelper  -- still negative => INT32_MIN overflowed; use helper
        instr = IR::BranchInstr::New(Js::OpCode::TBNZ, labelHelper, this->m_func);
        instr->SetSrc1(regSrc);
        instr->SetSrc2(IR::IntConstOpnd::New(31, IRType::TyInt32, this->m_func));
        insertInstr->InsertBefore(instr);

        // MOV dst, s1
        instr = IR::Instr::New(Js::OpCode::MOV, dst, regSrc, this->m_func);
        insertInstr->InsertBefore(instr);

        GenerateInt32ToVarConversion(dst, insertInstr);
    }

    if (labelFloat)
    {
        // B $done  -- int path finished; skip over the float path
        instr = IR::BranchInstr::New(Js::OpCode::B, doneLabel, this->m_func);
        insertInstr->InsertBefore(instr);

        // $float
        insertInstr->InsertBefore(labelFloat);
    }

    if (emitFloatAbs)
    {
        // if (typeof(src) == double)
        IR::RegOpnd *src64 = src->AsRegOpnd();
        GenerateFloatTest(src64, insertInstr, labelHelper);

        // dst64 = MOV src64
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, src64, this->m_func));

        // Unconditionally set the sign bit. This will get XORd away when we remove the tag.
        // dst64 = ORR 0x8000000000000000
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::ORR, dst, dst, IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func), this->m_func));
    }
    else if (!isInt)
    {
        // The source is not known to be a tagged int, so either it's definitely not an int (isNotInt), or the int version of
        // abs failed the tag check and jumped here. We can't emit the float version of abs (!emitFloatAbs) due to SSE2 not
        // being available, so jump straight to the helper.

        // JMP $helper
        instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
// Lowers an int32 IR opcode in place to its ARM64 machine opcode. Simple ALU ops
// become a direct opcode swap; compare-branches split into CMP + conditional
// branch; shifts and Neg get operand rewriting. The final instruction (the CMP for
// the branch cases) is legalized before returning.
void
LowererMD::EmitInt4Instr(IR::Instr *instr)
{
    IR::Instr * newInstr;
    IR::Opnd * src1;
    IR::Opnd * src2;

    switch (instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        // Negation as "SUB dst, zr, src": subtract from the zero register.
        instr->m_opcode = Js::OpCode::SUB;
        instr->SetSrc2(instr->UnlinkSrc1());
        instr->SetSrc1(IR::RegOpnd::New(nullptr, RegZR, TyInt32, instr->m_func));
        break;

    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::MVN;
        break;

    case Js::OpCode::Add_I4:
        instr->m_opcode = Js::OpCode::ADD;
        break;

    case Js::OpCode::Sub_I4:
        instr->m_opcode = Js::OpCode::SUB;
        break;

    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::MUL;
        break;

    case Js::OpCode::DivU_I4:
        AssertMsg(UNREACHED, "Unsigned div NYI");
        // Intentional fall-through: in release builds (assert compiled out) this
        // lowers to signed divide.
    case Js::OpCode::Div_I4:
        instr->m_opcode = Js::OpCode::SDIV;
        break;

    case Js::OpCode::RemU_I4:
        AssertMsg(UNREACHED, "Unsigned rem NYI");
        // Intentional fall-through, as above.
    case Js::OpCode::Rem_I4:
        instr->m_opcode = Js::OpCode::REM;
        break;

    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::ORR;
        break;

    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::EOR;
        break;

    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        ChangeToShift(instr, false /* needFlags */);
        break;

    case Js::OpCode::BrTrue_I4:
        // Single-operand branches map to compare-with-zero-and-branch.
        instr->m_opcode = Js::OpCode::CBNZ;
        break;

    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::CBZ;
        break;

    // Two-operand branches: pick the condition code (signed for Br*, unsigned
    // HI/CS/CC/LS for BrUn*), then share the CMP emission below.
    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::BEQ;
        goto br2_Common;

    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::BNE;
        goto br2_Common;

    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::BGT;
        goto br2_Common;

    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::BGE;
        goto br2_Common;

    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::BLE;
        goto br2_Common;

    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::BLT;
        goto br2_Common;

    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::BHI;
        goto br2_Common;

    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::BCS;
        goto br2_Common;

    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::BCC;
        goto br2_Common;

    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::BLS;
        goto br2_Common;

br2_Common:
        // Move both sources onto a new CMP inserted before the (now conditional)
        // branch instruction.
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        // Let instr point to the CMP so we can legalize it.
        instr = newInstr;
        break;

    default:
        AssertMsg(UNREACHED, "NYI I4 instr");
        break;
    }

    LegalizeMD::LegalizeInstr(instr);
}
// Lowers Neg_I4 with a bailout on overflow and/or negative zero:
//
//     SUBS dst, zr, src1      ; negate, setting flags
//     BVS  $bailOut           ; if BailOutOnOverflow
//     BEQ  $bailOut           ; if BailOutOnNegativeZero (result 0 => src was 0 => -0)
//     B    $skipBailOut
// $bailOut:
//     ...
// $skipBailOut:
//
// The caller supplies bailOutLabel (which must immediately follow instr) and
// skipBailOutLabel; the conditional branches are inserted just before bailOutLabel.
void
LowererMD::LowerInt4NegWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Neg_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    Assert(instr->GetDst()->IsInt32());
    Assert(instr->GetSrc1()->IsInt32());

    // Lower the instruction: rewrite "dst = Neg src" as "SUBS dst, zr, src".
    instr->m_opcode = Js::OpCode::SUBS;
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->SetSrc2(instr->UnlinkSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->SetSrc1(IR::RegOpnd::New(nullptr, RegZR, TyInt32, instr->m_func));
    Legalize(instr);

    if (bailOutKind & IR::BailOutOnOverflow)
    {
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVS, bailOutLabel, instr->m_func));
    }

    if (bailOutKind & IR::BailOutOnNegativeZero)
    {
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BEQ, bailOutLabel, instr->m_func));
    }

    // Skip bailout
    bailOutLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
// Lowers Add_I4 with a bailout on overflow:
//
//     ADDS dst, src1, src2    ; add, setting flags
//     BVC  $skipBailOut       ; no overflow => skip
//     (fall through to $bailOut)
//
// bailOutLabel must immediately follow instr; skipBailOutLabel is the
// continuation. If dst aliases a source, the dst is sunk past the bailout
// (via Ld_I4 at skipBailOutLabel) so the sources stay live for the bailout.
void
LowererMD::LowerInt4AddWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Add_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    Assert(instr->GetDst()->IsInt32());
    Assert(instr->GetSrc1()->IsInt32());
    Assert(instr->GetSrc2()->IsInt32());

    // ADDS dst, src1, src2
    // BVC skipBailOutLabel
    // fallthrough to bailout

    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());

    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if (dstEquSrc1 || dstEquSrc2)
    {
        // dst overlaps a source: write to a temp and copy to dst only after the
        // bailout is skipped, so the original operand values survive a bailout.
        LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
    }

    // Lower the instruction
    ChangeToAdd(instr, true /* needFlags */);
    Legalize(instr);

    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVC, skipBailOutLabel, instr->m_func));

    // Fall through to bailOutLabel
}
  4430. void
  4431. LowererMD::LowerInt4SubWithBailOut(
  4432. IR::Instr *const instr,
  4433. const IR::BailOutKind bailOutKind,
  4434. IR::LabelInstr *const bailOutLabel,
  4435. IR::LabelInstr *const skipBailOutLabel)
  4436. {
  4437. Assert(instr);
  4438. Assert(instr->m_opcode == Js::OpCode::Sub_I4);
  4439. Assert(!instr->HasBailOutInfo());
  4440. Assert(
  4441. (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
  4442. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  4443. Assert(bailOutLabel);
  4444. Assert(instr->m_next == bailOutLabel);
  4445. Assert(skipBailOutLabel);
  4446. Assert(instr->GetDst()->IsInt32());
  4447. Assert(instr->GetSrc1()->IsInt32());
  4448. Assert(instr->GetSrc2()->IsInt32());
  4449. // SUBS dst, src1, src2
  4450. // BVC skipBailOutLabel
  4451. // fallthrough to bailout
  4452. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  4453. Assert(dst->IsRegOpnd());
  4454. const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
  4455. if(dstEquSrc1 || dstEquSrc2)
  4456. {
  4457. LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
  4458. }
  4459. // Lower the instruction
  4460. ChangeToSub(instr, true /* needFlags */);
  4461. Legalize(instr);
  4462. // Skip bailout on no overflow
  4463. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVC, skipBailOutLabel, instr->m_func));
  4464. // Fall through to bailOutLabel
  4465. }
void
LowererMD::LowerInt4MulWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Mul_I4 via a 64-bit SMULL so overflow can be detected by
    // comparing the full product against its sign-extended low 32 bits, with
    // an optional negative-zero check. instr must be immediately followed by
    // bailOutLabel; skipBailOutLabel follows the bailout code.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Mul_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->UnlinkSrc1();
    IR::Opnd *src2 = instr->UnlinkSrc2();
    IR::Instr *insertInstr;
    Assert(dst->IsInt32());
    Assert(src1->IsInt32());
    Assert(src2->IsInt32());

    // s3 = SMULL src1, src2 // result is i64 (the full 64-bit product)
    IR::Opnd* s3 = IR::RegOpnd::New(TyInt64, instr->m_func);
    insertInstr = IR::Instr::New(Js::OpCode::SMULL, s3, src1, src2, instr->m_func);
    instr->InsertBefore(insertInstr);
    LegalizeMD::LegalizeInstr(insertInstr);

    // dst = MOV_TRUNC s3 -- low 32 bits of the product
    instr->m_opcode = Js::OpCode::MOV_TRUNC;
    instr->SetSrc1(s3->UseWithNewType(TyInt32, instr->m_func));

    // check negative zero
    //
    // If the result is zero, we need to check and only bail out if it would be -0.
    // We know that if the result is 0/-0, at least one operand must be zero.
    // We should bail out if src1 + src2 < 0, as this proves that the other operand is negative.
    //
    //     CMN src1, src2
    //     BPL $skipBailOutLabel
    //
    // $bailOutLabel
    //     GenerateBailout
    //
    // $skipBailOutLabel
    IR::LabelInstr *checkForNegativeZeroLabel = nullptr;
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        bailOutLabel->InsertBefore(checkForNegativeZeroLabel);

        Assert(dst->IsRegOpnd());
        Assert(!src1->IsEqual(src2)); // cannot result in -0 if both operands are the same; GlobOpt should have figured that out

        // CMN src1, src2 -- sets flags on src1 + src2
        // BPL $skipBailOutLabel -- sum non-negative: result is +0, no bailout
        insertInstr = IR::Instr::New(Js::OpCode::CMN, instr->m_func);
        insertInstr->SetSrc1(src1);
        insertInstr->SetSrc2(src2);
        bailOutLabel->InsertBefore(insertInstr);
        LegalizeMD::LegalizeInstr(insertInstr);
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BPL, skipBailOutLabel, instr->m_func));
        // Fall through to bailOutLabel
    }

    // Later checks are inserted in front of the -0 check block when it exists,
    // otherwise directly in front of the bailout label.
    IR::LabelInstr* insertBeforeInstr = checkForNegativeZeroLabel ? checkForNegativeZeroLabel : bailOutLabel;

    //check overflow
    if(bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
    {
        // CMP_SXTW s3, s3 -- compare the 64-bit product with its low half
        // sign-extended; inequality means the product does not fit in int32
        insertInstr = IR::Instr::New(Js::OpCode::CMP_SXTW, instr->m_func);
        insertInstr->SetSrc1(s3);
        insertInstr->SetSrc2(s3);
        instr->InsertBefore(insertInstr);

        // BNE $bailOutHelper
        insertInstr = IR::BranchInstr::New(Js::OpCode::BNE, bailOutLabel, instr->m_func);
        instr->InsertBefore(insertInstr);
    }

    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // TST dst, dst
        // BEQ $checkForNegativeZeroLabel -- a zero result needs the -0 check above
        insertInstr = IR::Instr::New(Js::OpCode::TST, instr->m_func);
        insertInstr->SetSrc1(dst);
        insertInstr->SetSrc2(dst);
        insertBeforeInstr->InsertBefore(insertInstr);
        LegalizeMD::LegalizeInstr(insertInstr);
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BEQ, checkForNegativeZeroLabel, instr->m_func));
    }

    // All checks passed: jump over the bailout code.
    insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, skipBailOutLabel, instr->m_func));
}
void
LowererMD::LowerInt4RemWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    // Lowers Rem_I4 and, when requested, adds a negative-zero bailout:
    // dst == 0 with src1 < 0 means the mathematical result is -0.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    Assert(dst->IsInt32());
    Assert(src1->IsInt32());
    Assert(instr->GetSrc2()->IsInt32());

    //Lower the instruction
    EmitInt4Instr(instr);

    //check for negative zero
    //We have, dst = src1 % src2
    //We need to bailout if dst == 0 and src1 < 0
    //    tst dst, dst
    //    bne $skipBailOutLabel
    //    tst src1,src1
    //    bpl $skipBailOutLabel
    //
    //$bailOutLabel
    //    GenerateBailout();
    //
    //$skipBailOutLabel
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        IR::LabelInstr *checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        bailOutLabel->InsertBefore(checkForNegativeZeroLabel);

        // tst dst, dst / bne $skipBailOutLabel -- nonzero remainder: no bailout
        IR::Instr *insertInstr = IR::Instr::New(Js::OpCode::TST, instr->m_func);
        insertInstr->SetSrc1(dst);
        insertInstr->SetSrc2(dst);
        bailOutLabel->InsertBefore(insertInstr);
        LegalizeMD::LegalizeInstr(insertInstr);
        IR::Instr *branchInstr = IR::BranchInstr::New(Js::OpCode::BNE, skipBailOutLabel, instr->m_func);
        bailOutLabel->InsertBefore(branchInstr);

        // tst src1, src1 / bpl $skipBailOutLabel -- non-negative dividend: result is +0
        insertInstr = IR::Instr::New(Js::OpCode::TST, instr->m_func);
        insertInstr->SetSrc1(src1);
        insertInstr->SetSrc2(src1);
        bailOutLabel->InsertBefore(insertInstr);
        LegalizeMD::LegalizeInstr(insertInstr);
        branchInstr = IR::BranchInstr::New(Js::OpCode::BPL, skipBailOutLabel, instr->m_func);
        bailOutLabel->InsertBefore(branchInstr);
    }
    // Fall through to bailOutLabel
}
void
LowererMD::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Converts an int32/uint32 source into a tagged Var:
    //     MOV.32 e1, e_src1
    //     TBNZ e1, #31, $Helper [uint32] -- overflows?
    //     ORR r1, 1<<VarTag_Shift
    //     MOV r_dst, r1
    //     JMP $done [uint32]
    // $helper [uint32]
    //     EmitLoadVarNoCheck    -- boxes the value instead
    // $done [uint32]
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelHelper = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(isFromUint32 ? TyUint32 : TyInt32);

    if (src1->IsTaggedInt())
    {
        // Known to fit a tagged int: the uint32 overflow check can be skipped.
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not to be an int: box it unconditionally. ToVar()
        this->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper);
        return;
    }

    IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, m_func);

    // e1 = MOV_TRUNC src1
    // (Use 32-bit MOV_TRUNC here as we rely on the register copy to clear the upper 32 bits.)
    IR::RegOpnd *e1 = r1->Copy(m_func)->AsRegOpnd();
    e1->SetType(TyInt32);
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
        e1,
        src1,
        m_func));

    if (!isInt && isFromUint32)
    {
        // A uint32 with bit 31 set does not fit a tagged int; take the helper.
        Assert(!labelHelper);
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

        // TBNZ e1, #31, $helper
        IR::Instr* instr = IR::BranchInstr::New(Js::OpCode::TBNZ, labelHelper, m_func);
        instr->SetSrc1(e1);
        instr->SetSrc2(IR::IntConstOpnd::New(31, TyInt32, m_func));
        instrLoad->InsertBefore(instr);
    }

    // The previous operation clears the top 32 bits.
    // ORR r1, 1<<VarTag_Shift -- apply the var tag bit
    this->GenerateInt32ToVarConversion(r1, instrLoad);

    // REVIEW: We need r1 only if we could generate sn = Ld_A_I4 sn. i.e. the destination and
    // source are the same.

    // r_dst = MOV r1
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
        dst,
        r1,
        m_func));

    if (labelHelper)
    {
        Assert(isFromUint32);

        // B $done
        IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, labelDone, m_func));

        // $helper
        instrLoad->InsertBefore(labelHelper);

        // ToVar() -- slow path: box the value
        this->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, true);

        // $done
        instrLoad->InsertBefore(labelDone);
    }

    instrLoad->Remove();
}
  4678. void
  4679. LowererMD::EmitLoadVarNoCheck(IR::RegOpnd * dst, IR::RegOpnd * src, IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
  4680. {
  4681. IR::RegOpnd * floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
  4682. if (isFromUint32)
  4683. {
  4684. this->EmitUIntToFloat(floatReg, src, instrLoad);
  4685. }
  4686. else
  4687. {
  4688. this->EmitIntToFloat(floatReg, src, instrLoad);
  4689. }
  4690. this->SaveDoubleToVar(dst, floatReg, instrLoad, instrLoad, isHelper);
  4691. }
bool
LowererMD::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    //
    // Converts a Var source to int32: fast path for tagged ints, optional
    // float fast path, then a helper call (or bailout). Returns true when the
    // caller must emit a bailout instead of the helper call.
    //
    //     r1 = MOV src1
    //     rtest = UBFX src1, AtomTag_Shift, 64 - AtomTag_Shift
    //     EOR rtest, 1
    //     CBNZ $helper or $float
    //     r_dst = MOV.32 e_src1
    //     B $done
    // $float:
    //     dst = ConvertToFloat(r1, $helper)
    // $helper:
    //     r_dst = ToInt32()
    //
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->GetType() == TyVar);

    // TODO: Fix bad lowering. We shouldn't see TyVars here.
    // Assert(instrLoad->GetDst()->GetType() == TyInt32);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *helper = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    IR::LabelInstr *done = nullptr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }

    if (src1->IsEqual(instrLoad->GetDst()) == false)
    {
        // r1 = MOV src1 -- copy the source Var so later slow paths still see it
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
        r1->SetValueType(src1->GetValueType());
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func));
        src1 = r1;
    }

    const ValueType src1ValueType(src1->GetValueType());
    // The float fast path is suppressed for int-only bailout kinds, which must
    // not accept float-valued inputs.
    const bool doFloatToIntFastPath =
        (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
        !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger));

    if (isNotInt)
    {
        // Known to be non-integer. If we are required to bail out on helper call, just re-jit.
        if (!doFloatToIntFastPath && bailOutOnHelper)
        {
            if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
            {
                // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                // because it won't help and the same thing will happen again. Just abort jitting this function.
                if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                {
                    Output::Print(_u(" Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                    Output::Flush();
                }
                throw Js::OperationAbortedException();
            }
            throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
        }
    }
    else
    {
        // It could be an integer in this case.
        if (!isInt)
        {
            // Not known either way: test the tag and branch off to $float/$helper.
            if(doFloatToIntFastPath)
            {
                labelFloat = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, false);
            }
            else
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }

            this->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : helper);
        }

        // Tagged int: the low 32 bits hold the int32 value.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
            dst->UseWithNewType(TyInt32, instrLoad->m_func),
            src1->UseWithNewType(TyInt32, instrLoad->m_func),
            instrLoad->m_func));

        if (!isInt)
        {
            // JMP $done
            done = instrLoad->GetOrCreateContinueLabel();
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, done, m_func));
        }
    }

    if (!isInt)
    {
        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!helper)
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }
            if(!done)
            {
                done = instrLoad->GetOrCreateContinueLabel();
            }

            // Untag the float and convert it; falls back to $helper on failure.
            IR::RegOpnd* floatOpnd = this->CheckFloatAndUntag(src1, instrLoad, helper);
            this->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, helper, done, instrLoad);
        }

        // $helper:
        if (helper)
        {
            instrLoad->InsertBefore(helper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            m_lowerer->LoadInt32FromUntaggedVar(instrLoad);

            // Need to bail out instead of calling a helper
            return true;
        }

        if (bailOutOnHelper)
        {
            // Caller asked for a bailout rather than a helper call.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrLoad);
            instrLoad->Remove();
        }
        else if (conversionFromObjectAllowed)
        {
            m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
        }
        else
        {
            m_lowerer->LowerUnaryHelperMemWithBoolReference(instrLoad, IR::HelperConv_ToInt32_NoObjects, true /*useBoolForBailout*/);
        }
    }
    else
    {
        // Known tagged int: the MOV_TRUNC above did all the work.
        instrLoad->Remove();
    }

    return false;
}
  4835. void
  4836. LowererMD::ImmedSrcToReg(IR::Instr * instr, IR::Opnd * newOpnd, int srcNum)
  4837. {
  4838. if (srcNum == 2)
  4839. {
  4840. instr->SetSrc2(newOpnd);
  4841. }
  4842. else
  4843. {
  4844. Assert(srcNum == 1);
  4845. instr->SetSrc1(newOpnd);
  4846. }
  4847. switch (instr->m_opcode)
  4848. {
  4849. case Js::OpCode::LDIMM:
  4850. instr->m_opcode = Js::OpCode::MOV;
  4851. break;
  4852. default:
  4853. // Nothing to do (unless we have immed/reg variations for other instructions).
  4854. break;
  4855. }
  4856. }
IR::LabelInstr *
LowererMD::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
{
    // No stack restoration is needed on this target before reaching the
    // bailout exit, so the exit target label is returned unchanged.
    return exitTargetInstr;
}
StackSym *
LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot)
{
    // Convenience overload that forwards to the static version using this
    // lowerer's current function.
    return GetImplicitParamSlotSym(argSlot, this->m_func);
}
  4867. StackSym *
  4868. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot, Func * func)
  4869. {
  4870. // For ARM, offset for implicit params always start at 0
  4871. // TODO: Consider not to use the argSlot number for the param slot sym, which can
  4872. // be confused with arg slot number from javascript
  4873. StackSym * stackSym = StackSym::NewParamSlotSym(argSlot, func);
  4874. func->SetArgOffset(stackSym, argSlot * MachPtr);
  4875. func->SetHasImplicitParamLoad();
  4876. return stackSym;
  4877. }
  4878. IR::LabelInstr *
  4879. LowererMD::EnsureEHEpilogLabel()
  4880. {
  4881. if (this->m_func->m_epilogLabel)
  4882. {
  4883. return this->m_func->m_epilogLabel;
  4884. }
  4885. IR::Instr *exitInstr = this->m_func->m_exitInstr;
  4886. IR::Instr *prevInstr = exitInstr->GetPrevRealInstrOrLabel();
  4887. if (prevInstr->IsLabelInstr())
  4888. {
  4889. this->m_func->m_epilogLabel = prevInstr->AsLabelInstr();
  4890. return prevInstr->AsLabelInstr();
  4891. }
  4892. IR::LabelInstr *labelInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4893. exitInstr->InsertBefore(labelInstr);
  4894. this->m_func->m_epilogLabel = labelInstr;
  4895. return labelInstr;
  4896. }
  4897. // Helper method: inserts legalized assign for given srcOpnd into RegD0 in front of given instr in the following way:
  4898. // dstReg = InsertMove srcOpnd
  4899. // Used to put args of inline built-in call into RegD0 and RegD1 before we call actual CRT function.
  4900. void LowererMD::GenerateAssignForBuiltinArg(RegNum dstReg, IR::Opnd* srcOpnd, IR::Instr* instr)
  4901. {
  4902. IR::RegOpnd* tempDst = IR::RegOpnd::New(nullptr, dstReg, TyMachDouble, this->m_func);
  4903. tempDst->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  4904. Lowerer::InsertMove(tempDst, srcOpnd, instr);
  4905. }
// For given InlineMathXXX instr, generate the call to actual CRT function/CPU instr.
void LowererMD::GenerateFastInlineBuiltInCall(IR::Instr* instr, IR::JnHelperMethod helperMethod)
{
    switch (instr->m_opcode)
    {
    case Js::OpCode::InlineMathSqrt:
        // Sqrt maps directly to the VFP instruction.
        // src and dst are already float, all we need is just change the opcode and legalize.
        // Before:
        //      dst = InlineMathSqrt src1
        // After:
        //      <potential FSTR by legalizer if src1 is not a register>
        //      dst = FSQRT src1
        Assert(helperMethod == (IR::JnHelperMethod)0);
        Assert(instr->GetSrc2() == nullptr);
        instr->m_opcode = Js::OpCode::FSQRT;
        LegalizeMD::LegalizeInstr(instr);
        break;

    // The following map to dedicated emitters; no CRT helper is used.
    case Js::OpCode::InlineMathAbs:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathAbs(instr);

    case Js::OpCode::InlineMathFloor:
    case Js::OpCode::InlineMathCeil:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathFloorCeil(instr);

    case Js::OpCode::InlineMathRound:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathRound(instr);

    case Js::OpCode::InlineMathMin:
    case Js::OpCode::InlineMathMax:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathMinMax(instr);

    default:
        // Generic case: call the CRT helper with float args in d0/d1 and the
        // result in d0.
        // Before:
        //      dst = <Built-in call> src1, src2
        // After:
        //      d0 = InsertMove src1
        //      lr = LDIMM helperAddr
        //      d0 = BLR lr
        //      dst = InsertMove d0

        // Src1
        AssertMsg(instr->GetDst()->IsFloat(), "Currently accepting only float args for math helpers -- dst.");
        AssertMsg(instr->GetSrc1()->IsFloat(), "Currently accepting only float args for math helpers -- src1.");
        AssertMsg(!instr->GetSrc2() || instr->GetSrc2()->IsFloat(), "Currently accepting only float args for math helpers -- src2.");

        this->GenerateAssignForBuiltinArg((RegNum)FIRST_FLOAT_REG, instr->UnlinkSrc1(), instr);

        // Src2
        if (instr->GetSrc2() != nullptr)
        {
            this->GenerateAssignForBuiltinArg((RegNum)(FIRST_FLOAT_REG + 1), instr->UnlinkSrc2(), instr);
        }

        // Call CRT.
        IR::RegOpnd* floatCallDst = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_REG), TyMachDouble, this->m_func);   // Dst in d0.
        IR::Instr* floatCall = IR::Instr::New(Js::OpCode::BLR, floatCallDst, this->m_func);
        instr->InsertBefore(floatCall);

        // lr = LDIMM helperAddr
        // BLR lr
        IR::AddrOpnd* targetAddr = IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func);
        IR::RegOpnd *targetOpnd = IR::RegOpnd::New(nullptr, RegLR, TyMachPtr, this->m_func);
        IR::Instr *movInstr = IR::Instr::New(Js::OpCode::LDIMM, targetOpnd, targetAddr, this->m_func);
        targetOpnd->m_isCallArg = true;
        floatCall->SetSrc1(targetOpnd);
        floatCall->InsertBefore(movInstr);

        // Save the result.
        Lowerer::InsertMove(instr->UnlinkDst(), floatCall->GetDst(), instr);
        instr->Remove();
        break;
    }
}
void
LowererMD::GenerateFastInlineBuiltInMathAbs(IR::Instr *inlineInstr)
{
    // Math.abs fast path. For int32 sources a bailout on INT_MIN is emitted
    // first (LowerBailOnIntMin) since |INT_MIN| overflows int32; the
    // remaining cases use a conditional negate. Float64 sources map to FABS.
    IR::Opnd* src = inlineInstr->GetSrc1()->Copy(this->m_func);
    IR::Opnd* dst = inlineInstr->UnlinkDst();
    Assert(src);
    IR::Instr* tmpInstr;
    IRType srcType = src->GetType();

    IR::Instr* nextInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::Instr* continueInstr = m_lowerer->LowerBailOnIntMin(inlineInstr);
    continueInstr->InsertAfter(nextInstr);
    if (srcType == IRType::TyInt32)
    {
        // Note: if execution gets so far, we always get (untagged) int32 here.
        Assert(src->IsRegOpnd());

        // CMP src, #0
        tmpInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        tmpInstr->SetSrc1(src);
        tmpInstr->SetSrc2(IR::IntConstOpnd::New(0, IRType::TyInt32, this->m_func));
        nextInstr->InsertBefore(tmpInstr);
        Legalize(tmpInstr);

        // dst = CSNEGPL dst, src, src -- select src if non-negative, else -src
        tmpInstr = IR::Instr::New(Js::OpCode::CSNEGPL, dst, src, src, this->m_func);
        nextInstr->InsertBefore(tmpInstr);
        Legalize(tmpInstr);
    }
    else if (srcType == IRType::TyFloat64)
    {
        // FABS dst, src
        tmpInstr = IR::Instr::New(Js::OpCode::FABS, dst, src, this->m_func);
        nextInstr->InsertBefore(tmpInstr);
    }
    else
    {
        AssertMsg(FALSE, "GenerateFastInlineBuiltInMathAbs: unexpected type of the src!");
    }
}
  5011. void
  5012. LowererMD::GenerateFastInlineMathFround(IR::Instr* instr)
  5013. {
  5014. // Note that this is fround, not round; this operation is to
  5015. // round a double to Float32 precision.
  5016. IR::Opnd* src1 = instr->GetSrc1();
  5017. IR::Opnd* dst = instr->GetDst();
  5018. Assert(dst->IsFloat());
  5019. Assert(src1->IsFloat());
  5020. IRType srcType = src1->GetType();
  5021. IRType dstType = dst->GetType();
  5022. if (srcType == TyFloat32)
  5023. {
  5024. if (dstType == TyFloat32)
  5025. {
  5026. LowererMD::ChangeToAssign(instr);
  5027. }
  5028. else
  5029. {
  5030. Assert(dstType == TyFloat64);
  5031. instr->m_opcode = Js::OpCode::FCVT;
  5032. LegalizeMD::LegalizeInstr(instr);
  5033. }
  5034. }
  5035. else
  5036. {
  5037. Assert(srcType == TyFloat64);
  5038. if (dstType == TyFloat32)
  5039. {
  5040. instr->m_opcode = Js::OpCode::FCVT;
  5041. LegalizeMD::LegalizeInstr(instr);
  5042. }
  5043. else
  5044. {
  5045. Assert(dstType == TyFloat64);
  5046. IR::RegOpnd* tempOpnd = IR::RegOpnd::New(TyFloat32, instr->m_func);
  5047. IR::Instr* shortener = IR::Instr::New(Js::OpCode::FCVT, tempOpnd, instr->UnlinkSrc1(), instr->m_func);
  5048. instr->InsertBefore(shortener);
  5049. instr->SetSrc1(tempOpnd);
  5050. instr->m_opcode = Js::OpCode::FCVT;
  5051. LegalizeMD::LegalizeInstr(instr);
  5052. }
  5053. }
  5054. }
  5055. void
  5056. LowererMD::GenerateFastInlineBuiltInMathRound(IR::Instr* instr)
  5057. {
  5058. Assert(instr->GetDst()->IsInt32());
  5059. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5060. // Allocate an integer register for negative zero checks if needed
  5061. IR::Opnd * negZeroReg = nullptr;
  5062. if (instr->ShouldCheckForNegativeZero())
  5063. {
  5064. negZeroReg = IR::RegOpnd::New(TyInt64, this->m_func);
  5065. }
  5066. // FMOV floatOpnd, src
  5067. IR::Opnd * src = instr->UnlinkSrc1();
  5068. IR::RegOpnd* floatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  5069. this->m_lowerer->InsertMove(floatOpnd, src, instr);
  5070. IR::LabelInstr * bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);;
  5071. bool sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  5072. // FMOV_GEN negZeroReg, floatOpnd (note this is done before the 0.5 add below)
  5073. if (negZeroReg)
  5074. {
  5075. instr->InsertBefore(IR::Instr::New(Js::OpCode::FMOV_GEN, negZeroReg, floatOpnd, instr->m_func));
  5076. }
  5077. // Add 0.5
  5078. IR::Opnd * pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), IRType::TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  5079. this->m_lowerer->InsertAdd(false, floatOpnd, floatOpnd, pointFive, instr);
  5080. // MSR FPSR, xzr
  5081. IR::Instr* setFPSRInstr = IR::Instr::New(Js::OpCode::MSR_FPSR, instr->m_func);
  5082. setFPSRInstr->SetSrc1(IR::RegOpnd::New(nullptr, RegZR, TyUint32, instr->m_func));
  5083. instr->InsertBefore(setFPSRInstr);
  5084. // FCVTM intOpnd, floatOpnd
  5085. IR::Opnd * intOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  5086. instr->InsertBefore(IR::Instr::New(Js::OpCode::FCVTM, intOpnd, floatOpnd, instr->m_func));
  5087. // FCVTM would set FPSR.IOC (0th bit in FPSR) if the source cannot be represented within the destination register
  5088. // MRS exceptReg, FPSR
  5089. IR::Opnd * exceptReg = IR::RegOpnd::New(TyUint32, this->m_func);
  5090. instr->InsertBefore(IR::Instr::New(Js::OpCode::MRS_FPSR, exceptReg, instr->m_func));
  5091. IR::LabelInstr* checkOverflowLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5092. // CBNZ intOpnd, done/checkOverflow
  5093. IR::BranchInstr * cbnzInstr = cbnzInstr = IR::BranchInstr::New(Js::OpCode::CBNZ, checkOverflowLabel, instr->m_func);
  5094. cbnzInstr->SetSrc1(intOpnd);
  5095. instr->InsertBefore(cbnzInstr);
  5096. if (negZeroReg)
  5097. {
  5098. // TBZ negZeroReg, 63
  5099. IR::BranchInstr * tbzInstr = IR::BranchInstr::New(Js::OpCode::TBZ, doneLabel, instr->m_func);
  5100. tbzInstr->SetSrc1(negZeroReg);
  5101. tbzInstr->SetSrc2(IR::IntConstOpnd::New(63, TyMachReg, instr->m_func));
  5102. instr->InsertBefore(tbzInstr);
  5103. Lowerer::InsertBranch(LowererMD::MDUncondBranchOpcode, bailoutLabel, instr);
  5104. }
  5105. instr->InsertBefore(checkOverflowLabel);
  5106. // TBZ exceptReg, #0, done
  5107. IR::BranchInstr * tbzInstr = IR::BranchInstr::New(Js::OpCode::TBZ, doneLabel, instr->m_func);
  5108. tbzInstr->SetSrc1(exceptReg);
  5109. tbzInstr->SetSrc2(IR::IntConstOpnd::New(0, TyMachReg, instr->m_func));
  5110. instr->InsertBefore(tbzInstr);
  5111. IR::Opnd * dst = instr->UnlinkDst();
  5112. instr->InsertAfter(doneLabel);
  5113. if (!sharedBailout)
  5114. {
  5115. instr->InsertBefore(bailoutLabel);
  5116. }
  5117. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  5118. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  5119. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  5120. // MOV dst, intOpnd
  5121. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV, dst, intOpnd, this->m_func);
  5122. doneLabel->InsertAfter(movInstr);
  5123. }
  5124. void
  5125. LowererMD::GenerateFastInlineBuiltInMathFloorCeil(IR::Instr* instr)
  5126. {
  5127. Assert(instr->GetDst()->IsInt32());
  5128. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5129. // Allocate an integer register for negative zero checks if needed
  5130. IR::Opnd * negZeroReg = nullptr;
  5131. if (instr->ShouldCheckForNegativeZero())
  5132. {
  5133. negZeroReg = IR::RegOpnd::New(TyInt64, this->m_func);
  5134. }
  5135. // FMOV floatOpnd, src
  5136. IR::Opnd * src = instr->UnlinkSrc1();
  5137. IR::RegOpnd* floatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  5138. this->m_lowerer->InsertMove(floatOpnd, src, instr);
  5139. IR::LabelInstr * bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);;
  5140. bool sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  5141. // MSR FPSR, xzr
  5142. IR::Instr* setFPSRInstr = IR::Instr::New(Js::OpCode::MSR_FPSR, instr->m_func);
  5143. setFPSRInstr->SetSrc1(IR::RegOpnd::New(nullptr, RegZR, TyUint32, instr->m_func));
  5144. instr->InsertBefore(setFPSRInstr);
  5145. // FMOV_GEN negZeroReg, floatOpnd (note this is done before the 0.5 add below)
  5146. if (negZeroReg)
  5147. {
  5148. instr->InsertBefore(IR::Instr::New(Js::OpCode::FMOV_GEN, negZeroReg, floatOpnd, instr->m_func));
  5149. }
  5150. // FCVTM/FCVTP intOpnd, floatOpnd
  5151. IR::Opnd * intOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  5152. instr->InsertBefore(IR::Instr::New((instr->m_opcode == Js::OpCode::InlineMathCeil) ? Js::OpCode::FCVTP : Js::OpCode::FCVTM, intOpnd, floatOpnd, instr->m_func));
  5153. // EOR negZeroReg, #0x8000000000000000
  5154. if (negZeroReg)
  5155. {
  5156. instr->InsertBefore(IR::Instr::New(Js::OpCode::EOR, negZeroReg, negZeroReg, IR::IntConstOpnd::New(0x8000000000000000ULL, IRType::TyInt64, this->m_func), instr->m_func));
  5157. }
  5158. // FCVTM would set FPSR.IOC (0th bit in FPSR) if the source cannot be represented within the destination register
  5159. // MRS exceptReg, FPSR
  5160. IR::Opnd * exceptReg = IR::RegOpnd::New(TyUint32, this->m_func);
  5161. instr->InsertBefore(IR::Instr::New(Js::OpCode::MRS_FPSR, exceptReg, instr->m_func));
  5162. // CBZ negZeroReg, bailout
  5163. if (negZeroReg)
  5164. {
  5165. IR::BranchInstr * cbzInstr = IR::BranchInstr::New(Js::OpCode::CBZ, bailoutLabel, instr->m_func);
  5166. cbzInstr->SetSrc1(negZeroReg);
  5167. instr->InsertBefore(cbzInstr);
  5168. }
  5169. // TBZ exceptReg, #0, done
  5170. IR::BranchInstr * tbzInstr = IR::BranchInstr::New(Js::OpCode::TBZ, doneLabel, instr->m_func);
  5171. tbzInstr->SetSrc1(exceptReg);
  5172. tbzInstr->SetSrc2(IR::IntConstOpnd::New(0, TyMachReg, instr->m_func));
  5173. instr->InsertBefore(tbzInstr);
  5174. IR::Opnd * dst = instr->UnlinkDst();
  5175. instr->InsertAfter(doneLabel);
  5176. if(!sharedBailout)
  5177. {
  5178. instr->InsertBefore(bailoutLabel);
  5179. }
  5180. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  5181. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  5182. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  5183. // MOV dst, intOpnd
  5184. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV, dst, intOpnd, this->m_func);
  5185. doneLabel->InsertAfter(movInstr);
  5186. }
  5187. void
  5188. LowererMD::GenerateFastInlineBuiltInMathMinMax(IR::Instr* instr)
  5189. {
  5190. IR::Opnd* dst = instr->GetDst();
  5191. if (dst->IsInt32())
  5192. {
  5193. IR::Opnd* src1 = instr->GetSrc1();
  5194. IR::Opnd* src2 = instr->GetSrc2();
  5195. // CMP src1, src2
  5196. IR::Instr* cmpInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
  5197. cmpInstr->SetSrc1(src1);
  5198. cmpInstr->SetSrc2(src2);
  5199. instr->InsertBefore(cmpInstr);
  5200. Legalize(cmpInstr);
  5201. // (min) CSELLT dst, src1, src2
  5202. // (max) CSELLT dst, src2, src1
  5203. IR::Opnd* op1 = (instr->m_opcode == Js::OpCode::InlineMathMin) ? src1 : src2;
  5204. IR::Opnd* op2 = (instr->m_opcode == Js::OpCode::InlineMathMin) ? src2 : src1;
  5205. IR::Instr * csellinstr = IR::Instr::New(Js::OpCode::CSELLT, dst, op1, op2, instr->m_func);
  5206. instr->InsertBefore(csellinstr);
  5207. Legalize(csellinstr);
  5208. instr->Remove();
  5209. }
  5210. else if (dst->IsFloat64())
  5211. {
  5212. // (min) FMIN dst, src1, src2
  5213. // (max) FMAX dst, src1, src2
  5214. instr->m_opcode = (instr->m_opcode == Js::OpCode::InlineMathMin) ? Js::OpCode::FMIN : Js::OpCode::FMAX;
  5215. }
  5216. }
  5217. IR::Instr *
  5218. LowererMD::LowerToFloat(IR::Instr *instr)
  5219. {
  5220. switch (instr->m_opcode)
  5221. {
  5222. case Js::OpCode::Add_A:
  5223. instr->m_opcode = Js::OpCode::FADD;
  5224. break;
  5225. case Js::OpCode::Sub_A:
  5226. instr->m_opcode = Js::OpCode::FSUB;
  5227. break;
  5228. case Js::OpCode::Mul_A:
  5229. instr->m_opcode = Js::OpCode::FMUL;
  5230. break;
  5231. case Js::OpCode::Div_A:
  5232. instr->m_opcode = Js::OpCode::FDIV;
  5233. break;
  5234. case Js::OpCode::Neg_A:
  5235. instr->m_opcode = Js::OpCode::FNEG;
  5236. break;
  5237. case Js::OpCode::BrEq_A:
  5238. case Js::OpCode::BrNeq_A:
  5239. case Js::OpCode::BrSrEq_A:
  5240. case Js::OpCode::BrSrNeq_A:
  5241. case Js::OpCode::BrGt_A:
  5242. case Js::OpCode::BrGe_A:
  5243. case Js::OpCode::BrLt_A:
  5244. case Js::OpCode::BrLe_A:
  5245. case Js::OpCode::BrNotEq_A:
  5246. case Js::OpCode::BrNotNeq_A:
  5247. case Js::OpCode::BrSrNotEq_A:
  5248. case Js::OpCode::BrSrNotNeq_A:
  5249. case Js::OpCode::BrNotGt_A:
  5250. case Js::OpCode::BrNotGe_A:
  5251. case Js::OpCode::BrNotLt_A:
  5252. case Js::OpCode::BrNotLe_A:
  5253. return this->LowerFloatCondBranch(instr->AsBranchInstr());
  5254. default:
  5255. Assume(UNREACHED);
  5256. }
  5257. LegalizeMD::LegalizeInstr(instr);
  5258. return instr;
  5259. }
// Lowers a float compare-and-branch into FCMP followed by a conditional
// branch. Condition codes are chosen for ECMAScript NaN semantics: every
// comparison involving NaN is false, except != which is true (hence the
// extra BVS emitted for the not-equal family unless ignoreNaN is set).
IR::BranchInstr *
LowererMD::LowerFloatCondBranch(IR::BranchInstr *instrBranch, bool ignoreNaN)
{
    IR::Instr *instr;
    Js::OpCode brOpcode = Js::OpCode::InvalidOpCode;
    bool addNaNCheck = false;

    Func * func = instrBranch->m_func;
    IR::Opnd *src1 = instrBranch->UnlinkSrc1();
    IR::Opnd *src2 = instrBranch->UnlinkSrc2();

    // FCMP src1, src2 -- sets NZCV; an unordered (NaN) compare sets C and V.
    IR::Instr *instrCmp = IR::Instr::New(Js::OpCode::FCMP, func);
    instrCmp->SetSrc1(src1);
    instrCmp->SetSrc2(src2);
    instrBranch->InsertBefore(instrCmp);
    LegalizeMD::LegalizeInstr(instrCmp);

    switch (instrBranch->m_opcode)
    {
        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNotNeq_A:
        case Js::OpCode::BrSrNotNeq_A:
            brOpcode = Js::OpCode::BEQ;
            break;

        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrNotEq_A:
            brOpcode = Js::OpCode::BNE;
            addNaNCheck = !ignoreNaN; //Special check for BNE as it is set when the operands are unordered (NaN).
            break;

        case Js::OpCode::BrLe_A:
            brOpcode = Js::OpCode::BLS; //Can't use BLE as it is set when the operands are unordered (NaN).
            break;

        case Js::OpCode::BrLt_A:
            brOpcode = Js::OpCode::BCC; //Can't use BLT as is set when the operands are unordered (NaN).
            break;

        case Js::OpCode::BrGe_A:
            brOpcode = Js::OpCode::BGE;
            break;

        case Js::OpCode::BrGt_A:
            brOpcode = Js::OpCode::BGT;
            break;

        // The Not* forms branch when the comparison is false (including for
        // NaN), so they use the condition that is also taken when unordered.
        case Js::OpCode::BrNotLe_A:
            brOpcode = Js::OpCode::BHI;
            break;

        case Js::OpCode::BrNotLt_A:
            brOpcode = Js::OpCode::BPL;
            break;

        case Js::OpCode::BrNotGe_A:
            brOpcode = Js::OpCode::BLT;
            break;

        case Js::OpCode::BrNotGt_A:
            brOpcode = Js::OpCode::BLE;
            break;

        default:
            Assert(false);
            break;
    }

    if (addNaNCheck)
    {
        // BVS: also take the branch when the compare was unordered, since
        // x != y is true when either operand is NaN.
        instr = IR::BranchInstr::New(Js::OpCode::BVS, instrBranch->GetTarget(), func);
        instrBranch->InsertBefore(instr);
    }

    instr = IR::BranchInstr::New(brOpcode, instrBranch->GetTarget(), func);
    instrBranch->InsertBefore(instr);

    instrBranch->Remove();
    return instr->AsBranchInstr();
}
  5327. void
  5328. LowererMD::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5329. {
  5330. IR::Instr *instr;
  5331. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  5332. Assert(src->IsRegOpnd() && src->IsInt32());
  5333. // Convert to Float
  5334. instr = IR::Instr::New(Js::OpCode::FCVT, dst, src, this->m_func);
  5335. instrInsert->InsertBefore(instr);
  5336. }
  5337. void
  5338. LowererMD::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5339. {
  5340. IR::Instr *instr;
  5341. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  5342. Assert(src->IsRegOpnd() && src->IsUInt32());
  5343. // Convert to Float
  5344. instr = IR::Instr::New(Js::OpCode::FCVT, dst, src, this->m_func);
  5345. instrInsert->InsertBefore(instr);
  5346. }
  5347. void LowererMD::ConvertFloatToInt32(IR::Opnd* intOpnd, IR::Opnd* floatOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone, IR::Instr * instrInsert)
  5348. {
  5349. Assert(floatOpnd->IsFloat64());
  5350. Assert(intOpnd->IsInt32());
  5351. // VCVTS32F64 dst.i32, src.f64
  5352. // Convert to int
  5353. // ARM64_WORKITEM: On ARM32 this used the current rounding mode; here we are explicitly rounding toward zero -- is that ok?
  5354. IR::Instr * instr = IR::Instr::New(Js::OpCode::FCVTZ, intOpnd, floatOpnd, this->m_func);
  5355. instrInsert->InsertBefore(instr);
  5356. Legalize(instr);
  5357. this->CheckOverflowOnFloatToInt32(instrInsert, intOpnd, labelHelper, labelDone);
  5358. }
// Int32 -> int64 widening is not lowered through this path on this target.
void
LowererMD::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(UNREACHED);
}
// Uint32 -> int64 widening is not lowered through this path on this target.
void
LowererMD::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(UNREACHED);
}
// Int64 -> int32 narrowing is not lowered through this path on this target.
void
LowererMD::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(UNREACHED);
}
  5374. void
  5375. LowererMD::CheckOverflowOnFloatToInt32(IR::Instr* instrInsert, IR::Opnd* intOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone)
  5376. {
  5377. // Test for 0x80000000 or 0x7FFFFFFF
  5378. // tmp = EOR src, 0x80000000; gives 0 or -1 for overflow values
  5379. // tmp = EOR_ASR31 tmp, tmp; tmp = tmp ^ ((int32)tmp >> 31) -- converts -1 or 0 to 0
  5380. // CBZ tmp, helper; branch if tmp was -1 or 0
  5381. // B done;
  5382. IR::RegOpnd* tmp = IR::RegOpnd::New(TyInt32, this->m_func);
  5383. IR::Instr* instr = IR::Instr::New(Js::OpCode::EOR, tmp, intOpnd, IR::IntConstOpnd::New(0x80000000, TyUint32, this->m_func, true), this->m_func);
  5384. instrInsert->InsertBefore(instr);
  5385. instr = IR::Instr::New(Js::OpCode::EOR_ASR31, tmp, tmp, tmp, this->m_func);
  5386. instrInsert->InsertBefore(instr);
  5387. // CBZ $helper
  5388. instr = IR::BranchInstr::New(Js::OpCode::CBZ, labelHelper, this->m_func);
  5389. instr->SetSrc1(tmp);
  5390. instrInsert->InsertBefore(instr);
  5391. // B $done
  5392. instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
  5393. instrInsert->InsertBefore(instr);
  5394. }
// Converts a float64 to int32, falling back to the Conv_ToInt32Core helper
// when the value does not fit in an int32. If the caller's bailout kind
// includes BailOutOnArrayAccessHelperCall, branches straight to labelBailOut
// instead of emitting the helper path.
void
LowererMD::EmitFloatToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
{
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    if (instrBailOut && instrBailOut->HasBailOutInfo())
    {
        bailOutKind = instrBailOut->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
        {
            // Bail out instead of calling helper. If this is happening unconditionally, the caller should instead throw a rejit exception.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrInsert);
            return;
        }
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *instr;

    // Fast path: truncating convert + overflow check; overflow branches to $Helper.
    ConvertFloatToInt32(dst, src, labelHelper, labelDone, instrInsert);

    // $Helper
    instrInsert->InsertBefore(labelHelper);
    instr = IR::Instr::New(Js::OpCode::Call, dst, this->m_func);
    instrInsert->InsertBefore(instr);
    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        _Analysis_assume_(instrBailOut != nullptr);
        // The helper call can trigger implicit calls, so attach the bailout
        // info to the call; share the bailout record if this instruction owns it.
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // dst = ToInt32Core(src);
    LoadDoubleHelperArgument(instr, src);
    this->ChangeToHelperCall(instr, IR::HelperConv_ToInt32Core);

    // $Done
    instrInsert->InsertBefore(labelDone);
}
// Emits a float64 -> int32 conversion with the requested rounding mode,
// inserted before insertBeforeInstr. Returns the conversion instruction, or
// nullptr for an unsupported RoundMode.
IR::Instr *
LowererMD::InsertConvertFloat64ToInt32(const RoundMode roundMode, IR::Opnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
{
    Assert(dst);
    Assert(dst->IsInt32());
    Assert(src);
    Assert(src->IsFloat64());
    Assert(insertBeforeInstr);

    // The caller is expected to check for overflow. To have that work be done automatically, use LowererMD::EmitFloatToInt.
    Func *const func = insertBeforeInstr->m_func;
    IR::AutoReuseOpnd autoReuseSrcPlusHalf;
    IR::Instr *instr = nullptr;

    switch (roundMode)
    {
        case RoundModeTowardInteger:
        case RoundModeHalfToEven:
        {
            // Conversion with rounding towards nearest integer is not supported by the architecture. Add 0.5 and do a
            // round-toward-zero conversion instead.
            // NOTE(review): add-0.5-then-truncate is round-half-up, not true
            // half-to-even -- presumably acceptable to callers; confirm.
            IR::RegOpnd *const srcPlusHalf = IR::RegOpnd::New(TyFloat64, func);
            autoReuseSrcPlusHalf.Initialize(srcPlusHalf, func);
            Lowerer::InsertAdd(
                false /* needFlags */,
                srcPlusHalf,
                src,
                IR::MemRefOpnd::New(insertBeforeInstr->m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, func,
                    IR::AddrOpndKindDynamicDoubleRef),
                insertBeforeInstr);

            instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeTowardZero), dst, srcPlusHalf, func);
            insertBeforeInstr->InsertBefore(instr);
            LowererMD::Legalize(instr);
            return instr;
        }

        default:
            AssertMsg(0, "RoundMode not supported.");
            return nullptr;
    }
}
  5472. IR::Instr *
  5473. LowererMD::LoadFloatZero(IR::Opnd * opndDst, IR::Instr * instrInsert)
  5474. {
  5475. Assert(opndDst->GetType() == TyFloat64);
  5476. IR::Opnd * zero = IR::MemRefOpnd::New(instrInsert->m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, instrInsert->m_func, IR::AddrOpndKindDynamicDoubleRef);
  5477. // Todo(magardn): Make sure the correct opcode is used for moving between float and non-float regs (FMOV_GEN)
  5478. return Lowerer::InsertMove(opndDst, zero, instrInsert);
  5479. }
// Loads the double constant `value` into opndDst: the constant is allocated
// in native code data and an FLDR from that memory is emitted before
// instrInsert. +0.0 is special-cased through the shared zero constant.
IR::Instr *
LowererMD::LoadFloatValue(IR::Opnd * opndDst, double value, IR::Instr * instrInsert)
{
    // Floating point zero is a common value to load. Let's use a single memory location instead of allocating new memory for each.
    const bool isFloatZero = value == 0.0 && !Js::JavascriptNumber::IsNegZero(value); // (-0.0 == 0.0) yields true
    if (isFloatZero)
    {
        return LowererMD::LoadFloatZero(opndDst, instrInsert);
    }

    // Materialize the constant in the function's native code data area.
    void * pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_LoadFloatValue_Double>, value);

    IR::Opnd * opnd;
    if (instrInsert->m_func->IsOOPJIT())
    {
        // OOP JIT: address the constant indirectly off the native-data base pointer.
        int offset = NativeCodeData::GetDataTotalOffset(pValue);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);

        Lowerer::InsertMove(
            addressRegOpnd,
            IR::MemRefOpnd::New(instrInsert->m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, instrInsert->m_func, IR::AddrOpndKindDynamicNativeCodeDataRef),
            instrInsert);

        opnd = IR::IndirOpnd::New(addressRegOpnd, offset, TyMachDouble,
#if DBG
            NativeCodeData::GetDataDescription(pValue, instrInsert->m_func->m_alloc),
#endif
            instrInsert->m_func, true);
    }
    else
    {
        // In-proc JIT: the constant's address can be referenced directly.
        opnd = IR::MemRefOpnd::New((void*)pValue, TyMachDouble, instrInsert->m_func);
    }

    IR::Instr * instr = IR::Instr::New(Js::OpCode::FLDR, opndDst, opnd, instrInsert->m_func);
    instrInsert->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr);
    return instr;
}
  5514. void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
  5515. {
  5516. if (opndSrc->GetValueType().IsFloat())
  5517. {
  5518. return;
  5519. }
  5520. // TST s1, floatTagReg
  5521. IR::Opnd* floatTag = IR::IntConstOpnd::New(Js::FloatTag_Value, TyMachReg, this->m_func, /* dontEncode = */ true);
  5522. IR::Instr* instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
  5523. instr->SetSrc1(opndSrc);
  5524. instr->SetSrc2(floatTag);
  5525. insertInstr->InsertBefore(instr);
  5526. LegalizeMD::LegalizeInstr(instr);
  5527. // BZ $helper
  5528. instr = IR::BranchInstr::New(Js::OpCode::BEQ /* BZ */, labelHelper, this->m_func);
  5529. insertInstr->InsertBefore(instr);
  5530. }
  5531. IR::RegOpnd* LowererMD::CheckFloatAndUntag(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper)
  5532. {
  5533. IR::Opnd* floatTag = IR::IntConstOpnd::New(Js::FloatTag_Value, TyMachReg, this->m_func, /* dontEncode = */ true);
  5534. // MOV floatTagReg, FloatTag_Value
  5535. if (!opndSrc->GetValueType().IsFloat())
  5536. {
  5537. // TST s1, floatTagReg
  5538. IR::Instr* instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
  5539. instr->SetSrc1(opndSrc);
  5540. instr->SetSrc2(floatTag);
  5541. insertInstr->InsertBefore(instr);
  5542. LegalizeMD::LegalizeInstr(instr);
  5543. // BZ $helper
  5544. instr = IR::BranchInstr::New(Js::OpCode::BEQ /* BZ */, labelHelper, this->m_func);
  5545. insertInstr->InsertBefore(instr);
  5546. }
  5547. IR::RegOpnd* untaggedFloat = IR::RegOpnd::New(TyMachReg, this->m_func);
  5548. IR::Instr* instr = IR::Instr::New(Js::OpCode::EOR, untaggedFloat, opndSrc, floatTag, this->m_func);
  5549. insertInstr->InsertBefore(instr);
  5550. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyMachDouble, this->m_func);
  5551. instr = IR::Instr::New(Js::OpCode::FMOV_GEN, floatReg, untaggedFloat, this->m_func);
  5552. insertInstr->InsertBefore(instr);
  5553. return floatReg;
  5554. }
  5555. template <bool verify>
  5556. void
  5557. LowererMD::Legalize(IR::Instr *const instr, bool fPostRegAlloc)
  5558. {
  5559. if (verify)
  5560. {
  5561. // NYI for the rest of legalization
  5562. return;
  5563. }
  5564. LegalizeMD::LegalizeInstr(instr);
  5565. }
  5566. template void LowererMD::Legalize<false>(IR::Instr *const instr, bool fPostRegalloc);
  5567. #if DBG
  5568. template void LowererMD::Legalize<true>(IR::Instr *const instr, bool fPostRegalloc);
  5569. #endif
// Final machine-dependent lowering pass. Walks the instruction list backward,
// accumulating a conservative byte offset per instruction (instrOffset counts
// from the function's tail), legalizing branch/ADR reach as it goes, expanding
// pseudo-assigns (LDIMM/loads/stores/LDARGOUTSZ/REM) via FinalLowerAssign, and
// recording relocs for branches/ADRs whose targets haven't been seen yet;
// those are fixed up in a second pass over the reloc list.
void
LowererMD::FinalLower()
{
    NoRecoverMemoryArenaAllocator tempAlloc(_u("BE-ARMFinalLower"), m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    EncodeReloc *pRelocList = nullptr;
    size_t totalJmpTableSizeInBytes = 0;
    uintptr_t instrOffset = 0;
    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
    {
        if (instr->IsLowered() == false)
        {
            if (instr->IsLabelInstr())
            {
                //This is not the real set, Real offset gets set in encoder.
                IR::LabelInstr *labelInstr = instr->AsLabelInstr();
                labelInstr->SetOffset(instrOffset);
            }

            switch (instr->m_opcode)
            {
            case Js::OpCode::Ret:
                instr->Remove();
                break;
            case Js::OpCode::Leave:
                Assert(this->m_func->DoOptimizeTry() && !this->m_func->IsLoopBodyInTry());
                instrPrev = m_lowerer->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
                break;
            }
        }
        else
        {
            // Each lowered instruction is assumed to occupy at most MachMaxInstrSize bytes.
            instrOffset = instrOffset + MachMaxInstrSize;

            if (instr->IsBranchInstr())
            {
                IR::BranchInstr *branchInstr = instr->AsBranchInstr();

                if (branchInstr->IsMultiBranch())
                {
                    Assert(instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd());
                    IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();

                    if (multiBranchInstr->m_isSwitchBr &&
                        (multiBranchInstr->m_kind == IR::MultiBranchInstr::IntJumpTable || multiBranchInstr->m_kind == IR::MultiBranchInstr::SingleCharStrJumpTable))
                    {
                        BranchJumpTableWrapper * branchJumpTableWrapper = multiBranchInstr->GetBranchJumpTable();
                        totalJmpTableSizeInBytes += (branchJumpTableWrapper->tableSize * sizeof(void*));

                        // instrOffset is relative to the end of the function. Jump tables come after the function and so would result in negative offsets. label offsets
                        // are unsigned so instead give jump table lables offsets relative to the end of the jump table section.
                        branchJumpTableWrapper->labelInstr->SetOffset(totalJmpTableSizeInBytes);
                    }
                }
                else if (!LowererMD::IsUnconditionalBranch(branchInstr)) //Ignore other direct branches
                {
                    uintptr_t targetOffset = branchInstr->GetTarget()->GetOffset();
                    if (targetOffset != 0)
                    {
                        // this is forward reference
                        if (LegalizeMD::LegalizeDirectBranch(branchInstr, instrOffset))
                        {
                            //There might be an instruction inserted for legalizing conditional branch
                            instrOffset = instrOffset + MachMaxInstrSize;
                        }
                    }
                    else
                    {
                        EncodeReloc::New(&pRelocList, RelocTypeBranch19, (BYTE*)instrOffset, branchInstr, &tempAlloc);

                        //Assume this is a backward long branch, we shall fix up after complete pass, be conservative here
                        instrOffset = instrOffset + MachMaxInstrSize;
                    }
                }
            }
            else if (LowererMD::IsAssign(instr) || instr->m_opcode == Js::OpCode::LEA || instr->m_opcode == Js::OpCode::LDARGOUTSZ || instr->m_opcode == Js::OpCode::REM)
            {
                // Cleanup spill code
                // INSTR_BACKWARD_EDITING_IN_RANGE implies that next loop iteration will use instrPrev (instr->m_prev computed before entering current loop iteration).
                IR::Instr* instrNext = instr->m_next;
                bool canExpand = this->FinalLowerAssign(instr);

                if (canExpand)
                {
                    uint32 expandedInstrCount = 0; // The number of instrs the LDIMM expands into.
                    FOREACH_INSTR_IN_RANGE(instrCount, instrPrev->m_next, instrNext)
                    {
                        ++expandedInstrCount;
                    }
                    NEXT_INSTR_IN_RANGE;
                    Assert(expandedInstrCount > 0);

                    // Adjust the offset for expanded instrs.
                    instrOffset += (expandedInstrCount - 1) * MachMaxInstrSize; // We already accounted for one MachMaxInstrSize.
                }
            }

            if (instr->m_opcode == Js::OpCode::ADR)
            {
                IR::LabelInstr* label = instr->GetSrc1()->AsLabelOpnd()->GetLabel();
                if (label->GetOffset() != 0 && !label->m_isDataLabel)
                {
                    // this is forward reference
                    if (LegalizeMD::LegalizeAdrOffset(instr, instrOffset))
                    {
                        //Additional instructions were inserted.
                        instrOffset = instrOffset + MachMaxInstrSize * 2;
                    }
                }
                else
                {
                    EncodeReloc::New(&pRelocList, RelocTypeLabelAdr, (BYTE*)instrOffset, instr, &tempAlloc);

                    //Assume this is a backward long branch, we shall fix up after complete pass, be conservative here
                    instrOffset = instrOffset + MachMaxInstrSize * 2;
                }
            }
        }
    } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;

    //Fixup all the backward branches
    for (EncodeReloc *reloc = pRelocList; reloc; reloc = reloc->m_next)
    {
        uintptr_t relocAddress = (uintptr_t)reloc->m_consumerOffset;
        switch (reloc->m_relocType)
        {
        case RelocTypeBranch19:
            AssertMsg(relocAddress < reloc->m_relocInstr->AsBranchInstr()->GetTarget()->GetOffset(), "Only backward branches require fixup");
            LegalizeMD::LegalizeDirectBranch(reloc->m_relocInstr->AsBranchInstr(), relocAddress);
            break;

        case RelocTypeLabelAdr:
        {
            IR::LabelInstr* label = reloc->m_relocInstr->GetSrc1()->AsLabelOpnd()->GetLabel();
            if (label->m_isDataLabel)
            {
                // Data labels: compute the distance to the data area (function
                // start/end labels or a jump-table label) and legalize the ADR
                // into a data reference.
                uintptr_t dataOffset;
                if (label == m_func->GetFuncStartLabel())
                {
                    dataOffset = instrOffset - relocAddress;
                }
                else if (label == m_func->GetFuncEndLabel())
                {
                    dataOffset = relocAddress;
                }
                else
                {
                    Assert(label->GetOffset() != 0);
                    // jump table label offsets are relative to the end of the jump table area.
                    dataOffset = relocAddress + totalJmpTableSizeInBytes - label->GetOffset();
                    // PC is a union with offset. Encoder expects this to be nullptr for jump table labels.
                    label->SetPC(nullptr);
                }
                LegalizeMD::LegalizeDataAdr(reloc->m_relocInstr, dataOffset);
                break;
            }
            AssertMsg(relocAddress < label->GetOffset(), "Only backward branches require fixup");
            LegalizeMD::LegalizeAdrOffset(reloc->m_relocInstr, relocAddress);
            break;
        }

        default:
            Assert(false);
        }
    }
}
// Expands pseudo-assign instructions (LDIMM, generic loads/stores, LEA,
// LDARGOUTSZ, REM) into real machine instructions during FinalLower.
// Returns true, if and only if the assign may expand into multiple instrs.
bool
LowererMD::FinalLowerAssign(IR::Instr * instr)
{
    if (instr->m_opcode == Js::OpCode::LDIMM)
    {
        LegalizeMD::LegalizeInstr(instr);

        // LDIMM can expand into up to 4 instructions when the immediate is wider than 16 bits,
        // it can also expand into multiple different no-op (normally MOV) instrs when we obfuscate it, which is randomly.
        return true;
    }
    else if (EncoderMD::IsLoad(instr) || instr->m_opcode == Js::OpCode::LEA)
    {
        Assert(instr->GetDst()->IsRegOpnd());
        if (!instr->GetSrc1()->IsRegOpnd())
        {
            LegalizeMD::LegalizeSrc(instr, instr->GetSrc1(), 1);
            return true;
        }
        // Reg-to-reg: pick the move opcode from the source register class.
        instr->m_opcode = instr->GetSrc1()->IsFloat() ? Js::OpCode::FMOV : Js::OpCode::MOV;
    }
    else if (EncoderMD::IsStore(instr))
    {
        Assert(instr->GetSrc1()->IsRegOpnd());
        if (!instr->GetDst()->IsRegOpnd())
        {
            LegalizeMD::LegalizeDst(instr);
            return true;
        }
        // Reg-to-reg: pick the move opcode from the destination register class.
        instr->m_opcode = instr->GetDst()->IsFloat() ? Js::OpCode::FMOV : Js::OpCode::MOV;
    }
    else if (instr->m_opcode == Js::OpCode::LDARGOUTSZ)
    {
        Assert(instr->GetDst()->IsRegOpnd());
        Assert((instr->GetSrc1() == nullptr) && (instr->GetSrc2() == nullptr));
        // dst = LDARGOUTSZ
        // This loads the function's arg out area size into the dst operand. We need a pseudo-op,
        // because we generate the instruction during Lower but don't yet know the value of the constant it needs
        // to load. Change it to the appropriate LDIMM here.
        uint32 argOutSize = UInt32Math::Mul(this->m_func->m_argSlotsForFunctionsCalled, MachRegInt, Js::Throw::OutOfMemory);
        instr->SetSrc1(IR::IntConstOpnd::New(argOutSize, TyMachReg, this->m_func));
        instr->m_opcode = Js::OpCode::LDIMM;
        LegalizeMD::LegalizeInstr(instr);
        return true;
    }
    else if (instr->m_opcode == Js::OpCode::REM)
    {
        // ARM64 has no remainder instruction: expand REM into SDIV + MSUB.
        IR::Opnd* dst = instr->GetDst();
        IR::Opnd* src1 = instr->GetSrc1();
        IR::Opnd* src2 = instr->GetSrc2();
        Assert(src1->IsRegOpnd());
        Assert(src2->IsRegOpnd());

        RegNum dstReg = dst->AsRegOpnd()->GetReg();
        if (dstReg == src1->AsRegOpnd()->GetReg() || dstReg == src2->AsRegOpnd()->GetReg())
        {
            // dst aliases an input, so compute into the scratch register first.
            Assert(src1->AsRegOpnd()->GetReg() != SCRATCH_REG);
            Assert(src2->AsRegOpnd()->GetReg() != SCRATCH_REG);
            Assert(src1->GetType() == src2->GetType());

            // r17 = SDIV src1, src2
            IR::RegOpnd *regScratch = IR::RegOpnd::New(nullptr, SCRATCH_REG, src1->GetType(), instr->m_func);
            IR::Instr *insertInstr = IR::Instr::New(Js::OpCode::SDIV, regScratch, src1, src2, instr->m_func);
            instr->InsertBefore(insertInstr);

            // r17 = MSUB src1, src2, r17 (r17 = src1 - src2 * r17)
            insertInstr = IR::Instr::New(Js::OpCode::MSUB, regScratch, src1, src2, instr->m_func);
            instr->InsertBefore(insertInstr);

            // mov dst, r17
            insertInstr = IR::Instr::New(dst->IsFloat() ? Js::OpCode::FMOV : Js::OpCode::MOV, dst, regScratch, instr->m_func);
            instr->InsertBefore(insertInstr);

            instr->Remove();
        }
        else
        {
            // dst = SDIV src1, src2
            IR::Instr *divInstr = IR::Instr::New(Js::OpCode::SDIV, dst, src1, src2, instr->m_func);
            instr->InsertBefore(divInstr);

            // dst = MSUB src1, src2, dst (dst = src1 - src2 * dst)
            instr->m_opcode = Js::OpCode::MSUB;
        }
        return true;
    }

    return false;
}
  5804. IR::Opnd *
  5805. LowererMD::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  5806. {
  5807. return this->m_lowerer->GenerateArgOutForStackArgs(callInstr, stackArgsInstr);
  5808. }
// Lowers an int32 division that must be exact: emits SDIV, then multiplies the
// quotient back and bails out when the remainder is non-zero. Returns the
// instruction before which callers should insert additional checks.
IR::Instr *
LowererMD::LowerDivI4AndBailOnReminder(IR::Instr * instr, IR::LabelInstr * bailOutLabel)
{
    // result = SDIV numerator, denominator
    //   mulResult = MUL result, denominator
    //   CMP mulResult, numerator
    //   BNE bailout
    //   <Caller insert more checks here>
    //   dst = MOV result                             <-- insertBeforeInstr

    instr->m_opcode = Js::OpCode::SDIV;

    // delay assigning to the final dst.
    IR::Instr * sinkedInstr = instr->SinkDst(Js::OpCode::MOV);
    LegalizeMD::LegalizeInstr(instr);
    LegalizeMD::LegalizeInstr(sinkedInstr);

    IR::Opnd * resultOpnd = instr->GetDst();
    IR::Opnd * numerator = instr->GetSrc1();
    IR::Opnd * denominatorOpnd = instr->GetSrc2();

    // Insert all check before the assignment to the actual
    IR::Instr * insertBeforeInstr = instr->m_next;

    // Jump to bailout if the remainder is not 0 (or the divResult * denominator is not same as the numerator)
    IR::RegOpnd * mulResult = IR::RegOpnd::New(TyInt32, m_func);
    IR::Instr * mulInstr = IR::Instr::New(Js::OpCode::MUL, mulResult, resultOpnd, denominatorOpnd, m_func);
    insertBeforeInstr->InsertBefore(mulInstr);
    LegalizeMD::LegalizeInstr(mulInstr);

    this->m_lowerer->InsertCompareBranch(mulResult, numerator, Js::OpCode::BrNeq_A, bailOutLabel, insertBeforeInstr);
    return insertBeforeInstr;
}
// Thin delegation: the machine-independent Lowerer emits the register-based
// argout loop for inlined spread calls.
void
LowererMD::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    this->m_lowerer->LowerInlineSpreadArgOutLoopUsingRegisters(callInstr, indexOpnd, arrayElementsStartOpnd);
}
  5841. void
  5842. LowererMD::LowerTypeof(IR::Instr* typeOfInstr)
  5843. {
  5844. Func * func = typeOfInstr->m_func;
  5845. IR::Opnd * src1 = typeOfInstr->GetSrc1();
  5846. IR::Opnd * dst = typeOfInstr->GetDst();
  5847. Assert(src1->IsRegOpnd() && dst->IsRegOpnd());
  5848. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  5849. IR::LabelInstr * taggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  5850. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  5851. // MOV typeDisplayStringsArray, &javascriptLibrary->typeDisplayStrings
  5852. IR::RegOpnd * typeDisplayStringsArrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5853. m_lowerer->InsertMove(typeDisplayStringsArrayOpnd, IR::AddrOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetTypeDisplayStringsOffset(), IR::AddrOpndKindConstantAddress, this->m_func), typeOfInstr);
  5854. GenerateObjectTest(src1, typeOfInstr, taggedIntLabel);
  5855. // MOV typeRegOpnd, [src1 + offset(Type)]
  5856. IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, func);
  5857. m_lowerer->InsertMove(typeRegOpnd,
  5858. IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func),
  5859. typeOfInstr);
  5860. IR::LabelInstr * falsyLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  5861. m_lowerer->GenerateFalsyObjectTest(typeOfInstr, typeRegOpnd, falsyLabel);
  5862. // <$not falsy>
  5863. // MOV typeId, TypeIds_Object
  5864. // MOV objTypeId, [typeRegOpnd + offsetof(typeId)]
  5865. // CMP objTypeId, TypeIds_Limit /*external object test*/
  5866. // BCS $externalObjectLabel
  5867. // MOV typeId, objTypeId
  5868. // $loadTypeDisplayStringLabel:
  5869. // MOV dst, typeDisplayStrings[typeId]
  5870. // TEST dst, dst
  5871. // BEQ $helper
  5872. // B $done
  5873. IR::RegOpnd * typeIdOpnd = IR::RegOpnd::New(TyUint32, func);
  5874. m_lowerer->InsertMove(typeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Object, TyUint32, func), typeOfInstr);
  5875. IR::RegOpnd * objTypeIdOpnd = IR::RegOpnd::New(TyUint32, func);
  5876. m_lowerer->InsertMove(objTypeIdOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, func), typeOfInstr);
  5877. IR::LabelInstr * loadTypeDisplayStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  5878. m_lowerer->InsertCompareBranch(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Limit, TyUint32, func), Js::OpCode::BrGe_A, true /*unsigned*/, loadTypeDisplayStringLabel, typeOfInstr);
  5879. m_lowerer->InsertMove(typeIdOpnd, objTypeIdOpnd, typeOfInstr);
  5880. typeOfInstr->InsertBefore(loadTypeDisplayStringLabel);
  5881. if (dst->IsEqual(src1))
  5882. {
  5883. ChangeToAssign(typeOfInstr->HoistSrc1(Js::OpCode::Ld_A));
  5884. }
  5885. m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, typeIdOpnd, this->GetDefaultIndirScale(), TyMachPtr, func), typeOfInstr);
  5886. m_lowerer->InsertTestBranch(dst, dst, Js::OpCode::BrEq_A, helperLabel, typeOfInstr);
  5887. m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
  5888. // $taggedInt:
  5889. // MOV dst, typeDisplayStrings[TypeIds_Number]
  5890. // B $done
  5891. typeOfInstr->InsertBefore(taggedIntLabel);
  5892. m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Number * sizeof(Js::Var), TyMachPtr, func), typeOfInstr);
  5893. m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
  5894. // $falsy:
  5895. // MOV dst, "undefined"
  5896. // B $done
  5897. typeOfInstr->InsertBefore(falsyLabel);
  5898. IR::Opnd * undefinedDisplayStringOpnd = IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Undefined, TyMachPtr, func);
  5899. m_lowerer->InsertMove(dst, undefinedDisplayStringOpnd, typeOfInstr);
  5900. m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
  5901. // $helper
  5902. // CALL OP_TypeOf
  5903. // $done
  5904. typeOfInstr->InsertBefore(helperLabel);
  5905. typeOfInstr->InsertAfter(doneLabel);
  5906. m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
  5907. }
  5908. void
  5909. LowererMD::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore)
  5910. {
  5911. if ((isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForStores)) || (!isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForLoads)))
  5912. {
  5913. Js::OpCode opcode;
  5914. if (branchInstr->m_opcode == Js::OpCode::BNE)
  5915. {
  5916. opcode = Js::OpCode::CSELEQ;
  5917. }
  5918. else
  5919. {
  5920. AssertOrFailFastMsg(branchInstr->m_opcode == Js::OpCode::BEQ, "Unexpected branch type in InsertObjectPoison preceeding instruction");
  5921. opcode = Js::OpCode::CSELNE;
  5922. }
  5923. AssertOrFailFast(branchInstr->m_prev->m_opcode == Js::OpCode::SUBS || branchInstr->m_prev->m_opcode == Js::OpCode::ANDS);
  5924. IR::RegOpnd* regZero = IR::RegOpnd::New(nullptr, RegZR, TyMachPtr, insertInstr->m_func);
  5925. IR::Instr* csel = IR::Instr::New(opcode, poisonedOpnd, poisonedOpnd, regZero, insertInstr->m_func);
  5926. insertInstr->InsertBefore(csel);
  5927. }
  5928. }
  5929. IR::BranchInstr*
  5930. LowererMD::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
  5931. {
  5932. Assert(compareSrc->IsFloat64() && missingItemOpnd->IsUint64());
  5933. IR::Opnd * compareSrcUint64Opnd = IR::RegOpnd::New(TyUint64, m_func);
  5934. if (compareSrc->IsRegOpnd())
  5935. {
  5936. IR::Instr * movDoubleToUint64Instr = IR::Instr::New(Js::OpCode::FMOV_GEN, compareSrcUint64Opnd, compareSrc, insertBeforeInstr->m_func);
  5937. insertBeforeInstr->InsertBefore(movDoubleToUint64Instr);
  5938. }
  5939. else if (compareSrc->IsIndirOpnd())
  5940. {
  5941. compareSrcUint64Opnd = compareSrc->UseWithNewType(TyUint64, m_func);
  5942. }
  5943. return m_lowerer->InsertCompareBranch(compareSrcUint64Opnd, missingItemOpnd, opcode, target, insertBeforeInstr);
  5944. }
  5945. #if DBG
  5946. //
  5947. // Helps in debugging of fast paths.
  5948. //
  5949. void LowererMD::GenerateDebugBreak( IR::Instr * insertInstr )
  5950. {
  5951. IR::Instr *int3 = IR::Instr::New(Js::OpCode::DEBUGBREAK, insertInstr->m_func);
  5952. insertInstr->InsertBefore(int3);
  5953. }
  5954. #endif
#ifdef _CONTROL_FLOW_GUARD
//
// Emits a Control Flow Guard check of an indirect-call target before
// insertBeforeInstr. When the JIT trampoline optimization is enabled and a
// thunk segment exists, a fast inline range check recognizes targets inside
// the JIT thunk segment and bypasses the OS CFG helper for them (masking the
// address down to a thunk boundary instead); all other targets go through
// the __guard_check_icall dispatch.
//
// entryPointOpnd    - the call target address to validate (may be mutated by
//                     the thunk-alignment AND on the fast path)
// insertBeforeInstr - all generated instructions are inserted before this
//
void
LowererMD::GenerateCFGCheck(IR::Opnd * entryPointOpnd, IR::Instr * insertBeforeInstr)
{
    bool useJITTrampoline = CONFIG_FLAG(UseJITTrampoline);
    IR::LabelInstr * callLabelInstr = nullptr;
    uintptr_t jitThunkStartAddress = NULL;
    if (useJITTrampoline)
    {
#if ENABLE_OOP_NATIVE_CODEGEN
        if (m_func->IsOOPJIT())
        {
            OOPJITThunkEmitter * jitThunkEmitter = m_func->GetOOPThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        else
#endif
        {
            InProcJITThunkEmitter * jitThunkEmitter = m_func->GetInProcThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        // EnsureInitialized returning 0 means no thunk segment is available;
        // in that case only the OS CFG check below is emitted.
        if (jitThunkStartAddress)
        {
            // NOTE(review): TotalThunkSize comes from InProcJITThunkEmitter even
            // on the OOP path — presumably both emitters share the same segment
            // layout; confirm if they can diverge.
            uintptr_t endAddressOfSegment = jitThunkStartAddress + InProcJITThunkEmitter::TotalThunkSize;
            Assert(endAddressOfSegment > jitThunkStartAddress);
            // Generate instructions for local Pre-Reserved Segment Range check
            IR::AddrOpnd * endAddressOfSegmentConstOpnd = IR::AddrOpnd::New(endAddressOfSegment, IR::AddrOpndKindDynamicMisc, m_func);
            IR::RegOpnd *resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            callLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            IR::LabelInstr * cfgLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            // resultOpnd = SUB endAddressOfSegmentConstOpnd, entryPointOpnd
            // CMP resultOpnd, TotalThunkSize
            // BHS $cfgLabel            ; unsigned compare: target outside segment -> OS CFG check
            // AND entryPointOpnd, ~(ThunkSize-1)   ; snap in-segment target to thunk boundary
            // JMP $callLabel           ; skip the OS CFG call
            m_lowerer->InsertSub(false, resultOpnd, endAddressOfSegmentConstOpnd, entryPointOpnd, insertBeforeInstr);
            m_lowerer->InsertCompareBranch(resultOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::TotalThunkSize, TyMachReg, m_func, true), Js::OpCode::BrGe_A, true, cfgLabelInstr, insertBeforeInstr);
            m_lowerer->InsertAnd(entryPointOpnd, entryPointOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::ThunkAlignmentMask, TyMachReg, m_func, true), insertBeforeInstr);
            m_lowerer->InsertBranch(Js::OpCode::Br, callLabelInstr, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(cfgLabelInstr);
        }
    }
    //MOV x15, entryPoint
    // x15 (RegR15) holds the address to validate — presumably the register the
    // __guard_check_icall helper inspects per the ARM64 CFG convention.
    IR::RegOpnd * entryPointRegOpnd = IR::RegOpnd::New(nullptr, RegR15, TyMachReg, this->m_func);
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr *movInstrEntryPointToRegister = Lowerer::InsertMove(entryPointRegOpnd, entryPointOpnd, insertBeforeInstr);
    //Generate CheckCFG CALL here
    IR::HelperCallOpnd *cfgCallOpnd = IR::HelperCallOpnd::New(IR::HelperGuardCheckCall, this->m_func);
    IR::Instr* cfgCallInstr = IR::Instr::New(Js::OpCode::BLR, this->m_func);
    this->m_func->SetHasCallsOnSelfAndParents();
    //mov x16, __guard_check_icall_fptr
    IR::RegOpnd *targetOpnd = IR::RegOpnd::New(nullptr, RegR16, TyMachPtr, this->m_func);
    IR::Instr *movInstr = Lowerer::InsertMove(targetOpnd, cfgCallOpnd, insertBeforeInstr);
    Legalize(movInstr);
    //call x16
    cfgCallInstr->SetSrc1(targetOpnd);
    //CALL cfg(x15)
    insertBeforeInstr->InsertBefore(cfgCallInstr);
    if (jitThunkStartAddress)
    {
        Assert(callLabelInstr);
        if (CONFIG_FLAG(ForceJITCFGCheck))
        {
            // Always generate CFG check to make sure that the address is still valid
            // (fast path jumps land before the x15 move, so the helper runs too).
            movInstrEntryPointToRegister->InsertBefore(callLabelInstr);
        }
        else
        {
            // Fast path skips the helper entirely: land after the CFG call.
            insertBeforeInstr->InsertBefore(callLabelInstr);
        }
    }
}
#endif