LowerMD.cpp 326 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
#include "Backend.h"
#include "Language/JavascriptFunctionArgIndex.h"
#include <algorithm>
#include <utility>
// Machine-dependent opcode table for the ARM32 lowerer. The shared
// (machine-independent) lowering code refers to these names; each one maps
// to the concrete ARM opcode used on this target.
const Js::OpCode LowererMD::MDUncondBranchOpcode = Js::OpCode::B;
const Js::OpCode LowererMD::MDTestOpcode = Js::OpCode::TST;
const Js::OpCode LowererMD::MDOrOpcode = Js::OpCode::ORR;
const Js::OpCode LowererMD::MDXorOpcode = Js::OpCode::EOR;
const Js::OpCode LowererMD::MDOverflowBranchOpcode = Js::OpCode::BVS;      // branch if overflow flag set
const Js::OpCode LowererMD::MDNotOverflowBranchOpcode = Js::OpCode::BVC;   // branch if overflow flag clear
const Js::OpCode LowererMD::MDConvertFloat32ToFloat64Opcode = Js::OpCode::VCVTF64F32;
const Js::OpCode LowererMD::MDConvertFloat64ToFloat32Opcode = Js::OpCode::VCVTF32F64;
const Js::OpCode LowererMD::MDCallOpcode = Js::OpCode::Call;
const Js::OpCode LowererMD::MDImulOpcode = Js::OpCode::MUL;
  17. template<typename T>
  18. inline void Swap(T& x, T& y)
  19. {
  20. T temp = x;
  21. x = y;
  22. y = temp;
  23. }
  24. // Static utility fn()
  25. //
  26. bool
  27. LowererMD::IsAssign(const IR::Instr *instr)
  28. {
  29. return (instr->m_opcode == Js::OpCode::MOV ||
  30. instr->m_opcode == Js::OpCode::VMOV ||
  31. instr->m_opcode == Js::OpCode::LDIMM ||
  32. instr->m_opcode == Js::OpCode::LDR ||
  33. instr->m_opcode == Js::OpCode::VLDR ||
  34. instr->m_opcode == Js::OpCode::VLDR32 ||
  35. instr->m_opcode == Js::OpCode::STR ||
  36. instr->m_opcode == Js::OpCode::VSTR ||
  37. instr->m_opcode == Js::OpCode::VSTR32);
  38. }
  39. ///----------------------------------------------------------------------------
  40. ///
  41. /// LowererMD::IsCall
  42. ///
  43. ///----------------------------------------------------------------------------
  44. bool
  45. LowererMD::IsCall(const IR::Instr *instr)
  46. {
  47. return (instr->m_opcode == Js::OpCode::BL ||
  48. instr->m_opcode == Js::OpCode::BLX);
  49. }
  50. ///----------------------------------------------------------------------------
  51. ///
  52. /// LowererMD::IsIndirectBranch
  53. ///
  54. ///----------------------------------------------------------------------------
  55. bool
  56. LowererMD::IsIndirectBranch(const IR::Instr *instr)
  57. {
  58. return (instr->m_opcode == Js::OpCode::BX);
  59. }
  60. ///----------------------------------------------------------------------------
  61. ///
  62. /// LowererMD::IsUnconditionalBranch
  63. ///
  64. ///----------------------------------------------------------------------------
  65. bool
  66. LowererMD::IsUnconditionalBranch(const IR::Instr *instr)
  67. {
  68. return instr->m_opcode == Js::OpCode::B;
  69. }
  70. bool
  71. LowererMD::IsReturnInstr(const IR::Instr *instr)
  72. {
  73. return instr->m_opcode == Js::OpCode::LDRRET || instr->m_opcode == Js::OpCode::RET;
  74. }
  75. ///----------------------------------------------------------------------------
  76. ///
  77. /// LowererMD::InvertBranch
  78. ///
  79. ///----------------------------------------------------------------------------
  80. void
  81. LowererMD::InvertBranch(IR::BranchInstr *branchInstr)
  82. {
  83. switch (branchInstr->m_opcode)
  84. {
  85. case Js::OpCode::BEQ:
  86. branchInstr->m_opcode = Js::OpCode::BNE;
  87. break;
  88. case Js::OpCode::BNE:
  89. branchInstr->m_opcode = Js::OpCode::BEQ;
  90. break;
  91. case Js::OpCode::BGE:
  92. branchInstr->m_opcode = Js::OpCode::BLT;
  93. break;
  94. case Js::OpCode::BGT:
  95. branchInstr->m_opcode = Js::OpCode::BLE;
  96. break;
  97. case Js::OpCode::BLT:
  98. branchInstr->m_opcode = Js::OpCode::BGE;
  99. break;
  100. case Js::OpCode::BLE:
  101. branchInstr->m_opcode = Js::OpCode::BGT;
  102. break;
  103. case Js::OpCode::BCS:
  104. branchInstr->m_opcode = Js::OpCode::BCC;
  105. break;
  106. case Js::OpCode::BCC:
  107. branchInstr->m_opcode = Js::OpCode::BCS;
  108. break;
  109. case Js::OpCode::BMI:
  110. branchInstr->m_opcode = Js::OpCode::BPL;
  111. break;
  112. case Js::OpCode::BPL:
  113. branchInstr->m_opcode = Js::OpCode::BMI;
  114. break;
  115. case Js::OpCode::BVS:
  116. branchInstr->m_opcode = Js::OpCode::BVC;
  117. break;
  118. case Js::OpCode::BVC:
  119. branchInstr->m_opcode = Js::OpCode::BVS;
  120. break;
  121. case Js::OpCode::BLS:
  122. branchInstr->m_opcode = Js::OpCode::BHI;
  123. break;
  124. case Js::OpCode::BHI:
  125. branchInstr->m_opcode = Js::OpCode::BLS;
  126. break;
  127. default:
  128. AssertMsg(UNREACHED, "B missing in InvertBranch()");
  129. }
  130. }
  131. Js::OpCode
  132. LowererMD::MDConvertFloat64ToInt32Opcode(const RoundMode roundMode)
  133. {
  134. switch (roundMode)
  135. {
  136. case RoundModeTowardZero:
  137. return Js::OpCode::VCVTS32F64;
  138. case RoundModeTowardInteger:
  139. return Js::OpCode::Nop;
  140. case RoundModeHalfToEven:
  141. return Js::OpCode::VCVTRS32F64;
  142. default:
  143. AssertMsg(0, "RoundMode has no MD mapping.");
  144. return Js::OpCode::Nop;
  145. }
  146. }
  147. // GenerateMemRef: Return an opnd that can be used to access the given address.
  148. // ARM can't encode direct accesses to physical addresses, so put the address in a register
  149. // and return an indir. (This facilitates re-use of the loaded address without having to re-load it.)
  150. IR::Opnd *
  151. LowererMD::GenerateMemRef(intptr_t addr, IRType type, IR::Instr *instr, bool dontEncode)
  152. {
  153. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  154. IR::AddrOpnd *addrOpnd = IR::AddrOpnd::New(addr, IR::AddrOpndKindDynamicMisc, this->m_func, dontEncode);
  155. LowererMD::CreateAssign(baseOpnd, addrOpnd, instr);
  156. return IR::IndirOpnd::New(baseOpnd, 0, type, this->m_func);
  157. }
  158. void
  159. LowererMD::FlipHelperCallArgsOrder()
  160. {
  161. int left = 0;
  162. int right = helperCallArgsCount - 1;
  163. while (left < right)
  164. {
  165. IR::Opnd *tempOpnd = helperCallArgs[left];
  166. helperCallArgs[left] = helperCallArgs[right];
  167. helperCallArgs[right] = tempOpnd;
  168. left++;
  169. right--;
  170. }
  171. }
// LowerCallHelper: lower a helper call whose arguments arrive as a chain of
// ArgOut_A definitions linked through src2. Each argument value is queued
// via LoadHelperArgument; the consumed ArgOut_A instrs are removed, the
// script context is loaded, the queued args are flipped into call order,
// and the call itself is rewritten by ChangeToHelperCall.
IR::Instr *
LowererMD::LowerCallHelper(IR::Instr *instrCall)
{
    IR::Opnd *argOpnd = instrCall->UnlinkSrc2();
    IR::Instr *prevInstr = instrCall;
    IR::JnHelperMethod helperMethod = instrCall->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
    instrCall->FreeSrc1();
    while (argOpnd)
    {
        Assert(argOpnd->IsRegOpnd());
        IR::RegOpnd *regArg = argOpnd->AsRegOpnd();
        Assert(regArg->m_sym->m_isSingleDef);
        IR::Instr *instrArg = regArg->m_sym->m_instrDef;
        // Only ArgOut_A links are expected, except HelperOP_InitCachedScope,
        // which also chains args with ExtendArg_A.
        Assert(instrArg->m_opcode == Js::OpCode::ArgOut_A ||
        (helperMethod == IR::JnHelperMethod::HelperOP_InitCachedScope && instrArg->m_opcode == Js::OpCode::ExtendArg_A));
        prevInstr = this->LoadHelperArgument(prevInstr, instrArg->GetSrc1());
        // Advance down the chain before (possibly) deleting this link.
        argOpnd = instrArg->GetSrc2();
        if (instrArg->m_opcode == Js::OpCode::ArgOut_A)
        {
            // The value is now in the helper-arg queue; free the link sym and
            // remove the ArgOut. (ExtendArg_A instrs are left in place.)
            instrArg->UnlinkSrc1();
            if (argOpnd)
            {
                instrArg->UnlinkSrc2();
            }
            regArg->Free(this->m_func);
            instrArg->Remove();
        }
    }
    this->m_lowerer->LoadScriptContext(instrCall);
    // Args were queued in chain-walk order; flip them into the order
    // LowerCall assigns parameter locations in.
    this->FlipHelperCallArgsOrder();
    return this->ChangeToHelperCall(instrCall, helperMethod);
}
// Lower a call: May be either helper or native JS call. Just set the opcode, and
// put the result into the return register. (No stack adjustment required.)
// Note: argCount is not referenced in this ARM implementation.
IR::Instr *
LowererMD::LowerCall(IR::Instr * callInstr, Js::ArgSlot argCount)
{
    IR::Instr *retInstr = callInstr;
    IR::Opnd *targetOpnd = callInstr->GetSrc1();
    AssertMsg(targetOpnd, "Call without a target?");

    // This is required here due to calls created during lowering
    callInstr->m_func->SetHasCallsOnSelfAndParents();

    if (targetOpnd->IsRegOpnd())
    {
        // Indirect call
        callInstr->m_opcode = Js::OpCode::BLX;
    }
    else
    {
        AssertMsg(targetOpnd->IsHelperCallOpnd(), "Why haven't we loaded the call target?");
        // Direct call
        //
        // load the address into a register because we cannot directly access more than 24 bit constants
        // in BL instruction. Non helper call methods will already be accessed indirectly.
        //
        // Skip this for bailout calls. The register allocator will lower that as appropriate, without affecting spill choices.
        if (!callInstr->HasBailOutInfo())
        {
            // LDIMM the helper address into LR, then BLX through it.
            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, RegLR, TyMachPtr, this->m_func);
            IR::Instr *movInstr = IR::Instr::New(Js::OpCode::LDIMM, regOpnd, callInstr->GetSrc1(), this->m_func);
            regOpnd->m_isCallArg = true;
            callInstr->UnlinkSrc1();
            callInstr->SetSrc1(regOpnd);
            callInstr->InsertBefore(movInstr);
        }
        callInstr->m_opcode = Js::OpCode::BLX;
    }

    // For the sake of the prolog/epilog, note that we're not in a leaf. (Deliberately not
    // overloading Func::m_isLeaf here, as that's used for other purposes.)
    this->m_func->m_unwindInfo.SetHasCalls(true);

    IR::Opnd *dstOpnd = callInstr->GetDst();
    if (dstOpnd)
    {
        // Sink the dst: the call itself writes the machine return register
        // (RETURN_REG, or RETURN_DBL_REG for doubles), and a following
        // MOV/VMOV copies that into the original destination.
        IR::Instr * movInstr;
        if(dstOpnd->IsFloat64())
        {
            movInstr = callInstr->SinkDst(Js::OpCode::VMOV);
            callInstr->GetDst()->AsRegOpnd()->SetReg(RETURN_DBL_REG);
            movInstr->GetSrc1()->AsRegOpnd()->SetReg(RETURN_DBL_REG);
            retInstr = movInstr;
        }
        else
        {
            movInstr = callInstr->SinkDst(Js::OpCode::MOV);
            callInstr->GetDst()->AsRegOpnd()->SetReg(RETURN_REG);
            movInstr->GetSrc1()->AsRegOpnd()->SetReg(RETURN_REG);
            retInstr = movInstr;
        }
    }

    //
    // assign the arguments to appropriate positions
    //
    AssertMsg(this->helperCallArgsCount >= 0, "Fatal. helper call arguments ought to be positive");
    AssertMsg(this->helperCallArgsCount <= MaxArgumentsToHelper, "Too many helper call arguments");

    uint16 argsLeft = this->helperCallArgsCount;
    uint16 doubleArgsLeft = this->helperCallDoubleArgsCount;
    uint16 intArgsLeft = argsLeft - doubleArgsLeft;

    // Walk the queued helper args front-to-back; double and integer args are
    // assigned to their own register sequences, counting each kind down from
    // its last slot.
    while(argsLeft > 0)
    {
        IR::Opnd *helperArgOpnd = this->helperCallArgs[this->helperCallArgsCount - argsLeft];
        IR::Opnd * opndParam = nullptr;

        if (helperArgOpnd->GetType() == TyMachDouble)
        {
            opndParam = this->GetOpndForArgSlot(doubleArgsLeft - 1, true);
            AssertMsg(opndParam->IsRegOpnd(), "NYI for other kind of operands");
            --doubleArgsLeft;
        }
        else
        {
            opndParam = this->GetOpndForArgSlot(intArgsLeft - 1);
            --intArgsLeft;
        }
        LowererMD::CreateAssign(opndParam, helperArgOpnd, callInstr);
        --argsLeft;
    }
    Assert(doubleArgsLeft == 0 && intArgsLeft == 0 && argsLeft == 0);

    // We're done with the args (if any) now, so clear the param location state.
    this->FinishArgLowering();

    return retInstr;
}
  292. IR::Instr *
  293. LowererMD::LoadDynamicArgument(IR::Instr *instr, uint argNumber)
  294. {
  295. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  296. Assert(instr->GetSrc2() == nullptr);
  297. IR::Opnd* dst = GetOpndForArgSlot((Js::ArgSlot) (argNumber - 1));
  298. instr->SetDst(dst);
  299. instr->m_opcode = Js::OpCode::MOV;
  300. LegalizeMD::LegalizeInstr(instr, false);
  301. return instr;
  302. }
  303. IR::Instr *
  304. LowererMD::LoadDynamicArgumentUsingLength(IR::Instr *instr)
  305. {
  306. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  307. IR::RegOpnd* src2 = instr->UnlinkSrc2()->AsRegOpnd();
  308. IR::Instr *add = IR::Instr::New(Js::OpCode::SUB, IR::RegOpnd::New(TyInt32, this->m_func), src2, IR::IntConstOpnd::New(1, TyInt8, this->m_func), this->m_func);
  309. instr->InsertBefore(add);
  310. //We need store nth actuals, so stack location is after function object, callinfo & this pointer
  311. IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  312. IR::IndirOpnd *actualsLocation = IR::IndirOpnd::New(stackPointer, add->GetDst()->AsRegOpnd(), GetDefaultIndirScale(), TyMachReg, this->m_func);
  313. instr->SetDst(actualsLocation);
  314. instr->m_opcode = Js::OpCode::LDR;
  315. LegalizeMD::LegalizeInstr(instr, false);
  316. return instr;
  317. }
  318. void
  319. LowererMD::SetMaxArgSlots(Js::ArgSlot actualCount /*including this*/)
  320. {
  321. Js::ArgSlot offset = 3;//For function object & callInfo & this
  322. if (this->m_func->m_argSlotsForFunctionsCalled < (uint32) (actualCount + offset))
  323. {
  324. this->m_func->m_argSlotsForFunctionsCalled = (uint32)(actualCount + offset);
  325. }
  326. return;
  327. }
// GenerateMemInit: forward to the shared lowerer's memory-init helper,
// truncating the size_t value to 32 bits (the ARM32 machine word).
void
LowererMD::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    m_lowerer->GenerateMemInit(opnd, offset, (uint32)value, insertBeforeInstr, isZeroed);
}
// LowerCallIDynamic: lower a call whose argument count is only known at
// runtime (e.g. via a spread/apply-style path). Places the "this" ArgOut,
// computes the callinfo from argsLength, generates the pre-call target load,
// passes the function object as argument 0, and lowers the call itself.
// Note: callFlags and insertBeforeInstrForCFG are not referenced here.
IR::Instr *
LowererMD::LowerCallIDynamic(IR::Instr *callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr, 3); //this pointer is the 3rd argument

    //callInfo
    if (callInstr->m_func->IsInlinee())
    {
        // For inlinees the actual count is a compile-time constant.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
        this->SetMaxArgSlots((Js::ArgSlot)callInstr->m_func->actualCount);
    }
    else
    {
        // Runtime count: add 1 (for "this") to argsLength in place, and
        // reserve the maximum an inlinee call could need.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyInt8, this->m_func), this->m_func));
        this->SetMaxArgSlots(Js::InlineeCallInfo::MaxInlineeArgoutCount);
    }
    // Arg slot 1 carries the callinfo (arg count).
    LowererMD::CreateAssign( this->GetOpndForArgSlot(1), argsLength, callInstr);

    IR::RegOpnd *funcObjOpnd = callInstr->UnlinkSrc1()->AsRegOpnd();
    GeneratePreCall(callInstr, funcObjOpnd);

    // functionOpnd is the first argument.
    IR::Opnd * opndParam = this->GetOpndForArgSlot(0);
    LowererMD::CreateAssign(opndParam, funcObjOpnd, callInstr);
    return this->LowerCall(callInstr, 0);
}
// GenerateFunctionObjectTest: before a call, verify the callee operand is a
// real object and not a tagged value; raise JSERR_NeedFunction otherwise.
// Emits (when the operand isn't provably untagged):
//     TST  functionObjOpnd, AtomTag
//     BNE  $helper                  ; tagged -> runtime error
//     B    $callLabel
//   $helper:
//     <GenerateRuntimeError JSERR_NeedFunction>
//     [B continueAfterExLabel]      ; only under debugger
//   $callLabel:
void
LowererMD::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");
    if (!functionObjOpnd->IsNotTaggedValue())
    {
        IR::Instr * insertBeforeInstr = callInstr;
        // Need check and error if we are calling a tagged int.
        if (!functionObjOpnd->IsTaggedInt())
        {
            // TST functionObjOpnd, 1
            IR::Instr * instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
            instr->SetSrc1(functionObjOpnd);
            instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func));
            callInstr->InsertBefore(instr);

            // BNE $helper
            // B $callLabel
            IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr = IR::BranchInstr::New(Js::OpCode::BNE, helperLabel, this->m_func);
            callInstr->InsertBefore(instr);

            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            instr = IR::BranchInstr::New(Js::OpCode::B, callLabel, this->m_func);
            callInstr->InsertBefore(instr);

            callInstr->InsertBefore(helperLabel);
            callInstr->InsertBefore(callLabel);

            // The error sequence goes between the two labels (on the helper path).
            insertBeforeInstr = callLabel;
        }
        this->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);

        if (continueAfterExLabel)
        {
            // Under debugger the RuntimeError (exception) can be ignored, generate branch right after RunTimeError instr
            // to jmp to a safe place (which would normally be debugger bailout check).
            IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
            insertBeforeInstr->InsertBefore(continueAfterEx);
        }
    }
}
// GeneratePreCall: load the callee's entry point and make it the call's src1.
// Loads the function's type (from the fixed-function address, or indirectly
// through the function object register), then the entry point out of the type.
// Returns the instruction that loads the target address, which callers use as
// the insertion point for sunk stack-parameter stores.
IR::Instr*
LowererMD::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd)
{
    IR::RegOpnd * functionTypeRegOpnd = nullptr;

    // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
    // For other calls, we need to load it from the function object stored in a register operand.
    if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
    {
        functionTypeRegOpnd = this->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(callInstr, functionObjOpnd);
    }
    else if (functionObjOpnd->IsRegOpnd())
    {
        AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be stackSym");

        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        IR::IndirOpnd* functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
        Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        LowererMD::CreateAssign(functionTypeRegOpnd, functionTypeIndirOpnd, callInstr);
    }
    else
    {
        AssertMsg(false, "Unexpected call target operand type.");
    }

    // entryPoint = functionType->entryPoint
    int entryPointOffset = Js::Type::GetOffsetOfEntryPoint();
    IR::IndirOpnd* entryPointOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachPtr, this->m_func);
    IR::RegOpnd * targetAddrOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr * stackParamInsert = LowererMD::CreateAssign(targetAddrOpnd, entryPointOpnd, callInstr);

    // targetAddrOpnd is the address we'll call.
    callInstr->SetSrc1(targetAddrOpnd);

    return stackParamInsert;
}
// LowerCallI: lower an indirect JS call through a function object.
// Validates the callee (unless fixed or a 'new' call), loads the entry point,
// lowers the arg chain, passes the function object as argument 0, lowers the
// call, and (in SimpleJit profiling builds) wraps it with call profiling.
IR::Instr *
LowererMD::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    // Indirect call using JS calling convention:
    // R0 = callee func object
    // R1 = callinfo
    // R2 = arg0 ("this")
    // R3 = arg1
    // [sp] = arg2
    // etc.

    // First load the target address. Note that we want to wind up with this:
    // ...
    // [sp+4] = arg3
    // [sp] = arg2
    // load target addr from func obj
    // R3 = arg1
    // ...
    // R0 = func obj
    // BLX target addr

    // This way the register containing the target addr interferes with the param regs
    // only, not the regs we use to store params to the stack.

    // We're sinking the stores of stack params so that the call sequence is contiguous.
    // This is required by nested calls, since each call will re-use the same stack slots.
    // But if there is no nesting, stack params can be stored as soon as they're computed.

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();

    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again.
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
        // TODO: Remove unreachable code if we have proved that it is a tagged in.
    }
    // Can't assert until we remove unreachable code if we have proved that it is a tagged int.
    // Assert((callFlags & Js::CallFlags_New) || !functionWrapOpnd->IsTaggedInt());

    // stackParamInsert marks where sunk stack-parameter stores go (see above).
    IR::Instr * stackParamInsert = GeneratePreCall(callInstr, functionObjOpnd);

    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo;
    int32 argCount = this->LowerCallArgs(callInstr, stackParamInsert, callFlags, 1, &callInfo);

    // functionObjOpnd is the first argument.
    IR::Opnd * opndParam = this->GetOpndForArgSlot(0);
    LowererMD::CreateAssign(opndParam, functionObjOpnd, callInstr);

    IR::Opnd *const finalDst = callInstr->GetDst();

    // Finally, lower the call instruction itself.
    IR::Instr* ret = this->LowerCall(callInstr, (Js::ArgSlot)argCount);

    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!CONFIG_FLAG(NewSimpleJit));

        if(finalDst &&
        finalDst->IsRegOpnd() &&
        functionObjOpnd->IsRegOpnd() &&
        finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }

        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->m_lowerer->GenerateCallProfiling(
        instr->profileId,
        instr->inlineCacheIndex,
        instr->GetDst(),
        functionObjOpnd,
        callInfo,
        instr->isProfiledReturnCall,
        callInstr,
        ret);
    }
    return ret;
}
// LowerCallArgs: walk the ArgOut chain hanging off callInstr's src2, assign
// each argument to its home (register args stay at the call; stack stores are
// sunk to stackParamInsert so nested calls don't clobber shared slots), lower
// the StartCall, and store the callinfo constant into its arg slot.
// extraParams is the number of non-user leading slots (e.g. 1 for callinfo);
// callInfoOpndRef, if non-null, receives an extra use of the callinfo opnd.
// Returns the total slot count: user args + 1 (callinfo) + extraParams.
int32
LowererMD::LowerCallArgs(IR::Instr *callInstr, IR::Instr *stackParamInsert, ushort callFlags, Js::ArgSlot extraParams, IR::IntConstOpnd **callInfoOpndRef)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");

    uint32 argCount = 0;
    IR::Opnd* opndParam;

    // Now walk the user arguments and remember the arg count.
    IR::Instr * argInstr = callInstr;
    IR::Opnd *src2Opnd = callInstr->UnlinkSrc2();
    // Each link in the chain is a SymOpnd; the chain terminates with a
    // RegOpnd that leads to the StartCall.
    while (src2Opnd->IsSymOpnd())
    {
        // Get the arg instr
        IR::SymOpnd * argLinkOpnd = src2Opnd->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);
        argInstr = argLinkSym->m_instrDef;

        // The arg sym isn't assigned a constant directly anymore
        argLinkSym->m_isConst = false;
        argLinkSym->m_isIntConst = false;
        argLinkSym->m_isTaggableIntConst = false;

        // The arg slot nums are 1-based, so subtract 1. Then add 1 for the non-user args (callinfo).
        auto argSlotNum = argLinkSym->GetArgSlotNum();
        if(argSlotNum + extraParams < argSlotNum)
        {
            // Wrap-around on the slot arithmetic: bail out hard.
            Js::Throw::OutOfMemory();
        }
        opndParam = this->GetOpndForArgSlot(argSlotNum + extraParams);

        src2Opnd = argInstr->UnlinkSrc2();
        argInstr->ReplaceDst(opndParam);
        argInstr->Unlink();
        // Register-homed args are placed right before the call; stack stores
        // are sunk to stackParamInsert (see LowerCallI's ordering comment).
        if (opndParam->IsRegOpnd())
        {
            callInstr->InsertBefore(argInstr);
        }
        else
        {
            stackParamInsert->InsertBefore(argInstr);
        }
        this->ChangeToAssign(argInstr);
        argCount++;
    }

    IR::RegOpnd * argLinkOpnd = src2Opnd->AsRegOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall || startCallInstr->m_opcode == Js::OpCode::LoweredStartCall, "Problem with arg chain.");
    AssertMsg(startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount,
    "ArgCount doesn't match StartCall count");
    // Deal with the SC.
    this->LowerStartCall(startCallInstr);

    // Second argument is the callinfo.
    IR::IntConstOpnd *opndCallInfo = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
    if(callInfoOpndRef)
    {
        opndCallInfo->Use(m_func);
        *callInfoOpndRef = opndCallInfo;
    }
    opndParam = this->GetOpndForArgSlot(extraParams);
    LowererMD::CreateAssign(opndParam, opndCallInfo, callInstr);

    return argCount + 1 + extraParams; // + 1 for call flags
}
// LowerStartCall: on ARM no stack adjustment is emitted for StartCall.
// The opcode is rewritten to LoweredStartCall (rather than removing the
// instruction) so the arg-chain walk in LowerCallArgs, which accepts either
// opcode, still finds a valid chain terminator.
IR::Instr *
LowererMD::LowerStartCall(IR::Instr * instr)
{
    // StartCall doesn't need to generate a stack adjustment. Just delete it.
    instr->m_opcode = Js::OpCode::LoweredStartCall;
    return instr;
}
  568. IR::Instr *
  569. LowererMD::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArgValue)
  570. {
  571. // Load the given parameter into the appropriate location.
  572. // We update the current param state so we can do this work without making the caller
  573. // do the work.
  574. Assert(this->helperCallArgsCount < LowererMD::MaxArgumentsToHelper);
  575. __analysis_assume(this->helperCallArgsCount < MaxArgumentsToHelper);
  576. helperCallArgs[helperCallArgsCount++] = opndArgValue;
  577. if (opndArgValue->GetType() == TyMachDouble)
  578. {
  579. this->helperCallDoubleArgsCount++;
  580. }
  581. return instr;
  582. }
// FinishArgLowering: reset the helper-call argument bookkeeping once the
// queued args have been assigned to their final locations (see LowerCall).
void
LowererMD::FinishArgLowering()
{
    this->helperCallArgsCount = 0;
    this->helperCallDoubleArgsCount = 0;
}
  589. IR::Opnd *
  590. LowererMD::GetOpndForArgSlot(Js::ArgSlot argSlot, bool isDoubleArgument)
  591. {
  592. IR::Opnd * opndParam = nullptr;
  593. if (!isDoubleArgument)
  594. {
  595. if (argSlot < NUM_INT_ARG_REGS)
  596. {
  597. // Return an instance of the next arg register.
  598. IR::RegOpnd *regOpnd;
  599. regOpnd = IR::RegOpnd::New(nullptr, (RegNum)(argSlot + FIRST_INT_ARG_REG), TyMachReg, this->m_func);
  600. regOpnd->m_isCallArg = true;
  601. opndParam = regOpnd;
  602. }
  603. else
  604. {
  605. // Create a stack slot reference and bump up the size of this function's outgoing param area,
  606. // if necessary.
  607. argSlot = argSlot - NUM_INT_ARG_REGS;
  608. IntConstType offset = argSlot * MachRegInt;
  609. IR::RegOpnd * spBase = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  610. opndParam = IR::IndirOpnd::New(spBase, offset, TyMachReg, this->m_func);
  611. if (this->m_func->m_argSlotsForFunctionsCalled < (uint32)(argSlot + 1))
  612. {
  613. this->m_func->m_argSlotsForFunctionsCalled = argSlot + 1;
  614. }
  615. }
  616. }
  617. else
  618. {
  619. if (argSlot < MaxDoubleArgumentsToHelper)
  620. {
  621. // Return an instance of the next arg register.
  622. IR::RegOpnd *regOpnd;
  623. regOpnd = IR::RegOpnd::New(nullptr, (RegNum)(argSlot + FIRST_DOUBLE_ARG_REG), TyMachDouble, this->m_func);
  624. regOpnd->m_isCallArg = true;
  625. opndParam = regOpnd;
  626. }
  627. else
  628. {
  629. AssertMsg(false,"More than 8 double parameter passing disallowed");
  630. }
  631. }
  632. return opndParam;
  633. }
// LoadDoubleHelperArgument: queue a double-typed helper-call argument.
// Doubles share the queue with integer args; LoadHelperArgument bumps the
// separate double counter so LowerCall can assign VFP registers.
IR::Instr *
LowererMD::LoadDoubleHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
{
    // Load the given parameter into the appropriate location.
    // We update the current param state so we can do this work without making the caller
    // do the work.
    Assert(opndArg->GetType() == TyMachDouble);
    return this->LoadHelperArgument(instr, opndArg);
}
// Emit a stack-overflow probe before 'insertInstr'. The fast path compares SP
// against a stack limit; on failure (or on add overflow when the limit is
// computed dynamically) control falls into a helper call that re-checks and
// must throw. 'afterProlog' selects BLX vs BX for the helper call and whether
// the SP_ALLOC_SCRATCH_REG spill/restore is needed around the limit computation.
void
LowererMD::GenerateStackProbe(IR::Instr *insertInstr, bool afterProlog)
{
    //
    // Generate a stack overflow check. This can be as simple as a cmp esp, const
    // because this function is guaranteed to be called on its base thread only.
    // If the check fails call ThreadContext::ProbeCurrentStack which will check again and must throw.
    //
    //       LDIMM r12, ThreadContext::scriptStackLimit + frameSize //Load to register first, as this can be more than 12 bit supported in CMP
    //       CMP   sp, r12
    //       BGT   done
    // begin:
    //       LDIMM r0, frameSize
    //       LDIMM r1, scriptContext
    //       LDIMM r2, ThreadContext::ProbeCurrentStack //MUST THROW
    //       BLX   r2                                   //BX r2 if the stackprobe is before prolog
    // done:
    //
    // For thread context with script interrupt enabled:
    //       LDIMM r12, &ThreadContext::scriptStackLimitForCurrentThread
    //       LDR   r12, [r12]
    //       ADD   r12, frameSize
    //       BVS   $helper
    //       CMP   sp, r12
    //       BGT   done
    // $helper:
    //       LDIMM r0, frameSize
    //       LDIMM r1, scriptContext
    //       LDIMM r2, ThreadContext::ProbeCurrentStack //MUST THROW
    //       BLX   r2                                   //BX r2 if the stackprobe is before prolog
    // done:
    //

    //m_localStackHeight for ARM contains (m_argSlotsForFunctionsCalled * MachPtr)
    uint32 frameSize = this->m_func->m_localStackHeight + Js::Constants::MinStackJIT;

    IR::RegOpnd *scratchOpnd = IR::RegOpnd::New(nullptr, SCRATCH_REG, TyMachReg, this->m_func);
    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, afterProlog);
    IR::Instr *instr;
    bool doInterruptProbe = m_func->GetJITFunctionBody()->DoInterruptProbe();

    if (doInterruptProbe || !m_func->GetThreadContextInfo()->IsThreadBound())
    {
        // The limit can change at runtime (script interrupt, or thread not bound),
        // so load it through its address instead of baking in a constant.
        // Load the current stack limit and add the current frame allocation.
        {
            intptr_t pLimit = m_func->GetThreadContextInfo()->GetThreadStackLimitAddr();
            this->CreateAssign(scratchOpnd, IR::AddrOpnd::New(pLimit, IR::AddrOpndKindDynamicMisc, this->m_func), insertInstr);
            this->CreateAssign(scratchOpnd, IR::IndirOpnd::New(scratchOpnd, 0, TyMachReg, this->m_func), insertInstr);
        }

        if (EncoderMD::CanEncodeModConst12(frameSize))
        {
            // If the frame size is small enough, just add the constant.
            // Does this ever happen with the size of the MinStackJIT constant?
            // ADDS (flag-setting) so the BVS below can detect overflow of the add.
            instr = IR::Instr::New(Js::OpCode::ADDS, scratchOpnd, scratchOpnd,
                IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // We need a second scratch reg.
            // If we're probing after the prolog, the reg has already been saved and will be restored.
            // If not, push and pop it here, knowing that we'll never throw while the stack is whacked.
            Assert(!afterProlog || this->m_func->m_unwindInfo.GetSavedScratchReg());

            BVUnit scratchBit;
            IR::Opnd *opnd;
            if (!afterProlog)
            {
                // PUSH {SP_ALLOC_SCRATCH_REG} -- temporary spill before the prolog saved it.
                opnd = IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func);
                instr = IR::Instr::New(Js::OpCode::PUSH, opnd, this->m_func);
                scratchBit.Set(RegEncode[SP_ALLOC_SCRATCH_REG]);
                opnd = IR::RegBVOpnd::New(scratchBit, TyMachReg, this->m_func);
                instr->SetSrc1(opnd);
                insertInstr->InsertBefore(instr);
            }

            // Materialize the (non-encodable) frame size in the second scratch reg,
            // then add it to the loaded limit with flags set for the overflow check.
            IR::Opnd *scratchOpnd2 = IR::RegOpnd::New(nullptr, SP_ALLOC_SCRATCH_REG, TyMachReg, this->m_func);
            this->CreateAssign(scratchOpnd2, IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), insertInstr);

            instr = IR::Instr::New(Js::OpCode::ADDS, scratchOpnd, scratchOpnd, scratchOpnd2, this->m_func);
            insertInstr->InsertBefore(instr);

            if (!afterProlog)
            {
                // POP {SP_ALLOC_SCRATCH_REG} -- undo the temporary spill above.
                Assert(scratchBit.Test(RegEncode[SP_ALLOC_SCRATCH_REG]));
                opnd = IR::RegBVOpnd::New(scratchBit, TyMachReg, this->m_func);
                instr = IR::Instr::New(Js::OpCode::POP, opnd, this->m_func);
                opnd = IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func);
                instr->SetSrc1(opnd);
                insertInstr->InsertBefore(instr);
            }
        }

        // If this add overflows, we have to call the helper.
        instr = IR::BranchInstr::New(Js::OpCode::BVS, helperLabel, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    else
    {
        // Thread-bound, no interrupt probe: the limit is a compile-time constant,
        // so fold limit + frameSize into a single immediate load.
        uint32 scriptStackLimit = (uint32)m_func->GetThreadContextInfo()->GetScriptStackLimit();
        IR::Opnd *stackLimitOpnd = IR::IntConstOpnd::New(frameSize + scriptStackLimit, TyMachReg, this->m_func);
        this->CreateAssign(scratchOpnd, stackLimitOpnd, insertInstr);
    }

    IR::LabelInstr *doneLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
    if (!IS_FAULTINJECT_STACK_PROBE_ON) // Do stack check fastpath only if not doing StackProbe fault injection
    {
        // CMP sp, limit ; BGT done -- the fast path skips the helper entirely.
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func));
        instr->SetSrc2(scratchOpnd);
        insertInstr->InsertBefore(instr);

        instr = IR::BranchInstr::New(Js::OpCode::BGT, doneLabelInstr, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    // Zero out the pointer to the list of stack nested funcs, since the functions won't be initialized on this path.
    scratchOpnd = IR::RegOpnd::New(nullptr, RegR0, TyMachReg, m_func);
    IR::RegOpnd *frameReg = IR::RegOpnd::New(nullptr, GetRegFramePointer(), TyMachReg, m_func);
    CreateAssign(scratchOpnd, IR::IntConstOpnd::New(0, TyMachReg, m_func), insertInstr);
    IR::Opnd *indirOpnd = IR::IndirOpnd::New(
        frameReg, -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachReg, m_func);
    CreateAssign(indirOpnd, scratchOpnd, insertInstr);

    // Helper arguments: r0 = frameSize, r1 = script context, r2 = helper address.
    IR::RegOpnd *r0Opnd = IR::RegOpnd::New(nullptr, RegR0, TyMachReg, this->m_func);
    this->CreateAssign(r0Opnd, IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func, true), insertInstr);

    IR::RegOpnd *r1Opnd = IR::RegOpnd::New(nullptr, RegR1, TyMachReg, this->m_func);
    this->CreateAssign(r1Opnd, this->m_lowerer->LoadScriptContextOpnd(insertInstr), insertInstr);

    IR::RegOpnd *r2Opnd = IR::RegOpnd::New(nullptr, RegR2, TyMachReg, m_func);
    this->CreateAssign(r2Opnd, IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack, this->m_func), insertInstr);

    // BLX when probing after the prolog (LR is saved); BX when before it
    // (no frame yet, and the helper must throw rather than return).
    instr = IR::Instr::New(afterProlog? Js::OpCode::BLX : Js::OpCode::BX, this->m_func);
    instr->SetSrc1(r2Opnd);
    insertInstr->InsertBefore(instr);

    insertInstr->InsertBefore(doneLabelInstr);
    Security::InsertRandomFunctionPad(doneLabelInstr);
}
  768. //
  769. // Emits the code to allocate 'size' amount of space on stack. for values smaller than PAGE_SIZE
  770. // this will just emit sub rsp,size otherwise calls _chkstk.
  771. //
  772. bool
  773. LowererMD::GenerateStackAllocation(IR::Instr *instr, uint32 allocSize, uint32 probeSize)
  774. {
  775. IR::RegOpnd * spOpnd = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  776. if (IsSmallStack(probeSize))
  777. {
  778. AssertMsg(!(allocSize & 0xFFFFF000), "Must fit in 12 bits");
  779. // Generate SUB SP, SP, stackSize
  780. IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(allocSize, TyMachReg, this->m_func, true);
  781. IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, spOpnd, spOpnd, stackSizeOpnd, this->m_func);
  782. instr->InsertBefore(subInstr);
  783. return false;
  784. }
  785. //__chkStk is a leaf function and hence alignment is not required.
  786. // Generate _chkstk call
  787. // LDIMM RegR4, stackSize/4 //input: r4 = the number of WORDS (word = 4 bytes) to allocate,
  788. // LDIMM RegR12, HelperCRT_chkstk
  789. // BLX RegR12
  790. // SUB SP, SP, RegR4 //output: r4 = total number of BYTES probed/allocated.
  791. //chkstk expects the stacksize argument in R4 register
  792. IR::RegOpnd *r4Opnd = IR::RegOpnd::New(nullptr, SP_ALLOC_SCRATCH_REG, TyMachReg, this->m_func);
  793. IR::RegOpnd *targetOpnd = IR::RegOpnd::New(nullptr, SCRATCH_REG, TyMachReg, this->m_func);
  794. IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New((allocSize/MachPtr), TyMachReg, this->m_func, true);
  795. IR::Instr *movInstr = IR::Instr::New(Js::OpCode::LDIMM, r4Opnd, stackSizeOpnd, this->m_func);
  796. instr->InsertBefore(movInstr);
  797. IR::Instr *movHelperAddrInstr = IR::Instr::New(Js::OpCode::LDIMM, targetOpnd, IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func), this->m_func);
  798. instr->InsertBefore(movHelperAddrInstr);
  799. IR::Instr * callInstr = IR::Instr::New(Js::OpCode::BLX, r4Opnd, targetOpnd, this->m_func);
  800. instr->InsertBefore(callInstr);
  801. // Generate SUB SP, SP, R4
  802. IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, spOpnd, spOpnd, r4Opnd, this->m_func);
  803. instr->InsertBefore(subInstr);
  804. // return true to imply scratch register is trashed
  805. return true;
  806. }
  807. void
  808. LowererMD::GenerateStackDeallocation(IR::Instr *instr, uint32 allocSize)
  809. {
  810. IR::RegOpnd * spOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
  811. IR::Instr * spAdjustInstr = IR::Instr::New(Js::OpCode::ADD,
  812. spOpnd,
  813. spOpnd,
  814. IR::IntConstOpnd::New(allocSize, TyMachReg, this->m_func, true), this->m_func);
  815. instr->InsertBefore(spAdjustInstr);
  816. LegalizeMD::LegalizeInstr(spAdjustInstr, true);
  817. }
  818. //------------------------------------------------------------------------------------------
  819. //
  820. // Prologs and epilogs on ARM:
  821. //
  822. // 1. Normal non-leaf function:
  823. //
  824. // MOV r12,0 -- prepare to clear the arg obj slot (not in prolog or pdata)
  825. // $PrologStart:
// PUSH {r0-r3}        -- home parameters (homes only r0-r1 for global function, r2 as well for eval with "this")
  827. // PUSH {r11,lr} -- save frame pointer and return address
  828. // MOV r11,sp -- set up frame chain (r11 points to saved r11)
  829. // PUSH {r4-r10,r12} -- save non-volatile regs (only used), clear arg obj slot
  830. // VPUSH {d8-d15} -- save non-volatile double regs (only used)
  831. // SUB sp, stack -- allocate locals and arg out area
  832. // <probe stack> -- not in prolog
  833. // ...
  834. // ADD sp, stack -- deallocate locals and args
  835. // POP {r4-r10,r12} -- restore registers
  836. // POP {r11} -- restore frame pointer
  837. // LDR pc,[sp],#20 -- load return address into pc and deallocate remaining stack
  838. // $EpilogEnd:
  839. //
  840. // 2. Function with large stack
  841. //
  842. // <probe stack> -- not in prolog
  843. // MOV r12,0
  844. // $PrologStart:
  845. // <save params and regs, set up frame chain as above>
  846. // MOV r4, stack/4 -- input param to chkstk is a DWORD count
  847. // LDIMM r12, &chkstk
  848. // BLX r12
  849. // SUB sp, r4 -- byte count returned by chkstk in r4
  850. // ...
  851. // <epilog as above>
  852. //
  853. // 3. Function with try-catch-finally
  854. //
  855. // MOV r12,0
  856. // $PrologStart:
  857. // PUSH {r0-r3}
  858. // PUSH {r11,lr}
  859. // MOV r11,sp
  860. // PUSH {r4-r10,r12}
  861. // MOV r6,sp -- save pointer to the saved regs
  862. // SUB sp, locals -- allocate locals area only
  863. // MOV r7,sp -- set up locals pointer; all accesses to locals in the body are through r7
  864. // PUSH {r6} -- store the saved regs pointer on the stack
  865. // SUB sp, args -- allocate space for out args passed on stack
  866. // ...
  867. // ADD sp, args
  868. // POP {r6} -- load the saved regs pointer
  869. // MOV sp,r6 -- restore sp to the saved regs area
  870. // POP {r4-r10,r12}
  871. // POP {r11}
  872. // LDR pc,[sp],#20
  873. // $EpilogEnd:
// Lower the function entry: compute the frame layout (homed params, callee-saved
// GP and double registers, alignment padding, the dedicated arguments slot) and
// emit the ARM prolog instructions after 'entryInstr' (see the layout comment
// above this function). Also records everything needed for pdata/xdata unwind
// emission and inserts the stack probe either before or after the prolog
// depending on frame size. Mutates m_localStackHeight, m_ArgumentsOffset and
// m_argSlotsForFunctionsCalled as a side effect.
IR::Instr *
LowererMD::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    IR::Instr *insertInstr = entryInstr->m_next;
    BYTE regEncode;
    BOOL hasTry = this->m_func->HasTry();

    // Begin recording info for later pdata/xdata emission.
    UnwindInfoManager *unwindInfo = &this->m_func->m_unwindInfo;
    unwindInfo->Init(this->m_func);

    // WRT CheckAlignment:
    // - The code commented out below (which seems to be copied from x86) causes a hang: it trashes LR to make the call.
    // - Ideally, we could save R0-R3, L11, LR to stack (R0-R3 can potentially be trashed + make sure to keep 8 byte alignment)
    //   then call the HelperScrFunc_CheckAlignment which should take 1 argument:
    //   whether it's leaf (should be 4 byte aligned) or non-leaf function (should be 8-byte aligned),
    //   then restore R0-R3, R11, LR from the stack.
    // - But since on ARM currently the helper doesn't do anything, let's just comment this code out.
    // - On x86 there is no LR and all args go to stack, that's why similar code works fine.
    //#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    //    if (Js::Configuration::Global.flags.IsEnabled(Js::CheckAlignmentFlag))
    //    {
    //        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    //        callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrFunc_CheckAlignment, this->m_func));
    //        insertInstr->InsertBefore(callInstr);
    //
    //        this->LowerCall(callInstr, 0);
    //    }
    //#endif

    // First calculate the local stack.
    if (hasTry)
    {
        // If there's a try in the function, then the locals area must be 8-byte-aligned. That's because
        // the main function will allocate a locals area, and the try helper will allocate the same stack
        // but without a locals area, and both must be 8-byte aligned. So adding the locals area can't change
        // the alignment.
        this->m_func->m_localStackHeight = Math::Align<int32>(this->m_func->m_localStackHeight, MachStackAlignment);
    }

    if (this->m_func->HasInlinee())
    {
        // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
        // we can unconditionally clear the first slot past the current frame.
        this->m_func->m_localStackHeight += this->m_func->GetInlineeArgumentStackSize();
    }

    int32 stackAdjust = this->m_func->m_localStackHeight + (this->m_func->m_argSlotsForFunctionsCalled * MachPtr);
    if (stackAdjust != 0)
    {
        // We might need to call ProbeStack or __chkstk, hence mark this function as hasCalls.
        unwindInfo->SetHasCalls(true);
    }

    bool hasStackNestedFuncList = false;

    // We need to have the same register saves in the prolog as the arm_CallEhFrame, so that we can use the same
    // epilog. So always allocate a slot for the stack nested func here whether we actually do have any stack
    // nested func or not
    // TODO-STACK-NESTED-FUNC: May be use a different arm_CallEhFrame for when we have stack nested func?
    if (this->m_func->HasAnyStackNestedFunc() || hasTry)
    {
        // Just force it to have calls if we have stack nested func so we have a stable
        // location for the stack nested function list
        hasStackNestedFuncList = true;
        unwindInfo->SetHasCalls(true);
    }

    bool hasCalls = unwindInfo->GetHasCalls();

    // Home the params. This is done to enable on-the-fly creation of the arguments object,
    // Dyno bailout code, etc. For non-global functions, that means homing all the param registers
    // (since we have to assume they all have valid parameters). For the global function,
    // just home r0 (function object) and r1 (callinfo), which the runtime can't get by any other means.
    int32 regSaveArea = 0;
    BVUnit paramRegs;
    int homedParamRegCount;
    // Note: home all the param registers if there's a try, because that's what the try helpers do.
    if (this->m_func->IsLoopBody() && !hasTry)
    {
        // Jitted loop body takes only one "user" param: the pointer to the local slots.
        homedParamRegCount = MIN_HOMED_PARAM_REGS + 1;
        Assert(homedParamRegCount <= NUM_INT_ARG_REGS);
    }
    else if (!hasCalls)
    {
        // A leaf function (no calls of any kind, including helpers) may still need its params, or, if it
        // has none, may still need the function object and call info.
        homedParamRegCount = MIN_HOMED_PARAM_REGS + this->m_func->GetInParamsCount();
        if (homedParamRegCount > NUM_INT_ARG_REGS)
        {
            homedParamRegCount = NUM_INT_ARG_REGS;
        }
    }
    else
    {
        homedParamRegCount = NUM_INT_ARG_REGS;
    }
    // The count must fit in a BYTE for the unwind info.
    Assert((BYTE)homedParamRegCount == homedParamRegCount);
    unwindInfo->SetHomedParamCount((BYTE)homedParamRegCount);

    for (int i = 0; i < homedParamRegCount; i++)
    {
        RegNum reg = (RegNum)(FIRST_INT_ARG_REG + i);
        paramRegs.Set(RegEncode[reg]);
        regSaveArea += MachRegInt;
    }

    // Record used callee-saved registers. This is in the form of a fixed bitfield.
    BVUnit usedRegs;
    int32 fpOffsetSize = 0;
    for (RegNum reg = FIRST_CALLEE_SAVED_GP_REG; reg <= LAST_CALLEE_SAVED_GP_REG; reg = (RegNum)(reg+1))
    {
        Assert(LinearScan::IsCalleeSaved(reg));
        Assert(reg != RegLR);
        // Save all the regs if there's a try, because that's what the try helpers have to do.
        if (this->m_func->m_regsUsed.Test(reg) || hasTry)
        {
            regEncode = RegEncode[reg];
            usedRegs.Set(regEncode);
            unwindInfo->SetSavedReg(regEncode);
            fpOffsetSize += MachRegInt;
        }
    }

    BVUnit32 usedDoubleRegs;
    short doubleRegCount = 0;

    if (!hasTry)
    {
        for (RegNum reg = FIRST_CALLEE_SAVED_DBL_REG; reg <= LAST_CALLEE_SAVED_DBL_REG; reg = (RegNum)(reg+1))
        {
            Assert(LinearScan::IsCalleeSaved(reg));
            if (this->m_func->m_regsUsed.Test(reg))
            {
                // Double-reg encodings are recorded relative to D0.
                regEncode = RegEncode[reg] - RegEncode[RegD0];
                usedDoubleRegs.Set(regEncode);
                doubleRegCount++;
            }
        }

        if (doubleRegCount)
        {
            BYTE lastDoubleReg = UnwindInfoManager::GetLastSavedReg(usedDoubleRegs.GetWord());
            BYTE firstDoubleReg = UnwindInfoManager::GetFirstSavedReg(usedDoubleRegs.GetWord());

            // We do want to push all the double registers in a single VPUSH instructions
            // This might cause us to VPUSH some registers which are not used
            // But this makes unwind & prolog simple. But if we do see this case a lot
            // then consider adding multiple VPUSH
            short count = lastDoubleReg - firstDoubleReg + 1;

            // Register allocator can allocate a temp reg from the other end of the bit vector so that it can keep it live for longer.
            // Hence count may not be equal to doubleRegCount in all scenarios. These are rare and hence its okay to use single VPUSH instruction.
            // Handle these scenarios for free builds.
            usedDoubleRegs.SetRange(firstDoubleReg, count);
            doubleRegCount = count;
        }
    }
    else
    {
        // Set for all the callee saved double registers
        usedDoubleRegs.SetRange(RegD8-RegD0, CALLEE_SAVED_DOUBLE_REG_COUNT);
        doubleRegCount = CALLEE_SAVED_DOUBLE_REG_COUNT;
    }

    if (doubleRegCount)
    {
        unwindInfo->SetDoubleSavedRegList(usedDoubleRegs.GetWord());
        fpOffsetSize += (doubleRegCount * MachRegDouble);

        // When there is try-catch we allocate registers even if there are no calls. For scenarios see Win8 487030.
        // This seems to be overkill but consistent with int registers.
        AssertMsg(hasCalls || hasTry, "Assigned double registers without any calls?");
        // Anyway handle it for free builds.
        if (!hasCalls)
        {
            this->m_func->m_unwindInfo.SetHasCalls(true);
            hasCalls = true;
        }
    }

    regSaveArea += fpOffsetSize;

    if (hasTry)
    {
        // Account for the saved SP on the stack.
        regSaveArea += MachRegInt;
    }
    this->m_func->m_ArgumentsOffset = fpOffsetSize;

    if (hasStackNestedFuncList)
    {
        // Use r11: it allocates one more slot in the register save area.
        // We will zero it later.
        regEncode = RegEncode[RegR11];
        usedRegs.Set(regEncode);
        unwindInfo->SetSavedReg(regEncode);
        regSaveArea += MachRegInt;
        fpOffsetSize += MachRegInt;
        this->m_func->m_ArgumentsOffset += MachRegInt;
    }

    // NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE
    //
    // If you change this->m_func->m_localStackHeight after the following code you MUST take that
    // into account below. Otherwise, the stack will become unbalanced or corrupted.
    //
    // NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE
    DWORD stackProbeStackHeight = this->m_func->m_localStackHeight;

    // If we've already got calls and we don't have a try, we need to take adjustments
    // below into account to determine whether our not our final stack height is going to be
    // encodable. We're not going to take into account the adjustment for saving R4, because we're
    // trying to figure out if we will be able to encode if we DON'T save it. If we save it anyway,
    // the point is moot.
    if (hasCalls && !hasTry)
    {
        int32 bytesOnStack = stackAdjust + regSaveArea + 3 * MachRegInt;
        int32 alignPad = Math::Align<int32>(bytesOnStack, MachStackAlignment) - bytesOnStack;
        if (alignPad)
        {
            stackProbeStackHeight += alignPad;
        }
    }

    bool useDynamicStackProbe =
        (m_func->GetJITFunctionBody()->DoInterruptProbe() || !m_func->GetThreadContextInfo()->IsThreadBound()) &&
        !EncoderMD::CanEncodeModConst12(stackProbeStackHeight + Js::Constants::MinStackJIT);

    if (useDynamicStackProbe && !hasCalls)
    {
        // The dynamic probe emits a helper call, so the frame must be a non-leaf.
        this->m_func->m_unwindInfo.SetHasCalls(true);
        hasCalls = true;
    }

    if (hasCalls)
    {
        // If we need a dedicated arguments slot we mark R12 as the save register.
        // This is to imitate PUSH 0 to arguments slot.
        regEncode = RegEncode[SCRATCH_REG];
        usedRegs.Set(regEncode);
        unwindInfo->SetSavedReg(regEncode);

        // Update register save area and offset to actual in params:
        // account for r12 push - MachRegInt
        // account for frame register setup push {r11,lr} - 2 * MachRegInt
        regSaveArea += 3 * MachRegInt;
        this->m_func->m_ArgumentsOffset += 3 * MachRegInt;

        // Note: Separate push instruction is generated for r11 & lr push and hence usedRegs mask is not updated with
        // bit mask for these registers.

        if (!IsSmallStack(stackAdjust) || useDynamicStackProbe)
        {
            unwindInfo->SetSavedScratchReg(true);
            if (!usedRegs.Test(RegEncode[SP_ALLOC_SCRATCH_REG])) // If its a large stack and RegR4 is not already saved.
            {
                // If it is not a small stack we have to call __chkstk.
                // __chkstk has special calling convention and trashes R4
                // And if we're probing the stack dynamically, we need an extra reg to do the frame size calculation.
                //
                // Note that it's possible that we now no longer need a dynamic stack probe because
                // m_localStackHeight may be encodable in Mod12. However, this is a chicken-and-egg
                // problem, so we're going to stick with saving R4 even though it's possible it
                // won't get modified.
                usedRegs.Set(RegEncode[SP_ALLOC_SCRATCH_REG]);
                regSaveArea += MachRegInt;
                fpOffsetSize += MachRegInt;
                this->m_func->m_ArgumentsOffset += MachRegInt;
                unwindInfo->SetSavedReg(RegEncode[SP_ALLOC_SCRATCH_REG]);
            }
        }

        // Frame size is local var area plus stack arg area, 8-byte-aligned (if we're in a non-leaf).
        int32 bytesOnStack = stackAdjust + regSaveArea;
        int32 alignPad = Math::Align<int32>(bytesOnStack, MachStackAlignment) - bytesOnStack;
        if (alignPad)
        {
            stackAdjust += alignPad;
            if (hasTry)
            {
                // We have to align the arg area, since the helper won't allocate a locals area.
                Assert(alignPad % MachRegInt == 0);
                this->m_func->m_argSlotsForFunctionsCalled += alignPad / MachRegInt;
            }
            else
            {
                // Treat the alignment pad as part of the locals area, which will put it as far from SP as possible.
                // Note that we've already handled the change to the stack height above in checking
                // for dynamic probes.
                this->m_func->m_localStackHeight += alignPad;
            }
        }
    }

    Assert(fpOffsetSize >= 0);

    if (m_func->GetMaxInlineeArgOutSize() != 0)
    {
        // Subtracting 2 for frame pointer & return address.
        this->m_func->GetJITOutput()->SetFrameHeight(this->m_func->m_localStackHeight + this->m_func->m_ArgumentsOffset - 2 * MachRegInt);
    }

    // Generate StackProbe for large stacks first, even before register push.
    bool fStackProbeAfterProlog = IsSmallStack(stackAdjust);
    if (!fStackProbeAfterProlog)
    {
        GenerateStackProbe(insertInstr, false); // stack is already aligned in this case
    }

    IR::RegOpnd * r12Opnd = nullptr;

    // Zero-initialize dedicated arguments slot.
    if (hasCalls)
    {
        // R12 acts as a dummy zero register which we push to the arguments slot.
        //     mov r12, 0
        Assert(r12Opnd == nullptr);
        r12Opnd = IR::RegOpnd::New(nullptr, SCRATCH_REG, TyMachReg, this->m_func);
        IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, r12Opnd, IR::IntConstOpnd::New(0, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(instrMov);
        // The unwind-visible prolog starts after the r12 zeroing (see layout comment above).
        IR::LabelInstr *prologStartLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        insertInstr->InsertBefore(prologStartLabel);
        this->m_func->m_unwindInfo.SetPrologStartLabel(prologStartLabel->m_id);
    }

    if (!paramRegs.IsEmpty())
    {
        // Generate PUSH {r0-r3}
        IR::Instr * instrPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        instrPush->SetDst(IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func));
        instrPush->SetSrc1(IR::RegBVOpnd::New(paramRegs, TyMachReg, this->m_func));
        insertInstr->InsertBefore(instrPush);
    }

    // Setup frame pointer.
    if (hasCalls)
    {
        BVUnit frameRegs;
        frameRegs.Set(RegEncode[RegR11]);
        frameRegs.Set(RegEncode[RegLR]);

        // Generate PUSH {r11,lr}
        IR::Instr * instrPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        instrPush->SetDst(IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func));
        instrPush->SetSrc1(IR::RegBVOpnd::New(frameRegs, TyMachReg, this->m_func));
        insertInstr->InsertBefore(instrPush);

        // Generate MOV r11,sp
        IR::RegOpnd* spOpnd = IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func);
        IR::RegOpnd* r11Opnd = IR::RegOpnd::New(nullptr, RegR11, TyMachReg, this->m_func);
        IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, r11Opnd, spOpnd, this->m_func);
        insertInstr->InsertBefore(instrMov);
    }

    if (!usedRegs.IsEmpty())
    {
        // Generate PUSH {r4-r10,r12}
        IR::Instr * instrPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        instrPush->SetDst(IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func));
        instrPush->SetSrc1(IR::RegBVOpnd::New(usedRegs, TyMachReg, this->m_func));
        insertInstr->InsertBefore(instrPush);
    }

    if (!usedDoubleRegs.IsEmpty())
    {
        // Generate VPUSH {d8-d15}
        IR::Instr * instrPush = IR::Instr::New(Js::OpCode::VPUSH, this->m_func);
        instrPush->SetDst(IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func));
        instrPush->SetSrc1(IR::RegBVOpnd::New(usedDoubleRegs, TyMachReg, this->m_func));
        insertInstr->InsertBefore(instrPush);
    }

    if (hasTry)
    {
        // Copy the value of SP before we allocate the locals area. We'll save this value on the stack below.
        LowererMD::CreateAssign(
            IR::RegOpnd::New(nullptr, EH_STACK_SAVE_REG, TyMachReg, this->m_func),
            IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func),
            insertInstr);
    }

    bool isScratchRegisterThrashed = false;

    uint32 probeSize = stackAdjust;
    RegNum localsReg = this->m_func->GetLocalsPointer();
    if (localsReg != RegSP)
    {
        // Allocate just the locals area first and let the locals pointer point to it.
        // This may or may not generate a chkstk.
        uint32 localsSize = this->m_func->m_localStackHeight;
        if (localsSize != 0)
        {
            isScratchRegisterThrashed = GenerateStackAllocation(insertInstr, localsSize, localsSize);
            stackAdjust -= localsSize;
            if (!IsSmallStack(localsSize))
            {
                // The first alloc generated a chkstk, so we only have to probe (again) if the remaining
                // allocation also exceeds a page.
                probeSize = stackAdjust;
            }
        }

        // Set up the locals pointer.
        LowererMD::CreateAssign(
            IR::RegOpnd::New(nullptr, localsReg, TyMachReg, this->m_func),
            IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func),
            insertInstr);
    }

    if (hasTry)
    {
        // Now push the reg we used above to save the address of the top of the locals area.
        BVUnit ehReg;
        ehReg.Set(RegEncode[EH_STACK_SAVE_REG]);
        IR::Instr * instrPush =
            IR::Instr::New(
                Js::OpCode::PUSH,
                IR::IndirOpnd::New(
                    IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func),
                IR::RegBVOpnd::New(ehReg, TyMachReg, this->m_func),
                this->m_func);
        insertInstr->InsertBefore(instrPush);
    }

    // If the stack size is less than a page allocate the stack first & then do the stack probe
    // stack limit has a buffer of StackOverflowHandlingBufferPages pages and we are okay here
    if (stackAdjust != 0)
    {
        isScratchRegisterThrashed = GenerateStackAllocation(insertInstr, stackAdjust, probeSize);
    }

    // As we have already allocated the stack here, we can safely zero out the inlinee argout slot.
    // Zero initialize the first inlinee frame's argc.
    if (m_func->GetMaxInlineeArgOutSize() != 0)
    {
        // This is done post prolog, so we don't have to emit unwind data.
        if (r12Opnd == nullptr || isScratchRegisterThrashed)
        {
            // Re-materialize the zero if r12 was never set up or was trashed by chkstk.
            r12Opnd = r12Opnd ? r12Opnd : IR::RegOpnd::New(nullptr, SCRATCH_REG, TyMachReg, this->m_func);
            // mov r12, 0
            IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, r12Opnd, IR::IntConstOpnd::New(0, TyMachReg, this->m_func), this->m_func);
            insertInstr->InsertBefore(instrMov);
        }

        // STR argc, r12
        StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
        sym->m_isInlinedArgSlot = true;
        sym->m_offset = 0;
        IR::Opnd *dst = IR::SymOpnd::New(sym, 0, TyMachReg, this->m_func);
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::STR,
            dst,
            r12Opnd,
            this->m_func));
    }

    // Now do the stack probe for small stacks.
    // hasCalls catches the recursion case.
    if ((stackAdjust != 0 || hasCalls) && fStackProbeAfterProlog)
    {
        GenerateStackProbe(insertInstr, true); // stack is already aligned in this case
    }

    return entryInstr;
}
IR::Instr *
LowererMD::LowerExitInstr(IR::ExitInstr * exitInstr)
{
    // Lower the function epilog for ARM. The emitted sequence is (roughly):
    //     add sp, sp, #local stack space
    //     vpop {d8-d15}          // restore callee saved double registers.
    //     pop {r4-r10, r12}      // restore callee saved registers.
    //     pop r11                // restore r11 chain.
    //     ldr pc, [sp], #offset  // homed arguments + 1 for lr
    // The sequence must mirror what the prolog pushed so the unwind data's
    // epilog description stays consistent with the prolog.

    // See how many params were homed. We don't need to restore the values, just recover the stack space.
    int32 homedParams = this->m_func->m_unwindInfo.GetHomedParamCount();

    BOOL hasTry = this->m_func->HasTry();
    RegNum localsReg = this->m_func->GetLocalsPointer();
    int32 stackAdjust;
    if (hasTry)
    {
        if (this->m_func->DoOptimizeTry())
        {
            this->EnsureEpilogLabel();
        }
        // We'll only deallocate the arg out area then restore SP from the value saved on the stack.
        stackAdjust = (this->m_func->m_argSlotsForFunctionsCalled * MachRegInt);
    }
    else if (localsReg != RegSP)
    {
        // We're going to restore SP from the locals pointer and then deallocate only the locals area.
        LowererMD::CreateAssign(
            IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func),
            IR::RegOpnd::New(nullptr, localsReg, TyMachReg, this->m_func),
            exitInstr);
        stackAdjust = this->m_func->m_localStackHeight;
    }
    else
    {
        // We're going to deallocate the locals and out arg area at once.
        stackAdjust = (this->m_func->m_argSlotsForFunctionsCalled * MachRegInt) + this->m_func->m_localStackHeight;
    }

    // Record used callee-saved registers. This is in the form of a fixed bitfield.
    // In the try case every callee-saved register is treated as used so this epilog
    // can be shared with arm_CallEhFrame.
    BVUnit32 usedRegs;
    for (RegNum reg = FIRST_CALLEE_SAVED_GP_REG; reg <= LAST_CALLEE_SAVED_GP_REG; reg = (RegNum)(reg+1))
    {
        Assert(LinearScan::IsCalleeSaved(reg));
        if (this->m_func->m_regsUsed.Test(reg) || hasTry)
        {
            usedRegs.Set(RegEncode[reg]);
        }
    }

    // We need to have the same register saves in the prolog as the arm_CallEhFrame, so that we can use the same
    // epilog. So always allocate a slot for the stack nested func here whether we actually do have any stack
    // nested func or not
    // TODO-STACK-NESTED-FUNC: May be use a different arm_CallEhFrame for when we have stack nested func?
    if (this->m_func->HasAnyStackNestedFunc() || hasTry)
    {
        usedRegs.Set(RegEncode[RegR11]);
    }

    bool hasCalls = this->m_func->m_unwindInfo.GetHasCalls();
    if (hasCalls)
    {
        // __chkstk has special calling convention and uses R4, and dynamic stack probe on large frames use it too
        if (this->m_func->m_unwindInfo.GetSavedScratchReg())
        {
            usedRegs.Set(RegEncode[SP_ALLOC_SCRATCH_REG]);
        }

        //RegR12 acts a dummy register to deallocate stack allocated for arguments object
        usedRegs.Set(RegEncode[SCRATCH_REG]);
    }
    else if (usedRegs.IsEmpty())
    {
        // Leaf with no saved registers: the homed-param area can be folded into
        // the single SP adjustment done below.
        stackAdjust += homedParams * MachRegInt;
    }

    // 1. Deallocate the stack. In the case of a leaf function with no saved registers, let this
    // deallocation also account for the homed params.
    if (stackAdjust != 0)
    {
        GenerateStackDeallocation(exitInstr, stackAdjust);
    }
    // This is the stack size that the pdata cares about.
    this->m_func->m_unwindInfo.SetStackDepth(stackAdjust);

    if (hasTry)
    {
        // Now restore the locals area by popping the stack.
        BVUnit ehReg;
        ehReg.Set(RegEncode[EH_STACK_SAVE_REG]);
        IR::Instr * instrPop = IR::Instr::New(
            Js::OpCode::POP,
            IR::RegBVOpnd::New(ehReg, TyMachReg, this->m_func),
            IR::IndirOpnd::New(
                IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func),
            this->m_func);
        exitInstr->InsertBefore(instrPop);

        // Restore SP from the value popped into EH_STACK_SAVE_REG.
        LowererMD::CreateAssign(
            IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func),
            IR::RegOpnd::New(nullptr, EH_STACK_SAVE_REG, TyMachReg, this->m_func),
            exitInstr);
    }

    // 2. Restore saved double registers. Generate vpop {d8-d15}
    BVUnit32 savedDoubleRegs(this->m_func->m_unwindInfo.GetDoubleSavedRegList());
    if (!savedDoubleRegs.IsEmpty())
    {
        IR::Instr * instrVPop = IR::Instr::New(Js::OpCode::VPOP, this->m_func);
        instrVPop->SetDst(IR::RegBVOpnd::New(savedDoubleRegs, TyMachReg, this->m_func));
        instrVPop->SetSrc1(IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP,TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func));
        exitInstr->InsertBefore(instrVPop);
    }

    // 3. Restore saved registers. Generate pop {r4-r10,r12}
    if (!usedRegs.IsEmpty())
    {
        IR::Instr * instrPop = IR::Instr::New(Js::OpCode::POP, this->m_func);
        instrPop->SetDst(IR::RegBVOpnd::New(usedRegs, TyMachReg, this->m_func));
        instrPop->SetSrc1(IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP,TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func));
        exitInstr->InsertBefore(instrPop);
    }

    if (!hasCalls)
    {
        if (!usedRegs.IsEmpty())
        {
            // We do need to deallocate the area allocated when we homed the params (since we weren't able to fold
            // it into the first stack deallocation).
            // TODO: Consider folding this into the LDM instruction above by having it restore dummy registers.
            IR::RegOpnd * spOpnd = IR::RegOpnd::New(nullptr, this->GetRegStackPointer(), TyMachReg, this->m_func);
            IR::IntConstOpnd * adjustOpnd = IR::IntConstOpnd::New(homedParams * MachRegInt, TyMachReg, this->m_func, true);
            IR::Instr * spAdjustInstr = IR::Instr::New(Js::OpCode::ADD, spOpnd, spOpnd, adjustOpnd, this->m_func);
            exitInstr->InsertBefore(spAdjustInstr);
        }

        // LR is still valid, so return by branching to it.
        IR::Instr * instrRet = IR::Instr::New(
            Js::OpCode::RET,
            IR::RegOpnd::New(nullptr, RegPC, TyMachReg, this->m_func),
            IR::RegOpnd::New(nullptr, RegLR, TyMachReg, this->m_func),
            this->m_func);
        exitInstr->InsertBefore(instrRet);
    }
    else
    {
        // 3. Set up original frame pointer - pop r11
        usedRegs.ClearAll();
        usedRegs.Set(RegEncode[RegR11]);
        IR::Instr * instrPop = IR::Instr::New(Js::OpCode::POP, this->m_func);
        instrPop->SetDst(IR::RegBVOpnd::New(usedRegs, TyMachReg, this->m_func));
        instrPop->SetSrc1(IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, RegSP,TyMachReg, this->m_func), (int32)0, TyMachReg, this->m_func));
        exitInstr->InsertBefore(instrPop);

        // 4. Deallocate homed param area (if necessary) and return.
        // SP now points to the location where we saved LR.
        // So return by doing a LDR pc,[sp],#n, where the postincrement of SP deallocates what remains of the stack.
        // Note: the offset on this indir indicates the postincrement, which is the homed param area plus the size
        // of LR itself.
        IR::IndirOpnd * spIndir = IR::IndirOpnd::New(
            IR::RegOpnd::New(nullptr, RegSP, TyMachReg, this->m_func),
            (homedParams + 1) * MachRegInt,
            TyMachPtr, this->m_func);

        IR::Instr * instrRet = IR::Instr::New(
            Js::OpCode::LDRRET,
            IR::RegOpnd::New(nullptr, RegPC, TyMachReg, this->m_func),
            spIndir,
            this->m_func);
        exitInstr->InsertBefore(instrRet);
    }

    // Mark the end of the epilog for the unwind info.
    IR::LabelInstr *epilogEndLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    exitInstr->InsertBefore(epilogEndLabel);
    this->m_func->m_unwindInfo.SetEpilogEndLabel(epilogEndLabel->m_id);

    return exitInstr;
}
  1450. IR::Instr *
  1451. LowererMD::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * argSrc, ushort extraArgs)
  1452. {
  1453. // Spread moves down the argument slot by one.
  1454. // LowerCallArgs will handle the extraArgs. We only need to specify the argument number
  1455. // i.e 1 and not + extraArgs as done in AMD64
  1456. IR::SymOpnd *argOpnd = IR::SymOpnd::New(this->m_func->m_symTable->GetArgSlotSym(1), TyVar, this->m_func);
  1457. IR::Instr *argInstr = IR::Instr::New(Js::OpCode::ArgOut_A, argOpnd, argSrc, this->m_func);
  1458. instr->InsertBefore(argInstr);
  1459. // Insert the argument into the arg chain.
  1460. if (m_lowerer->IsSpreadCall(instr))
  1461. {
  1462. // Spread calls need LdSpreadIndices as the last arg in the arg chain.
  1463. instr = m_lowerer->GetLdSpreadIndicesInstr(instr);
  1464. }
  1465. IR::Opnd *linkOpnd = instr->UnlinkSrc2();
  1466. argInstr->SetSrc2(linkOpnd);
  1467. instr->SetSrc2(argOpnd);
  1468. return argInstr;
  1469. }
IR::Instr *
LowererMD::LowerTry(IR::Instr * tryInstr, IR::JnHelperMethod helperMethod)
{
    // Lower a try by calling the EH helper with the try/handler addresses and
    // the frame state it needs, then branching to whatever continuation
    // address the helper returns.
    // NOTE: helper arguments are loaded in reverse order (arg 7 first), as
    // LoadHelperArgument builds the argument list back to front.

    // Mark the entry to the try
    IR::Instr * instr = tryInstr->GetNextRealInstrOrLabel();
    AssertMsg(instr->IsLabelInstr(), "No label at the entry to a try?");
    IR::LabelInstr * tryAddr = instr->AsLabelInstr();

    // Arg 7: ScriptContext
    this->m_lowerer->LoadScriptContext(tryAddr);

    if (tryInstr->m_opcode == Js::OpCode::TryCatch || this->m_func->DoOptimizeTry())
    {
        // Arg 6 : hasBailedOutOffset - stack offset of the "has bailed out" flag
        IR::Opnd * hasBailedOutOffset = IR::IntConstOpnd::New(this->m_func->m_hasBailedOutSym->m_offset, TyInt32, this->m_func);
        this->LoadHelperArgument(tryAddr, hasBailedOutOffset);
    }

    // Arg 5: arg out size (materialized late by the LDARGOUTSZ pseudo-op)
    IR::RegOpnd * argOutSize = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LDARGOUTSZ, argOutSize, this->m_func);
    tryAddr->InsertBefore(instr);
    this->LoadHelperArgument(tryAddr, argOutSize);

    // Arg 4: locals pointer
    IR::RegOpnd * localsPtr = IR::RegOpnd::New(nullptr, this->m_func->GetLocalsPointer(), TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, localsPtr);

    // Arg 3: frame pointer
    IR::RegOpnd * framePtr = IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, framePtr);

    // Arg 2: helper address (the handler label, i.e. the try branch's target)
    IR::LabelInstr * helperAddr = tryInstr->AsBranchInstr()->GetTarget();
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(helperAddr, this->m_func));

    // Arg 1: try address
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(tryAddr, this->m_func));

    // Call the helper; its return value is the address at which execution resumes.
    IR::RegOpnd *continuationAddr =
        IR::RegOpnd::New(StackSym::New(TyMachReg,this->m_func), RETURN_REG, TyMachReg, this->m_func);
    IR::Instr * callInstr = IR::Instr::New(
        Js::OpCode::Call, continuationAddr, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    tryAddr->InsertBefore(callInstr);
    this->LowerCall(callInstr, 0);

    // Jump to the continuation address supplied by the helper
    IR::BranchInstr *branchInstr = IR::MultiBranchInstr::New(Js::OpCode::BX, continuationAddr, this->m_func);
    tryAddr->InsertBefore(branchInstr);

    return tryInstr->m_prev;
}
  1513. IR::Instr *
  1514. LowererMD::LowerLeave(IR::Instr * leaveInstr, IR::LabelInstr * targetInstr, bool fromFinalLower, bool isOrphanedLeave)
  1515. {
  1516. if (isOrphanedLeave)
  1517. {
  1518. Assert(this->m_func->IsLoopBodyInTry());
  1519. leaveInstr->m_opcode = Js::OpCode::B;
  1520. return leaveInstr->m_prev;
  1521. }
  1522. IR::Instr * instrPrev = leaveInstr->m_prev;
  1523. IR::LabelOpnd *labelOpnd = IR::LabelOpnd::New(targetInstr, this->m_func);
  1524. this->LowerEHRegionReturn(leaveInstr, labelOpnd);
  1525. if (fromFinalLower)
  1526. {
  1527. instrPrev = leaveInstr->m_prev;
  1528. }
  1529. leaveInstr->Remove();
  1530. return instrPrev;
  1531. }
  1532. IR::Instr *
  1533. LowererMD::LowerLeaveNull(IR::Instr * leaveInstr)
  1534. {
  1535. IR::Instr * instrPrev = leaveInstr->m_prev;
  1536. // Return a NULL continuation address to the caller to indicate that the finally did not seize the flow.
  1537. this->LowerEHRegionReturn(leaveInstr, IR::IntConstOpnd::New(0, TyMachReg, this->m_func));
  1538. leaveInstr->Remove();
  1539. return instrPrev;
  1540. }
  1541. IR::Instr *
  1542. LowererMD::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  1543. {
  1544. IR::RegOpnd *retReg = IR::RegOpnd::New(nullptr, RETURN_REG, TyMachReg, this->m_func);
  1545. // Load the continuation address into the return register.
  1546. LowererMD::CreateAssign(retReg, targetOpnd, insertBeforeInstr);
  1547. IR::LabelInstr *epilogLabel = this->EnsureEpilogLabel();
  1548. IR::BranchInstr *jmpInstr = IR::BranchInstr::New(Js::OpCode::B, epilogLabel, this->m_func);
  1549. insertBeforeInstr->InsertBefore(jmpInstr);
  1550. // return the last instruction inserted
  1551. return jmpInstr;
  1552. }
  1553. IR::Instr *
  1554. LowererMD::LowerCatch(IR::Instr * instr)
  1555. {
  1556. // t1 = catch => t2(r1) = catch
  1557. // => t1 = t2(r1)
  1558. IR::Opnd *catchObj = instr->UnlinkDst();
  1559. IR::RegOpnd *catchParamReg = IR::RegOpnd::New(TyMachPtr, this->m_func);
  1560. catchParamReg->SetReg(CATCH_OBJ_REG);
  1561. instr->SetDst(catchParamReg);
  1562. instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, catchObj, catchParamReg, this->m_func));
  1563. return instr->m_prev;
  1564. }
  1565. ///----------------------------------------------------------------------------
  1566. ///
  1567. /// LowererMD::Init
  1568. ///
  1569. ///----------------------------------------------------------------------------
  1570. void
  1571. LowererMD::Init(Lowerer *lowerer)
  1572. {
  1573. m_lowerer = lowerer;
  1574. // The arg slot count computed by an earlier phase (e.g., IRBuilder) doesn't work for
  1575. // ARM if it accounts for nesting. Clear it here and let Lower compute its own value.
  1576. this->m_func->m_argSlotsForFunctionsCalled = 0;
  1577. }
  1578. ///----------------------------------------------------------------------------
  1579. ///
  1580. /// LowererMD::LoadInputParamPtr
  1581. ///
  1582. /// Load the address of the start of the passed-in parameters not including
  1583. /// the this parameter.
  1584. ///
  1585. ///----------------------------------------------------------------------------
  1586. IR::Instr *
  1587. LowererMD::LoadInputParamPtr(IR::Instr * instrInsert, IR::RegOpnd * optionalDstOpnd /* = nullptr */)
  1588. {
  1589. if (this->m_func->GetJITFunctionBody()->IsCoroutine())
  1590. {
  1591. IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
  1592. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
  1593. IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
  1594. return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
  1595. }
  1596. else
  1597. {
  1598. StackSym * paramSym = GetImplicitParamSlotSym(3);
  1599. IR::Instr * instr = this->LoadStackAddress(paramSym);
  1600. instrInsert->InsertBefore(instr);
  1601. return instr;
  1602. }
  1603. }
///----------------------------------------------------------------------------
///
/// LowererMD::LoadInputParamCount
///
/// Load the passed-in parameter count from the appropriate r11 slot.
/// `adjust` (must be <= 0) is subtracted from the resulting count;
/// `needFlags` makes the final SUB set condition flags.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LoadInputParamCount(IR::Instr * instrInsert, int adjust, bool needFlags)
{
    IR::Instr *   instr;
    IR::RegOpnd * dstOpnd;
    IR::SymOpnd * srcOpnd;

    //  LDR  Rz, CallInfo
    //  LSR  Rx, Rz, #28  // Get CallEval bit as bottom bit.
    //  AND  Rx, Rx, #1   // Mask higher 3 bits, Rx has 1 if FrameDisplay is present, zero otherwise
    //  LSL  Rz, Rz, #8   // Mask higher 8 bits to get the number of arguments
    //  LSR  Rz, Rz, #8
    //  SUB  Rz, Rz, Rx   // Now Rz has the right number of parameters

    srcOpnd = Lowerer::LoadCallInfo(instrInsert);
    dstOpnd = IR::RegOpnd::New(TyMachReg,  this->m_func);

    instr = IR::Instr::New(Js::OpCode::LDR, dstOpnd, srcOpnd, this->m_func);
    instrInsert->InsertBefore(instr);

    // mask the "calling eval" bit and subtract it from the incoming count.
    // ("Calling eval" means the last param is the frame display, which only the eval built-in should see.)
    IR::RegOpnd * evalBitOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    // Shift the eval flag down to bit 0: the shift amount is the flag's bit
    // position within the flags field plus the width of the count field.
    instr = IR::Instr::New(Js::OpCode::LSR, evalBitOpnd, dstOpnd, IR::IntConstOpnd::New(Math::Log2(Js::CallFlags_ExtraArg) + Js::CallInfo::ksizeofCount, TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);

    // Mask off other call flags from callinfo
    instr = IR::Instr::New(Js::OpCode::AND, evalBitOpnd, evalBitOpnd, IR::IntConstOpnd::New(0x01, TyUint8, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);

    // Isolate the argument count: shift left then right by the width of the
    // flags field to clear the top bits.
    instr = IR::Instr::New(Js::OpCode::LSL, dstOpnd, dstOpnd, IR::IntConstOpnd::New(Js::CallInfo::ksizeofCallFlags, TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);

    instr = IR::Instr::New(Js::OpCode::LSR, dstOpnd, dstOpnd, IR::IntConstOpnd::New(Js::CallInfo::ksizeofCallFlags, TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);

    if (adjust != 0)
    {
        Assert(adjust < 0);
        // Fold the (negative) adjustment into the value subtracted below so
        // only one flag-setting SUB is emitted.
        Lowerer::InsertAdd(false, evalBitOpnd, evalBitOpnd, IR::IntConstOpnd::New(-adjust, TyUint32, this->m_func), instrInsert);
    }

    return Lowerer::InsertSub(needFlags, dstOpnd, dstOpnd, evalBitOpnd, instrInsert);
}
IR::Instr *
LowererMD::LoadStackArgPtr(IR::Instr * instr)
{
    // Rewrite `instr` in place into an ADD that computes the address of the
    // first user argument (past "this"), choosing the base according to the
    // kind of frame we're in.
    if (this->m_func->IsLoopBody())
    {
        // Get the first user param from the interpreter frame instance that was passed in.
        // These args don't include the func object and callinfo; we just need to advance past "this".

        // t1 = LDR [prm1 + m_inParams]
        // dst = ADD t1, sizeof(var)

        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInParams();
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        LowererMD::CreateAssign(tmpOpnd, indirOpnd, instr);

        instr->SetSrc1(tmpOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(sizeof(Js::Var), TyMachReg, this->m_func));
    }
    else if (this->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        // Coroutine: delegate to LoadInputParamPtr, transferring this
        // instruction's dst, and discard the original instruction.
        IR::Instr *instr2 = LoadInputParamPtr(instr, instr->UnlinkDst()->AsRegOpnd());
        instr->Remove();
        instr = instr2;
    }
    else
    {
        // Get the args pointer relative to r11. We assume that r11 is set up, since we'll only be looking
        // for the stack arg pointer in a non-leaf.

        // dst = ADD r11, "this" offset + sizeof(var)

        instr->SetSrc1(IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func));
        instr->SetSrc2(IR::IntConstOpnd::New((ArgOffsetFromFramePtr + Js::JavascriptFunctionArgIndex_SecondScriptArg) * sizeof(Js::Var), TyMachReg, this->m_func));
    }

    // NOTE: in the coroutine path `instr` is already the LEA produced by
    // LoadInputParamPtr; setting its opcode to ADD matches the original flow.
    instr->m_opcode = Js::OpCode::ADD;

    return instr->m_prev;
}
  1681. IR::Instr *
  1682. LowererMD::LoadArgumentsFromFrame(IR::Instr * instr)
  1683. {
  1684. IR::RegOpnd *baseOpnd;
  1685. int32 offset;
  1686. if (this->m_func->IsLoopBody())
  1687. {
  1688. // Get the arguments ptr from the interpreter frame instance that was passed in.
  1689. Assert(this->m_func->m_loopParamSym);
  1690. baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  1691. offset = Js::InterpreterStackFrame::GetOffsetOfArguments();
  1692. }
  1693. else
  1694. {
  1695. // Get the arguments relative to the frame pointer.
  1696. baseOpnd = IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func);
  1697. offset = -MachArgsSlotOffset;
  1698. }
  1699. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyMachReg, this->m_func));
  1700. this->ChangeToAssign(instr);
  1701. return instr->m_prev;
  1702. }
  1703. // load argument count as I4
  1704. IR::Instr *
  1705. LowererMD::LoadArgumentCount(IR::Instr * instr)
  1706. {
  1707. IR::RegOpnd *baseOpnd;
  1708. int32 offset;
  1709. if (this->m_func->IsLoopBody())
  1710. {
  1711. // Pull the arg count from the interpreter frame instance that was passed in.
  1712. // (The callinfo in the loop body's frame just shows the single parameter, the interpreter frame.)
  1713. Assert(this->m_func->m_loopParamSym);
  1714. baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  1715. offset = Js::InterpreterStackFrame::GetOffsetOfInSlotsCount();
  1716. }
  1717. else
  1718. {
  1719. baseOpnd = IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, this->m_func);
  1720. offset = (ArgOffsetFromFramePtr + Js::JavascriptFunctionArgIndex_CallInfo) * sizeof(Js::Var);
  1721. }
  1722. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyInt32, this->m_func));
  1723. this->ChangeToAssign(instr);
  1724. return instr->m_prev;
  1725. }
///----------------------------------------------------------------------------
///
/// LowererMD::LoadHeapArguments
///
/// Load the arguments object
/// NOTE: The same caveat regarding arguments passed on the stack applies here
///       as in LoadInputParamCount above.
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LoadHeapArguments(IR::Instr * instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;

    IR::Instr * instrPrev = instrArgs->m_prev;
    if (func->IsStackArgsEnabled())
    {
        // Stack-args optimization: no heap object is created; the slot just
        // gets a null initial value.
        // The initial args slot value is zero.
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)
        // NOTE: arguments are loaded in reverse order (s7 first).

        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));

        // s6 = memory context
        this->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = array of property ID's
        // Fall back to the script context's null address when the function has
        // no formals property-id array.
        intptr_t formalsPropIdArray = instrArgs->m_func->GetJITFunctionBody()->GetFormalsPropIdArrayAddr();
        if (!formalsPropIdArray)
        {
            formalsPropIdArray = instrArgs->m_func->GetScriptContextInfo()->GetNullAddr();
        }

        IR::Opnd * argArray = IR::AddrOpnd::New(formalsPropIdArray, IR::AddrOpndKindDynamicMisc, m_func);
        this->LoadHelperArgument(instrArgs, argArray);

        // s4 = local frame instance
        IR::Opnd * frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            // s3 = address of first actual argument (after "this").
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this").
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func));

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            LowererMD::CreateAssign(argObjSlotOpnd,instrArgs->GetDst(), instrArgs->m_next);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            // Stack looks like (function object)+0, (arg count)+4, (this)+8, actual args
            IR::Instr * instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s2 = actual argument count (without counting "this")
            instr = this->LoadInputParamCount(instrArgs, -1);
            IR::Opnd * opndInputParamCount = instr->GetDst();
            this->LoadHelperArgument(instrArgs, opndInputParamCount);

            // s1 = current function
            StackSym * paramSym = GetImplicitParamSlotSym(0);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, FRAME_REG , TyMachReg, func),
                -MachArgsSlotOffset, TyMachPtr, m_func);
            LowererMD::CreateAssign(indirOpnd, instrArgs->GetDst(), instrArgs->m_next);
        }

        this->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }

    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMD::LoadHeapArgsCached
///
/// Load the heap-based arguments object using a cached scope
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LoadHeapArgsCached(IR::Instr * instrArgs)
{
    Assert(!this->m_func->GetJITFunctionBody()->IsGenerator());
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr * instrPrev = instrArgs->m_prev;

    if (instrArgs->m_func->IsStackArgsEnabled())
    {
        // Stack-args optimization: no heap object is created; the slot just
        // gets a null initial value.
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));

        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = local frame instance
        // s4 = address of first actual argument (after "this")
        // s3 = formal argument count
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArgsCached(s1, s2, s3, s4, s5, s6, s7)
        // NOTE: arguments are loaded in reverse order (s7 first).

        // s7 = formals are let decls
        IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
        this->LoadHelperArgument(instrArgs, formalsAreLetDecls);

        // s6 = memory context
        this->m_lowerer->LoadScriptContext(instrArgs);

        // s5 = local frame instance
        IR::Opnd * frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);

        if (func->IsInlinee())
        {
            // s4 = address of first actual argument (after "this").
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this").
            uint32 formalsCount = func->GetJITFunctionBody()->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyUint32, func));

            // s2 = actual argument count (without counting "this").
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func));

            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());

            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            LowererMD::CreateAssign(argObjSlotOpnd, instrArgs->GetDst(), instrArgs->m_next);
        }
        else
        {
            // s4 = address of first actual argument (after "this")
            IR::Instr * instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s3 = formal argument count (without counting "this")
            // NOTE(review): this path uses func->GetInParamsCount() and TyMachReg,
            // while the inlinee path uses GetJITFunctionBody()->GetInParamsCount()
            // and TyUint32 — presumably equivalent here; confirm before changing.
            uint32 formalsCount = func->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyMachReg, func));

            // s2 = actual argument count (without counting "this")
            instr = this->LoadInputParamCount(instrArgs, -1);
            this->LoadHelperArgument(instrArgs, instr->GetDst());

            // s1 = current function
            StackSym * paramSym = GetImplicitParamSlotSym(0);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);

            // Save the newly-created args object to its dedicated stack slot.
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, FRAME_REG, TyMachReg, func),
                -MachArgsSlotOffset, TyMachPtr, m_func);
            LowererMD::CreateAssign(indirOpnd, instrArgs->GetDst(), instrArgs->m_next);
        }

        this->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    }

    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMD::ChangeToHelperCall
///
/// Change the current instruction to a call to the given helper.
/// If the instruction carries bailout info, the bailout is split off into a
/// separate instruction and lowered after the call (its helper arguments
/// must not interleave with this call's).
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::ChangeToHelperCall(IR::Instr * callInstr,  IR::JnHelperMethod helperMethod, IR::LabelInstr *labelBailOut,
                              IR::Opnd *opndInstance, IR::PropertySymOpnd *propSymOpnd, bool isHelperContinuation)
{
    IR::Instr * bailOutInstr = callInstr;
    if (callInstr->HasBailOutInfo())
    {
        if (callInstr->GetBailOutKind() == IR::BailOutOnNotPrimitive)
        {
            // Keep the bailout on the original instruction (now a
            // BailOnNotPrimitive checking opndInstance) and move the helper
            // call to a fresh instruction inserted before it.
            callInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
            bailOutInstr->TransferTo(callInstr);
            bailOutInstr->InsertBefore(callInstr);

            bailOutInstr->m_opcode = Js::OpCode::BailOnNotPrimitive;
            bailOutInstr->SetSrc1(opndInstance);
        }
        else
        {
            // Generic case: split the implicit-call bailout off the call.
            bailOutInstr = this->m_lowerer->SplitBailOnImplicitCall(callInstr);
        }
    }

    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->GetHelperArgsCount(), m_func);
    if (helperCallOpnd->IsDiagHelperCallOpnd())
    {
        // Load arguments for the wrapper.
        this->LoadHelperArgument(callInstr, IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKindDynamicMisc, m_func));
        this->m_lowerer->LoadScriptContext(callInstr);
    }
    callInstr->SetSrc1(helperCallOpnd);

    IR::Instr * instrRet = this->LowerCall(callInstr, 0);

    if (bailOutInstr != callInstr)
    {
        // The bailout needs to be lowered after we lower the helper call because the helper argument
        // has already been loaded. We need to drain them on AMD64 before starting another helper call
        if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotObject)
        {
            this->m_lowerer->LowerBailOnNotObject(bailOutInstr, nullptr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotPrimitive)
        {
            this->m_lowerer->LowerBailOnTrue(bailOutInstr, labelBailOut);
        }
        else
        {
            this->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr, nullptr, labelBailOut, propSymOpnd, isHelperContinuation);
        }
    }

    return instrRet;
}
// Lower 'instr' to a helper call whose last argument is the script context:
// the context is pushed first (helper args are loaded in reverse), then the
// instruction is converted to the helper call proper.
IR::Instr* LowererMD::ChangeToHelperCallMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    this->m_lowerer->LoadScriptContext(instr);
    return this->ChangeToHelperCall(instr, helperMethod);
}
///----------------------------------------------------------------------------
///
/// LowererMD::ChangeToAssign
///
/// Change to a copy. Handle riscification of operands.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::ChangeToAssignNoBarrierCheck(IR::Instr * instr)
{
    // Plain copy typed from the destination; skips any write-barrier handling.
    return ChangeToAssign(instr, instr->GetDst()->GetType());
}
IR::Instr *
LowererMD::ChangeToAssign(IR::Instr * instr)
{
    // Route through the write-barrier-aware path (currently a pass-through on ARM).
    return ChangeToWriteBarrierAssign(instr, instr->m_func);
}
  1977. IR::Instr *
  1978. LowererMD::ChangeToAssign(IR::Instr * instr, IRType type)
  1979. {
  1980. Assert(!instr->HasBailOutInfo() || instr->GetBailOutKind() == IR::BailOutExpectingInteger
  1981. || instr->GetBailOutKind() == IR::BailOutExpectingString);
  1982. IR::Opnd *src = instr->GetSrc1();
  1983. if (src->IsImmediateOpnd() || src->IsLabelOpnd())
  1984. {
  1985. instr->m_opcode = Js::OpCode::LDIMM;
  1986. }
  1987. else if(type == TyFloat32 && instr->GetDst()->IsRegOpnd())
  1988. {
  1989. Assert(instr->GetSrc1()->IsFloat32());
  1990. instr->m_opcode = Js::OpCode::VLDR32;
  1991. // Note that we allocate double register for single precision floats as well, as the register allocator currently
  1992. // does not support 32-bit float registers
  1993. instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyFloat64, instr->m_func));
  1994. if(instr->GetSrc1()->IsRegOpnd())
  1995. {
  1996. instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyFloat64, instr->m_func));
  1997. }
  1998. }
  1999. else
  2000. {
  2001. instr->m_opcode = LowererMD::GetMoveOp(type);
  2002. }
  2003. LegalizeMD::LegalizeInstr(instr, false);
  2004. return instr;
  2005. }
// Assign with a recycler write barrier where the target requires one.
// The ARM JIT does not emit a write barrier yet, so this is currently a
// straight pass-through to the no-barrier assign.
IR::Instr *
LowererMD::ChangeToWriteBarrierAssign(IR::Instr * assignInstr, const Func* func)
{
#ifdef RECYCLER_WRITE_BARRIER_JIT
    // WriteBarrier-TODO- Implement ARM JIT
#endif
    return ChangeToAssignNoBarrierCheck(assignInstr);
}
///----------------------------------------------------------------------------
///
/// LowererMD::ChangeToLea
///
/// Change to a load-effective-address
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::ChangeToLea(IR::Instr * instr, bool postRegAlloc)
{
    // LEA requires: register dst, a single memory-form src (indir or sym), no src2.
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsRegOpnd());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd());
    Assert(!instr->GetSrc2());
    instr->m_opcode = Js::OpCode::LEA;
    // Legalize in place; postRegAlloc tells legalization whether it may still
    // create new virtual registers.
    Legalize(instr, postRegAlloc);
    return instr;
}
///----------------------------------------------------------------------------
///
/// LowererMD::CreateAssign
///
/// Create a copy from src to dst. Let ChangeToAssign handle riscification
/// of operands. The move is inserted before instrInsertPt.
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::CreateAssign(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsertPt, bool generateWriteBarrier)
{
    return Lowerer::InsertMove(dst, src, instrInsertPt, generateWriteBarrier);
}
  2046. ///----------------------------------------------------------------------------
  2047. ///
  2048. /// LowererMD::LowerRet
  2049. ///
  2050. /// Lower Ret to "MOV EAX, src"
  2051. /// The real RET is inserted at the exit of the function when emitting the
  2052. /// epilog.
  2053. ///
  2054. ///----------------------------------------------------------------------------
  2055. IR::Instr *
  2056. LowererMD::LowerRet(IR::Instr * retInstr)
  2057. {
  2058. IR::RegOpnd *retReg = IR::RegOpnd::New(TyMachReg, m_func);
  2059. retReg->SetReg(RETURN_REG);
  2060. Lowerer::InsertMove(retReg, retInstr->UnlinkSrc1(), retInstr);
  2061. retInstr->SetSrc1(retReg);
  2062. return retInstr;
  2063. }
///----------------------------------------------------------------------------
///
/// LowererMD::LowerUncondBranch
///
/// Unconditional branch lowers directly to the ARM B instruction.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerUncondBranch(IR::Instr * instr)
{
    instr->m_opcode = Js::OpCode::B;
    return instr;
}
///----------------------------------------------------------------------------
///
/// LowererMD::LowerMultiBranch
///
/// Multi-way (indirect) branch lowers to BX — branch to a register target.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerMultiBranch(IR::Instr * instr)
{
    instr->m_opcode = Js::OpCode::BX;
    return instr;
}
///----------------------------------------------------------------------------
///
/// LowererMD::MDBranchOpcode
///
/// Map HIR branch opcode to machine-dependent equivalent.
/// Signed condition codes are used for the ordinary relational branches;
/// the BrUn* opcodes map to unsigned condition codes (HI/CS/CC/LS).
///
///----------------------------------------------------------------------------
Js::OpCode
LowererMD::MDBranchOpcode(Js::OpCode opcode)
{
    switch (opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrAddr_A:
        return Js::OpCode::BEQ;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotAddr_A:
        return Js::OpCode::BNE;

    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrNotGe_A:
        return Js::OpCode::BLT;

    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotGt_A:
        return Js::OpCode::BLE;

    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrNotLe_A:
        return Js::OpCode::BGT;

    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrNotLt_A:
        return Js::OpCode::BGE;

    // Unsigned comparisons: higher / carry-set / carry-clear / lower-or-same.
    case Js::OpCode::BrUnGt_A:
        return Js::OpCode::BHI;

    case Js::OpCode::BrUnGe_A:
        return Js::OpCode::BCS;

    case Js::OpCode::BrUnLt_A:
        return Js::OpCode::BCC;

    case Js::OpCode::BrUnLe_A:
        return Js::OpCode::BLS;

    default:
        AssertMsg(0, "NYI");
        return opcode;
    }
}
// Map HIR branch opcode to the machine branch for an UNSIGNED comparison:
// the relational branches use the unsigned condition codes
// (BCC = unsigned <, BLS = unsigned <=, BHI = unsigned >, BCS = unsigned >=);
// equality/inequality are the same as the signed mapping.
Js::OpCode
LowererMD::MDUnsignedBranchOpcode(Js::OpCode opcode)
{
    switch (opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrAddr_A:
        return Js::OpCode::BEQ;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotAddr_A:
        return Js::OpCode::BNE;

    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrNotGe_A:
        return Js::OpCode::BCC;

    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotGt_A:
        return Js::OpCode::BLS;

    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrNotLe_A:
        return Js::OpCode::BHI;

    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrNotLt_A:
        return Js::OpCode::BCS;

    default:
        AssertMsg(0, "NYI");
        return opcode;
    }
}
  2169. Js::OpCode LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode opcode)
  2170. {
  2171. Assert(opcode == Js::OpCode::BrLt_A || opcode == Js::OpCode::BrGe_A);
  2172. return opcode == Js::OpCode::BrLt_A ? Js::OpCode::BMI : Js::OpCode::BPL;
  2173. }
  2174. void LowererMD::ChangeToAdd(IR::Instr *const instr, const bool needFlags)
  2175. {
  2176. Assert(instr);
  2177. Assert(instr->GetDst());
  2178. Assert(instr->GetSrc1());
  2179. Assert(instr->GetSrc2());
  2180. if(instr->GetDst()->IsFloat64())
  2181. {
  2182. Assert(instr->GetSrc1()->IsFloat64());
  2183. Assert(instr->GetSrc2()->IsFloat64());
  2184. Assert(!needFlags);
  2185. instr->m_opcode = Js::OpCode::VADDF64;
  2186. return;
  2187. }
  2188. instr->m_opcode = needFlags ? Js::OpCode::ADDS : Js::OpCode::ADD;
  2189. }
  2190. void LowererMD::ChangeToSub(IR::Instr *const instr, const bool needFlags)
  2191. {
  2192. Assert(instr);
  2193. Assert(instr->GetDst());
  2194. Assert(instr->GetSrc1());
  2195. Assert(instr->GetSrc2());
  2196. if(instr->GetDst()->IsFloat64())
  2197. {
  2198. Assert(instr->GetSrc1()->IsFloat64());
  2199. Assert(instr->GetSrc2()->IsFloat64());
  2200. Assert(!needFlags);
  2201. instr->m_opcode = Js::OpCode::VSUBF64;
  2202. return;
  2203. }
  2204. instr->m_opcode = needFlags ? Js::OpCode::SUBS : Js::OpCode::SUB;
  2205. }
// Rewrite 'instr' as a machine shift (LSL/ASR/LSR) and mask the shift count
// to 5 bits as JavaScript semantics require. Constant counts are masked at
// compile time (a zero count degenerates to MOV); variable counts get an
// explicit AND inserted before the shift.
void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    Func *const func = instr->m_func;

    switch(instr->m_opcode)
    {
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shl_I4:
            Assert(!needFlags); // not implemented
            instr->m_opcode = Js::OpCode::LSL;
            break;

        case Js::OpCode::Shr_A:
        case Js::OpCode::Shr_I4:
            // Arithmetic (sign-propagating) right shift; ASRS when flags are needed.
            instr->m_opcode = needFlags ? Js::OpCode::ASRS : Js::OpCode::ASR;
            break;

        case Js::OpCode::ShrU_A:
        case Js::OpCode::ShrU_I4:
            Assert(!needFlags); // not implemented
            instr->m_opcode = Js::OpCode::LSR;
            break;

        default:
            Assert(false);
            __assume(false);
    }

    // Javascript requires the ShiftCount is masked to the bottom 5 bits.
    if(instr->GetSrc2()->IsIntConstOpnd())
    {
        // In the constant case, do the mask manually.
        IntConstType immed = instr->GetSrc2()->AsIntConstOpnd()->GetValue() & 0x1f;
        if (immed == 0)
        {
            // Shift by zero is just a move, and the shift-right instructions
            // don't permit encoding of a zero shift amount.
            instr->m_opcode = Js::OpCode::MOV;
            instr->FreeSrc2();
        }
        else
        {
            instr->GetSrc2()->AsIntConstOpnd()->SetValue(immed);
        }
    }
    else
    {
        // In the variable case, generate code to do the mask:
        // newReg = AND src2, 0x1f; then shift by newReg.
        IR::Opnd *const src2 = instr->UnlinkSrc2();
        instr->SetSrc2(IR::RegOpnd::New(TyMachReg, func));
        IR::Instr *const newInstr = IR::Instr::New(
            Js::OpCode::AND, instr->GetSrc2(), src2, IR::IntConstOpnd::New(0x1f, TyInt8, func), func);
        instr->InsertBefore(newInstr);
    }
}
const uint16
LowererMD::GetFormalParamOffset()
{
    // In ARM formal params are offset into the param area.
    // So we only count the non-user params (Function object & CallInfo) and
    // let the encoder account for the saved R11 and LR.
    return 2;
}
///----------------------------------------------------------------------------
///
/// LowererMD::LowerCondBranch
///
/// Lower a HIR conditional branch to a compare followed by a machine branch.
/// Single-source boolean branches compare against zero; two-source branches
/// use CMP (integers/vars) or VCMPF64+VMRS (doubles) and map the opcode via
/// MDBranchOpcode.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerCondBranch(IR::Instr * instr)
{
    AssertMsg(instr->GetSrc1() != nullptr, "Expected src opnds on conditional branch");

    IR::Opnd *  opndSrc1 = instr->UnlinkSrc1();
    IR::Instr * instrPrev = nullptr;

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrOnNotEmpty:
    case Js::OpCode::BrNotNull_A:
    case Js::OpCode::BrOnObject_A:
    case Js::OpCode::BrOnClassConstructor:
    case Js::OpCode::BrOnBaseConstructorKind:
        // Branch taken when src1 != 0: CMP src1, #0 ; BNE target.
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(opndSrc1->IsRegOpnd(),"NYI for other operands");
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instr->InsertBefore(instrPrev);
        instr->m_opcode = Js::OpCode::BNE;
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrOnEmpty:
        // Branch taken when src1 == 0: CMP src1, #0 ; BEQ target.
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(opndSrc1->IsRegOpnd(),"NYI for other operands");
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instr->InsertBefore(instrPrev);
        instr->m_opcode = Js::OpCode::BEQ;
        break;

    default:
        IR::Opnd * opndSrc2 = instr->UnlinkSrc2();
        AssertMsg(opndSrc2 != nullptr, "Expected 2 src's on non-boolean branch");

        if (opndSrc1->IsFloat64())
        {
            AssertMsg(opndSrc1->IsRegOpnd(),"NYI for other operands");
            Assert(opndSrc2->IsFloat64());
            Assert(opndSrc2->IsRegOpnd() && opndSrc1->IsRegOpnd());
            //This comparison updates the FPSCR - floating point status control register
            instrPrev = IR::Instr::New(Js::OpCode::VCMPF64, this->m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
            LegalizeMD::LegalizeInstr(instrPrev, false);

            //Transfer the result to ARM status register control register.
            instrPrev = IR::Instr::New(Js::OpCode::VMRS, this->m_func);
            instr->InsertBefore(instrPrev);
            instr->m_opcode = LowererMD::MDBranchOpcode(instr->m_opcode);
        }
        else
        {
            AssertMsg(opndSrc2->IsRegOpnd() || opndSrc2->IsIntConstOpnd() || (opndSrc2->IsAddrOpnd()), "NYI for other operands");
            instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
            LegalizeMD::LegalizeInstr(instrPrev, false);
            instr->m_opcode = MDBranchOpcode(instr->m_opcode);
        }
        break;
    }
    return instr;
}
  2339. ///----------------------------------------------------------------------------
  2340. ///
  2341. /// LowererMD::ForceDstToReg
  2342. ///
  2343. ///----------------------------------------------------------------------------
  2344. IR::Instr*
  2345. LowererMD::ForceDstToReg(IR::Instr *instr)
  2346. {
  2347. IR::Opnd * dst = instr->GetDst();
  2348. if (dst->IsRegOpnd())
  2349. {
  2350. return instr;
  2351. }
  2352. IR::Instr * newInstr = instr->SinkDst(Js::OpCode::Ld_A);
  2353. LowererMD::ChangeToAssign(newInstr);
  2354. return newInstr;
  2355. }
// Produce an operand holding the current function object in 'functionObjOpnd'
// (out-param). With no src1 the function object is loaded from the first
// implicit parameter slot; otherwise (inlinee case) src1 itself is taken.
// Returns the first instruction of any code emitted (or instr->m_prev).
IR::Instr *
LowererMD::LoadFunctionObjectOpnd(IR::Instr *instr, IR::Opnd *&functionObjOpnd)
{
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Instr * instrPrev = instr->m_prev;
    if (src1 == nullptr)
    {
        IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
        //function object is first argument and mark it as IsParamSlotSym.
        StackSym *paramSym = GetImplicitParamSlotSym(0);
        IR::SymOpnd *paramOpnd = IR::SymOpnd::New(paramSym, TyMachPtr, m_func);

        instrPrev = LowererMD::CreateAssign(regOpnd, paramOpnd, instr);
        functionObjOpnd = instrPrev->GetDst();
    }
    else
    {
        // Inlinee LdHomeObj, use the function object opnd on the instruction
        functionObjOpnd = instr->UnlinkSrc1();
        if (!functionObjOpnd->IsRegOpnd())
        {
            Assert(functionObjOpnd->IsAddrOpnd());
        }
    }

    return instrPrev;
}
// Lower LdSuper-style instructions to a helper call taking
// (functionObject, scriptContext). Helper arguments are loaded in reverse:
// script context first, then the function object.
IR::Instr *
LowererMD::LowerLdSuper(IR::Instr * instr, IR::JnHelperMethod helperOpCode)
{
    IR::Opnd * functionObjOpnd;
    IR::Instr * instrPrev = LoadFunctionObjectOpnd(instr, functionObjOpnd);

    m_lowerer->LoadScriptContext(instr);
    LoadHelperArgument(instr, functionObjOpnd);
    ChangeToHelperCall(instr, helperOpCode);

    return instrPrev;
}
// NOTE(review): This fast path is intentionally disabled on ARM — the entire
// body below is commented out and the function emits nothing, so division by
// a power of two always takes the generic helper path. The commented sketch
// is kept as reference for a future implementation; confirm against the x86
// lowerer before enabling.
void
LowererMD::GenerateFastDivByPow2(IR::Instr *instrDiv)
{
    //// Given:
    //// dst = Div_A src1, src2
    //// where src2 == power of 2
    ////
    //// Generate:
    ////     (observation: positive q divides by p equally, where p = power of 2, if q's binary representation
    ////     has all zeroes to the right of p's power 2 bit, try to see if that is the case)
    ////     s1 = AND  src1, 0x80000001 | ((src2Value - 1) << 1)
    ////          CMP  s1, 1
    ////          BNE  $doesntDivideEqually
    ////     s1 = ASR  src1, log2(src2Value)  -- do the equal divide
    ////     dst = EOR   s1, 1                -- restore tagged int bit
    ////          B    $done
    //// $doesntDivideEqually:
    ////     (now check if it divides with the remainder of 1, for which we can do integer divide and accommodate with +0.5
    ////     note that we need only the part that is to the left of p's power 2 bit)
    ////     s1 = AND  s1, 0x80000001 | (src2Value - 1)
    ////          CMP  s1, 1
    ////          BNE  $helper
    ////     s1 = ASR  src1, log2(src2Value) + 1  -- do the integer divide and also shift out the tagged int bit
    ////          PUSH 0xXXXXXXXX (ScriptContext)
    ////          PUSH s1
    ////     dst = CALL Op_FinishOddDivByPow2     -- input: actual value, scriptContext; output: JavascriptNumber with 0.5 added to the input
    ////          JMP  $done
    //// $helper:
    ////     ...
    //// $done:

    //if (instrDiv->GetSrc1()->IsRegOpnd() && instrDiv->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
    //{
    //    return;
    //}

    //IR::Opnd       *dst  = instrDiv->GetDst();
    //IR::Opnd       *src1 = instrDiv->GetSrc1();
    //IR::AddrOpnd   *src2 = instrDiv->GetSrc2()->IsAddrOpnd() ? instrDiv->GetSrc2()->AsAddrOpnd() : nullptr;
    //IR::LabelInstr *doesntDivideEqually = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    //IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    //IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    //IR::RegOpnd    *s1 = IR::RegOpnd::New(TyVar, m_func);
    //IR::Instr      *instr;

    //Assert(src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address) && (Math::IsPow2(Js::TaggedInt::ToInt32(src2->m_address))));
    //int32 src2Value = Js::TaggedInt::ToInt32(src2->m_address);

    //// s1 = AND src1, 0x80000001 | ((src2Value - 1) << 1)
    //instr = IR::Instr::New(Js::OpCode::AND, s1, src1, IR::IntConstOpnd::New((0x80000001 | ((src2Value - 1) << 1)), TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr, false);

    //// CMP s1, 1
    //instr = IR::Instr::New(Js::OpCode::CMP, m_func);
    //instr->SetSrc1(s1);
    //instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, m_func));
    //instrDiv->InsertBefore(instr);

    //// BNE $doesntDivideEqually
    //instr = IR::BranchInstr::New(Js::OpCode::BNE, doesntDivideEqually, m_func);
    //instrDiv->InsertBefore(instr);

    //// s1 = ASR src1, log2(src2Value) -- do the equal divide
    //instr = IR::Instr::New(Js::OpCode::ASR, s1, src1, IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr, false);

    //// dst = ORR s1, 1 -- restore tagged int bit
    //instr = IR::Instr::New(Js::OpCode::ORR, dst, s1, IR::IntConstOpnd::New(1, TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr, false);
    //
    //// B $done
    //instr = IR::BranchInstr::New(Js::OpCode::B, done, m_func);
    //instrDiv->InsertBefore(instr);

    //// $doesntDivideEqually:
    //instrDiv->InsertBefore(doesntDivideEqually);

    //// s1 = AND s1, 0x80000001 | (src2Value - 1)
    //instr = IR::Instr::New(Js::OpCode::AND, s1, s1, IR::IntConstOpnd::New((0x80000001 | (src2Value - 1)), TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);

    //// CMP s1, 1
    //instr = IR::Instr::New(Js::OpCode::CMP, m_func);
    //instr->SetSrc1(s1);
    //instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, m_func));
    //instrDiv->InsertBefore(instr);

    //// BNE $helper
    //instrDiv->InsertBefore(IR::BranchInstr::New(Js::OpCode::BNE, helper, m_func));

    //// s1 = ASR src1, log2(src2Value) + 1 -- do the integer divide and also shift out the tagged int bit
    //instr = IR::Instr::New(Js::OpCode::ASR, s1, src1, IR::IntConstOpnd::New(Math::Log2(src2Value) + 1, TyInt32, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //LegalizeMD::LegalizeInstr(instr, false);

    //// Arg2: scriptContext
    //IR::JnHelperMethod helperMethod;
    //if (instrDiv->dstIsTempNumber)
    //{
    //    // Var JavascriptMath::FinishOddDivByPow2_InPlace(uint32 value, ScriptContext *scriptContext, __out JavascriptNumber* result)
    //    helperMethod = IR::HelperOp_FinishOddDivByPow2InPlace;
    //    Assert(dst->IsRegOpnd());
    //    StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);

    //    instr = this->LoadStackAddress(tempNumberSym);
    //    instrDiv->InsertBefore(instr);
    //    LegalizeMD::LegalizeInstr(instr, false);

    //    this->LoadHelperArgument(instrDiv, instr->GetDst());
    //}
    //else
    //{
    //    // Var JavascriptMath::FinishOddDivByPow2(uint32 value, ScriptContext *scriptContext)
    //    helperMethod = IR::HelperOp_FinishOddDivByPow2;
    //}
    //this->m_lowerer->LoadScriptContext(instrDiv);

    //// Arg1: value
    //this->LoadHelperArgument(instrDiv, s1);

    //// dst = CALL Op_FinishOddDivByPow2 -- input: actual value, output: JavascriptNumber with 0.5 added to the input
    //instr = IR::Instr::New(Js::OpCode::Call, dst, IR::HelperCallOpnd::New(helperMethod, m_func), m_func);
    //instrDiv->InsertBefore(instr);
    //this->LowerCall(instr, 0);

    //// JMP $done
    //instrDiv->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, done, m_func));

    //// $helper:
    //instrDiv->InsertBefore(helper);

    //// $done:
    //instrDiv->InsertAfter(done);
    return;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastCmSrEqConst
///
/// Fast path for strict-equality against a known library constant.
/// Not implemented on ARM: the sketch below documents the intended sequence,
/// but this currently only validates the operand and returns false so the
/// caller emits the generic helper path.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastCmSrEqConst(IR::Instr *instr)
{
    //
    // Given:
    // s1 = CmSrEq_A s2, s3
    // where either s2 or s3 is 'null', 'true' or 'false'
    //
    // Generate:
    //
    //     CMP s2, s3
    //     JEQ $mov_true
    //     MOV s1, Library.GetFalse()
    //     JMP $done
    // $mov_true:
    //     MOV s1, Library.GetTrue()
    // $done:
    //

    Assert(m_lowerer->IsConstRegOpnd(instr->GetSrc2()->AsRegOpnd()));
    return false;
}
// Int32 compares share the tagged-int fast-path emitter.
bool LowererMD::GenerateFastCmXxI4(IR::Instr *instr)
{
    return this->GenerateFastCmXxTaggedInt(instr);
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastCmXxTaggedInt
///
/// Emit an inline compare for the case where both sources are tagged ints.
/// Returns true when the fast path fully replaces the instruction (both
/// operands statically known to be ints); returns false when a $helper label
/// was emitted and the caller must generate the normal helper call sequence.
///
///----------------------------------------------------------------------------
bool LowererMD::GenerateFastCmXxTaggedInt(IR::Instr *instr, bool isInHelper /* = false */)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints (i.e., are vars with the low bit set).
    //
    // Given:
    //
    //      Cmxx_A dst, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    //      LDIMM dst, trueResult
    //      CMP src1, src2
    //      BEQ $fallthru
    //      LDIMM dst, falseResult
    //      B $fallthru
    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Opnd * dst = instr->GetDst();
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func, isInHelper);

    Assert(src1 && src2 && dst);

    // Not tagged ints? Give up — the generic path must handle it.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return false;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return false;
    }

    // Pick the branch condition matching the HIR compare.
    Js::OpCode opcode = Js::OpCode::InvalidOpCode;
    switch ( instr->m_opcode)
    {
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmEq_I4:
        opcode = Js::OpCode::BEQ;
        break;

    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmNeq_I4:
        opcode = Js::OpCode::BNE;
        break;

    case Js::OpCode::CmGt_A:
    case Js::OpCode::CmGt_I4:
        opcode = Js::OpCode::BGT;
        break;

    case Js::OpCode::CmGe_A:
    case Js::OpCode::CmGe_I4:
        opcode = Js::OpCode::BGE;
        break;

    case Js::OpCode::CmLt_A:
    case Js::OpCode::CmLt_I4:
        opcode = Js::OpCode::BLT;
        break;

    case Js::OpCode::CmLe_A:
    case Js::OpCode::CmLe_I4:
        opcode = Js::OpCode::BLE;
        break;

    case Js::OpCode::CmUnGt_A:
    case Js::OpCode::CmUnGt_I4:
        opcode = Js::OpCode::BHI;
        break;

    case Js::OpCode::CmUnGe_A:
    case Js::OpCode::CmUnGe_I4:
        opcode = Js::OpCode::BCS;
        break;

    case Js::OpCode::CmUnLt_A:
    case Js::OpCode::CmUnLt_I4:
        opcode = Js::OpCode::BCC;
        break;

    case Js::OpCode::CmUnLe_A:
    case Js::OpCode::CmUnLe_I4:
        opcode = Js::OpCode::BLS;
        break;

    default: Assert(false);
    }

    // Tagged ints? If both are statically ints, the runtime check is skipped.
    bool isTaggedInts = false;
    if (src1->IsTaggedInt() || src1->IsInt32())
    {
        if (src2->IsTaggedInt() || src2->IsInt32())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        this->GenerateSmIntPairTest(instr, src1, src2, helper);
    }

    // dst may alias a source: copy the source aside so the unconditional
    // "LDIMM dst, trueResult" below doesn't clobber a compare operand.
    if (dst->IsEqual(src1))
    {
        IR::RegOpnd *newSrc1 = IR::RegOpnd::New(TyMachReg, m_func);
        LowererMD::CreateAssign(newSrc1, src1, instr);
        src1 = newSrc1;
    }

    if (dst->IsEqual(src2))
    {
        IR::RegOpnd *newSrc2 = IR::RegOpnd::New(TyMachReg, m_func);
        LowererMD::CreateAssign(newSrc2, src2, instr);
        src2 = newSrc2;
    }

    // Int32 dsts get raw 1/0; var dsts get the library true/false objects.
    IR::Opnd *opndTrue, *opndFalse;

    if (dst->IsInt32())
    {
        opndTrue = IR::IntConstOpnd::New(1, TyMachReg, this->m_func);
        opndFalse = IR::IntConstOpnd::New(0, TyMachReg, this->m_func);
    }
    else
    {
        opndTrue = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue);
        opndFalse = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
    }

    //      LDIMM dst, trueResult
    //      CMP src1, src2
    //      BEQ $fallthru
    //      LDIMM dst, falseResult
    //      B $fallthru

    instr->InsertBefore(IR::Instr::New(Js::OpCode::LDIMM, dst, opndTrue, m_func));
    IR::Instr *instrCmp = IR::Instr::New(Js::OpCode::CMP, m_func);
    instrCmp->SetSrc1(src1);
    instrCmp->SetSrc2(src2);
    instr->InsertBefore(instrCmp);
    LegalizeMD::LegalizeInstr(instrCmp,false);

    instr->InsertBefore(IR::BranchInstr::New(opcode, fallthru, m_func));
    instr->InsertBefore(IR::Instr::New(Js::OpCode::LDIMM, dst, opndFalse, m_func));

    if (isTaggedInts)
    {
        // Fast path is complete: no helper needed, drop the original instr.
        instr->InsertAfter(fallthru);
        instr->Remove();
        return true;
    }

    // B $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, fallthru, m_func));
    instr->InsertBefore(helper);
    instr->InsertAfter(fallthru);
    return false;
}
// Convert an integer truth value to a library boolean:
// dst = true, then TST src1 against itself (sets Z iff src1 == 0);
// if nonzero, keep true, otherwise overwrite with false.
// Replaces 'instr' and returns the first emitted instruction.
IR::Instr * LowererMD::GenerateConvBool(IR::Instr *instr)
{
    // dst = LDIMM true
    // TST src1, src1
    // BNE fallthrough
    // dst = LDIMM false
    // fallthrough:

    IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
    IR::Opnd *opndTrue = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue);
    IR::Opnd *opndFalse = m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
    IR::LabelInstr *fallthru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // dst = LDIMM true
    IR::Instr *instrFirst = IR::Instr::New(Js::OpCode::LDIMM, dst, opndTrue, m_func);
    instr->InsertBefore(instrFirst);

    // TST src1, src1 — test the value against itself to set the flags.
    IR::Instr *instrTst = IR::Instr::New(Js::OpCode::TST, m_func);
    instrTst->SetSrc1(src1);
    instrTst->SetSrc2(src1);
    instr->InsertBefore(instrTst);
    LegalizeMD::LegalizeInstr(instrTst, false);

    // BNE fallthrough
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BNE, fallthru, m_func));

    // dst = LDIMM false
    instr->InsertBefore(IR::Instr::New(Js::OpCode::LDIMM, dst, opndFalse, m_func));

    // fallthrough:
    instr->InsertAfter(fallthru);
    instr->Remove();

    return instrFirst;
}
  2716. ///----------------------------------------------------------------------------
  2717. ///
  2718. /// LowererMD::GenerateFastAdd
  2719. ///
  2720. /// NOTE: We assume that only the sum of two Int31's will have 0x2 set. This
  2721. /// is only true until we have a var type with tag == 0x2.
  2722. ///
  2723. ///----------------------------------------------------------------------------
  2724. bool
  2725. LowererMD::GenerateFastAdd(IR::Instr * instrAdd)
  2726. {
  2727. // Given:
  2728. //
  2729. // dst = Add src1, src2
  2730. //
  2731. // Generate:
  2732. //
  2733. // (If not 2 Int31's, use $helper.)
  2734. // s1 = SUB src1, 1 -- get rid of one of the tag
  2735. // tmp = ADDS s1, src2 -- try an inline add
  2736. // BVS $helper
  2737. // dst = MOV tmp
  2738. // B $done
  2739. // $helper:
  2740. // (caller generates helper call)
  2741. // $done:
  2742. IR::Instr * instr;
  2743. IR::LabelInstr * labelHelper;
  2744. IR::LabelInstr * labelDone;
  2745. IR::Opnd * opndReg;
  2746. IR::Opnd * opndSrc1;
  2747. IR::Opnd * opndSrc2;
  2748. opndSrc1 = instrAdd->GetSrc1();
  2749. opndSrc2 = instrAdd->GetSrc2();
  2750. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");
  2751. // Generate fastpath for Incr_A anyway -
  2752. // Incrementing strings representing integers can be inter-mixed with integers e.g. "1"++ -> converts 1 to an int and thereafter, integer increment is expected.
  2753. if (opndSrc1->IsRegOpnd() && (opndSrc1->AsRegOpnd()->m_sym->m_isNotInt || opndSrc1->GetValueType().IsString()
  2754. || (instrAdd->m_opcode != Js::OpCode::Incr_A && opndSrc1->GetValueType().IsLikelyString())))
  2755. {
  2756. return false;
  2757. }
  2758. if (opndSrc2->IsRegOpnd() && (opndSrc2->AsRegOpnd()->m_sym->m_isNotInt ||
  2759. opndSrc2->GetValueType().IsLikelyString()))
  2760. {
  2761. return true;
  2762. }
  2763. // Load src's at the top so we don't have to do it repeatedly.
  2764. if (!opndSrc1->IsRegOpnd())
  2765. {
  2766. opndSrc1 = IR::RegOpnd::New(opndSrc1->GetType(), this->m_func);
  2767. LowererMD::CreateAssign(opndSrc1, instrAdd->GetSrc1(), instrAdd);
  2768. }
  2769. if (!opndSrc2->IsRegOpnd())
  2770. {
  2771. opndSrc2 = IR::RegOpnd::New(opndSrc2->GetType(), this->m_func);
  2772. LowererMD::CreateAssign(opndSrc2, instrAdd->GetSrc2(), instrAdd);
  2773. }
  2774. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  2775. // Tagged ints?
  2776. bool isTaggedInts = opndSrc1->IsTaggedInt() && opndSrc2->IsTaggedInt();
  2777. if (!isTaggedInts)
  2778. {
  2779. // (If not 2 Int31's, jump to $helper.)
  2780. this->GenerateSmIntPairTest(instrAdd, opndSrc1, opndSrc2, labelHelper);
  2781. }
  2782. if (opndSrc1->IsAddrOpnd())
  2783. {
  2784. // If opnd1 is a constant, just swap them.
  2785. Swap(opndSrc1, opndSrc2);
  2786. }
  2787. // s1 = SUB src1, 1 -- get rid of one of the tag
  2788. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2789. instr = IR::Instr::New(Js::OpCode::SUB, opndReg, opndSrc1, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), this->m_func);
  2790. instrAdd->InsertBefore(instr);
  2791. // tmp = ADDS s1, src2 -- try an inline add
  2792. IR::RegOpnd *opndTmp = IR::RegOpnd::New(TyMachReg, this->m_func);
  2793. instr = IR::Instr::New(Js::OpCode::ADDS, opndTmp, opndReg, opndSrc2, this->m_func);
  2794. instrAdd->InsertBefore(instr);
  2795. // BVS $helper -- if overflow, branch to helper.
  2796. instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
  2797. instrAdd->InsertBefore(instr);
  2798. // dst = MOV tmp
  2799. LowererMD::CreateAssign(instrAdd->GetDst(), opndTmp, instrAdd);
  2800. // B $done
  2801. labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  2802. instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
  2803. instrAdd->InsertBefore(instr);
  2804. // $helper:
  2805. // (caller generates helper call)
  2806. // $done:
  2807. instrAdd->InsertBefore(labelHelper);
  2808. instrAdd->InsertAfter(labelDone);
  2809. // Return true to indicate the original instr must still be lowered.
  2810. return true;
  2811. }
  2812. ///----------------------------------------------------------------------------
  2813. ///
  2814. /// LowererMD::GenerateFastSub
  2815. ///
  2816. ///
  2817. ///----------------------------------------------------------------------------
  2818. bool
  2819. LowererMD::GenerateFastSub(IR::Instr * instrSub)
  2820. {
  2821. // Given:
  2822. //
  2823. // dst = Sub src1, src2
  2824. //
  2825. // Generate:
  2826. //
  2827. // (If not 2 Int31's, jump to $helper.)
  2828. // s1 = SUBS src1, src2 -- try an inline sub
  2829. // BVS $helper -- bail if the subtract overflowed
  2830. // dst = ADD s1, 1 -- restore the var tag on the result
  2831. // B $fallthru
  2832. // $helper:
  2833. // (caller generates helper call)
  2834. // $fallthru:
  2835. IR::Instr * instr;
  2836. IR::LabelInstr * labelHelper;
  2837. IR::LabelInstr * labelFallThru;
  2838. IR::Opnd * opndReg;
  2839. IR::Opnd * opndSrc1;
  2840. IR::Opnd * opndSrc2;
  2841. opndSrc1 = instrSub->GetSrc1();
  2842. opndSrc2 = instrSub->GetSrc2();
  2843. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Sub instruction");
  2844. // Not tagged ints?
  2845. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->m_isNotInt ||
  2846. opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->m_sym->m_isNotInt)
  2847. {
  2848. return false;
  2849. }
  2850. // Load src's at the top so we don't have to do it repeatedly.
  2851. if (!opndSrc1->IsRegOpnd())
  2852. {
  2853. opndSrc1 = IR::RegOpnd::New(opndSrc1->GetType(), this->m_func);
  2854. LowererMD::CreateAssign(opndSrc1, instrSub->GetSrc1(), instrSub);
  2855. }
  2856. if (!opndSrc2->IsRegOpnd())
  2857. {
  2858. opndSrc2 = IR::RegOpnd::New(opndSrc2->GetType(), this->m_func);
  2859. LowererMD::CreateAssign(opndSrc2, instrSub->GetSrc2(), instrSub);
  2860. }
  2861. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  2862. // Tagged ints?
  2863. bool isTaggedInts = opndSrc1->IsTaggedInt() && opndSrc2->IsTaggedInt();
  2864. if (!isTaggedInts)
  2865. {
  2866. // (If not 2 Int31's, jump to $helper.)
  2867. this->GenerateSmIntPairTest(instrSub, opndSrc1, opndSrc2, labelHelper);
  2868. }
  2869. // s1 = SUBS src1, src2 -- try an inline sub
  2870. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2871. instr = IR::Instr::New(Js::OpCode::SUBS, opndReg, opndSrc1, opndSrc2, this->m_func);
  2872. instrSub->InsertBefore(instr);
  2873. // BVS $helper -- bail if the subtract overflowed
  2874. instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
  2875. instrSub->InsertBefore(instr);
  2876. // dst = ADD s1, 1 -- restore the var tag on the result
  2877. instr = IR::Instr::New(Js::OpCode::ADD, instrSub->GetDst(), opndReg, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), this->m_func);
  2878. instrSub->InsertBefore(instr);
  2879. LegalizeMD::LegalizeInstr(instr, false);
  2880. // B $fallthru
  2881. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  2882. instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
  2883. instrSub->InsertBefore(instr);
  2884. // $helper:
  2885. // (caller generates helper call)
  2886. // $fallthru:
  2887. instrSub->InsertBefore(labelHelper);
  2888. instrSub->InsertAfter(labelFallThru);
  2889. // Return true to indicate the original instr must still be lowered.
  2890. return true;
  2891. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastMul
///
/// Emit an inline tagged-int multiply fast path, including the -0 special
/// case. Always returns true: the caller still lowers the original
/// instruction as the $helper call for the slow path.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastMul(IR::Instr * instrMul)
{
    // Given:
    //
    // dst = Mul src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = SUB src1, AtomTag     -- clear the var tag from the value to be multiplied
    // s2 = ASR src2, Js::VarTag_Shift  -- extract the real src2 amount from the var
    // (r12:)s1 = SMULL s1, (r12,) s1, s2   -- do the signed mul into 64bit r12:s1, the result will be src1 * src2 * 2
    // (SMULL doesn't set the flags but we don't have 32bit overflow <=> r12-unsigned ? r12==0 : all 33 bits of 64bit result are 1's
    // CMP r12, s1, ASR #31       -- check for overflow (== means no overflow)
    // BNE $helper                -- bail if the result overflowed
    // TST s1, s1                 -- Check 0 vs -0 (Javascript number is technically double, so need to account for -0)
    // BNE $result                -- TODO: consider converting 2 instructions into one: CBZ s1, $zero
    // (result of mul was 0. Account for -0)
    // s2 = ADDS s2, src1         -- MUL is 0 => one of (src1, src2) is 0, see if the other one is positive or negative
    // BGT $result                -- positive 0. keep it as int31
    // dst= ToVar(-0.0)           -- load negative 0
    // B $fallthru
    // $result:
    // dst= ORR s1, AtomTag       -- make sure var tag is set on the result
    // B $fallthru
    // $helper:
    // (caller generates helper call)
    // $fallthru:

    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelResult;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrMul->GetSrc1();
    opndSrc2 = instrMul->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on mul instruction");

    // (If not 2 Int31's, jump to $helper.)
    // A source known not to be an int: emit no fast path; the caller's helper
    // call does all the work (return true = original instr must still be lowered).
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->m_isNotInt ||
        opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return true;
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    labelResult = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Load src's at the top so we don't have to do it repeatedly.
    if (!opndSrc1->IsRegOpnd())
    {
        opndSrc1 = IR::RegOpnd::New(opndSrc1->GetType(), this->m_func);
        LowererMD::CreateAssign(opndSrc1, instrMul->GetSrc1(), instrMul);
    }
    if (!opndSrc2->IsRegOpnd())
    {
        opndSrc2 = IR::RegOpnd::New(opndSrc2->GetType(), this->m_func);
        LowererMD::CreateAssign(opndSrc2, instrMul->GetSrc2(), instrMul);
    }

    bool isTaggedInts = opndSrc1->IsTaggedInt() && opndSrc2->IsTaggedInt();
    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrMul, opndSrc1->AsRegOpnd(), opndSrc2->AsRegOpnd(), labelHelper);
    }

    // s1 = SUB src1, AtomTag -- clear the var tag from the value to be multiplied
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::SUB, opndReg1, opndSrc1, IR::IntConstOpnd::New(Js::AtomTag, TyVar, this->m_func), this->m_func); // TODO: TyVar or TyMachReg?
    instrMul->InsertBefore(instr);

    // s2 = ASR src2, Js::VarTag_Shift -- extract the real src2 amount from the var
    opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::ASR, opndReg2, opndSrc2,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    instrMul->InsertBefore(instr);

    // (r12:)s1 = SMULL s1, (r12,) s1, s2 -- do the signed mul into 64bit r12:s1, the result will be src1 * src2 * 2
    instr = IR::Instr::New(Js::OpCode::SMULL, opndReg1, opndReg1, opndReg2, this->m_func);
    instrMul->InsertBefore(instr);

    // (SMULL doesn't set the flags but we don't have 32bit overflow <=> r12-unsigned ? r12==0 : all 33 bits of 64bit result are 1's
    // CMP r12, s1, ASR #31 -- check for overflow (== means no overflow)
    IR::RegOpnd* opndRegR12 = IR::RegOpnd::New(nullptr, RegR12, TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::CMP_ASR31, this->m_func);
    instr->SetSrc1(opndRegR12);
    instr->SetSrc2(opndReg1);
    instrMul->InsertBefore(instr);

    // BNE $helper -- bail if the result overflowed
    instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
    instrMul->InsertBefore(instr);

    // TST s1, s1 -- Check 0 vs -0 (Javascript number is technically double, so need to account for -0)
    instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
    instr->SetSrc1(opndReg1);
    instr->SetSrc2(opndReg1);
    instrMul->InsertBefore(instr);

    // BNE $result -- nonzero product: no -0 concern. TODO: consider converting 2 instructions into one: CBZ s1, $zero
    instr = IR::BranchInstr::New(Js::OpCode::BNE, labelResult, this->m_func);
    instrMul->InsertBefore(instr);

    // (result of mul was 0. Account for -0)
    // s2 = ADDS s2, src1 -- MUL is 0 => one of (src1, src2) is 0, see if the other one is positive or negative
    instr = IR::Instr::New(Js::OpCode::ADDS, opndReg2, opndReg2, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);

    // BGT $result -- positive 0. keep it as int31
    instr = IR::BranchInstr::New(Js::OpCode::BGT, labelResult, this->m_func);
    instrMul->InsertBefore(instr);

    // dst= ToVar(-0.0) -- load the library's negative-zero Number object
    instr = LowererMD::CreateAssign(instrMul->GetDst(), m_lowerer->LoadLibraryValueOpnd(instrMul, LibraryValue::ValueNegativeZero), instrMul);
    // No need to insert: CreateAssign creates legalized instr and inserts it.

    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $result:
    instrMul->InsertBefore(labelResult);

    // dst= ORR s1, AtomTag -- make sure var tag is set on the result
    instr = IR::Instr::New(Js::OpCode::ORR, instrMul->GetDst(), opndReg1, IR::IntConstOpnd::New(Js::AtomTag, TyVar, this->m_func), this->m_func);
    instrMul->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr, false);

    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrMul->InsertBefore(labelHelper);
    instrMul->InsertAfter(labelFallThru);

    // Return true to indicate the original instr must still be lowered.
    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastAnd
///
/// Emit an inline bitwise-AND fast path. ANDing two tagged ints leaves the
/// tag bit set, so the result's tag bit doubles as the "both were ints"
/// check. Returns false when the instruction was fully handled inline,
/// true when the caller must still generate the helper sequence.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastAnd(IR::Instr * instrAnd)
{
    // Given:
    //
    // dst = And src1, src2
    //
    // Generate:
    //
    //
    // If dst is reg:
    //
    // dst = AND src1, src2
    //       TST dst, 1
    //       BNE $done
    //       (caller generates helper sequence)
    // $done:
    //
    // If dst is not reg:
    //
    // dstReg = AND src1, src2
    //          TST dstReg, 1
    //          BEQ $helper
    // dst = STR dstReg
    //       B $done
    // $helper
    //       (caller generates helper sequence)
    // $done:

    IR::Opnd *dst = instrAnd->GetDst();
    IR::Opnd *src1 = instrAnd->GetSrc1();
    IR::Opnd *src2 = instrAnd->GetSrc2();
    IR::Instr *instr;

    // Not tagged ints? No fast path; the caller's helper does everything.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return true;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return true;
    }

    bool isInt = src1->IsTaggedInt() && src2->IsTaggedInt();
    if (!isInt)
    {
        // The AND may have to be thrown away (helper path), so don't write the
        // real dst yet if it's not a reg or if it aliases a source.
        if (!dst->IsRegOpnd() || dst->IsEqual(src1) || dst->IsEqual(src2))
        {
            // Put the result in a reg and store it only when we know it's final.
            dst = IR::RegOpnd::New(dst->GetType(), this->m_func);
        }
    }

    // dstReg = AND src1, src2
    instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, this->m_func);
    instrAnd->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr, false);

    if (isInt)
    {
        // If both src's are ints, then we're done, and we need no helper call.
        instrAnd->Remove();
        return false;
    }

    // TST dstReg, 1 -- the tag bit survives the AND only if both sources had it set.
    instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
    instr->SetSrc1(dst);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func));
    instrAnd->InsertBefore(instr);

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    if (dst == instrAnd->GetDst())
    {
        // We wrote the real dst directly: tag bit set means both were ints.
        // BNE $done
        instr = IR::BranchInstr::New(Js::OpCode::BNE, labelDone, this->m_func);
        instrAnd->InsertBefore(instr);
    }
    else
    {
        // We wrote a temp: commit it to the real dst only on the int path.
        // BEQ $helper
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
        instrAnd->InsertBefore(instr);

        // dst = STR dstReg
        LowererMD::CreateAssign(instrAnd->GetDst(), dst, instrAnd);

        // B $done
        instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
        instrAnd->InsertBefore(instr);

        // $helper
        instrAnd->InsertBefore(labelHelper);
    }

    // (caller generates helper sequence)
    // $done:
    instrAnd->InsertAfter(labelDone);

    // Return true to indicate the original instr must still be lowered.
    return true;
}
  3119. ///----------------------------------------------------------------------------
  3120. ///
  3121. /// LowererMD::GenerateFastOr
  3122. ///
  3123. ///----------------------------------------------------------------------------
  3124. bool
  3125. LowererMD::GenerateFastOr(IR::Instr * instrOr)
  3126. {
  3127. // Given:
  3128. //
  3129. // dst = Or src1, src2
  3130. //
  3131. // Generate:
  3132. //
  3133. // (If not 2 Int31's, jump to $helper.)
  3134. //
  3135. // dst = OR src1, src2
  3136. // B $done
  3137. // $helper:
  3138. // (caller generates helper sequence)
  3139. // $fallthru:
  3140. IR::Opnd *src1 = instrOr->GetSrc1();
  3141. IR::Opnd *src2 = instrOr->GetSrc2();
  3142. IR::Opnd *dst = instrOr->GetDst();
  3143. IR::Instr *instr;
  3144. IR::LabelInstr *labelHelper = nullptr;
  3145. // Not tagged ints?
  3146. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym->m_isNotInt)
  3147. {
  3148. return true;
  3149. }
  3150. if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym->m_isNotInt)
  3151. {
  3152. return true;
  3153. }
  3154. // Tagged ints?
  3155. bool isInt = src1->IsTaggedInt() && src2->IsTaggedInt();
  3156. // Load the src's at the top so we don't have to do it repeatedly.
  3157. if (!src1->IsRegOpnd())
  3158. {
  3159. src1 = IR::RegOpnd::New(src1->GetType(), this->m_func);
  3160. LowererMD::CreateAssign(src1, instrOr->GetSrc1(), instrOr);
  3161. }
  3162. if (!src2->IsRegOpnd())
  3163. {
  3164. src2 = IR::RegOpnd::New(src2->GetType(), this->m_func);
  3165. LowererMD::CreateAssign(src2, instrOr->GetSrc2(), instrOr);
  3166. }
  3167. if (!isInt)
  3168. {
  3169. // (If not 2 Int31's, jump to $helper.)
  3170. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  3171. this->GenerateSmIntPairTest(instrOr, src1, src2, labelHelper);
  3172. }
  3173. // dst = OR src1, src2
  3174. instr = IR::Instr::New(Js::OpCode::ORR, dst, src1, src2, this->m_func);
  3175. instrOr->InsertBefore(instr);
  3176. LegalizeMD::LegalizeInstr(instr, false);
  3177. if (isInt)
  3178. {
  3179. // If both src's are ints, then we're done, and we don't need a helper call.
  3180. instrOr->Remove();
  3181. return false;
  3182. }
  3183. // B $done
  3184. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3185. instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
  3186. instrOr->InsertBefore(instr);
  3187. // $helper:
  3188. // (caller generates helper sequence)
  3189. // $done:
  3190. instrOr->InsertBefore(labelHelper);
  3191. instrOr->InsertAfter(labelDone);
  3192. // Return true to indicate the original instr must still be lowered.
  3193. return true;
  3194. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastXor
///
/// No inline Xor fast path is implemented here; this always returns true so
/// the caller lowers the original instruction via the helper. The comment
/// below sketches the intended fast path for a future implementation.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastXor(IR::Instr * instrXor)
{
    // Given:
    //
    // dst = Xor src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    //
    // s1 = MOV src1
    // s1 = XOR s1, src2 -- try an inline XOR
    // s1 = INC s1
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:

    // Return true to indicate the original instr must still be lowered.
    return true;
}
//----------------------------------------------------------------------------
//
// LowererMD::GenerateFastNot
//
// Emit an inline bitwise-NOT fast path for a tagged-int source: MVN flips
// all bits (destroying the tag), then ADD 1 restores the tag bit. Returns
// false when handled entirely inline, true when the caller must still
// generate the helper call.
//
//----------------------------------------------------------------------------
bool
LowererMD::GenerateFastNot(IR::Instr * instrNot)
{
    // Given:
    //
    // dst = Not src
    //
    // Generate:
    //
    //     TST src, 1
    //     BEQ $helper
    // dst = MVN src
    // dst = INC dst
    //     JMP $done
    // $helper:
    //     (caller generates helper call)
    // $done:

    IR::LabelInstr *labelHelper = nullptr;
    IR::Opnd *src = instrNot->GetSrc1();
    IR::Opnd *dst = instrNot->GetDst();
    IR::Instr *instr;
    bool isInt = src->IsTaggedInt();

    if (!src->IsRegOpnd())
    {
        // Load the src at the top so we don't have to load it twice.
        src = IR::RegOpnd::New(src->GetType(), this->m_func);
        LowererMD::CreateAssign(src, instrNot->GetSrc1(), instrNot);
    }

    if (!dst->IsRegOpnd())
    {
        // We'll store the dst when we're done.
        dst = IR::RegOpnd::New(dst->GetType(), this->m_func);
    }

    if (!isInt)
    {
        // Source not provably an int: test the tag bit at runtime.
        // TST src, 1
        instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
        instr->SetSrc1(src);
        instr->SetSrc2(IR::IntConstOpnd::New(1, TyMachReg, this->m_func));
        instrNot->InsertBefore(instr);

        // BEQ $helper -- tag bit clear, not a tagged int.
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
        instrNot->InsertBefore(instr);
    }

    // dst = MVN src -- bitwise NOT; this also clears the tag bit.
    instr = IR::Instr::New(Js::OpCode::MVN, dst, src, this->m_func);
    instrNot->InsertBefore(instr);

    // dst = ADD dst, 1 -- put the var tag back on the result.
    instr = IR::Instr::New(Js::OpCode::ADD, dst, dst, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
    instrNot->InsertBefore(instr);

    if (dst != instrNot->GetDst())
    {
        // Now store the result.
        LowererMD::CreateAssign(instrNot->GetDst(), dst, instrNot);
    }

    if (isInt)
    {
        // If the src is int, then we're done, and we need no helper call.
        instrNot->Remove();
        return false;
    }

    // B $done
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
    instrNot->InsertBefore(instr);

    // $helper:
    // (caller generates helper call)
    // $done:
    instrNot->InsertBefore(labelHelper);
    instrNot->InsertAfter(labelDone);

    // Return true to indicate the original instr must still be lowered.
    return true;
}
  3301. //
  3302. // If value is zero in tagged int representation, jump to $labelHelper.
  3303. //
  3304. void
  3305. LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  3306. {
  3307. // CMP src1, AtomTag
  3308. IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  3309. instr->SetSrc1(opndSrc);
  3310. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func));
  3311. insertInstr->InsertBefore(instr);
  3312. // BEQ $helper
  3313. if(labelHelper != nullptr)
  3314. {
  3315. // BEQ $labelHelper
  3316. instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
  3317. insertInstr->InsertBefore(instr);
  3318. }
  3319. }
// Emit an inline negate fast path for a tagged-int source. Constant sources
// are folded at JIT time; zero must go to the runtime (or the folded library
// value) because -0 is a Number, not an Int31. Returns false when handled
// entirely inline, true when the caller must still generate the helper call.
bool
LowererMD::GenerateFastNeg(IR::Instr * instrNeg)
{
    // Given:
    //
    // dst = Not src
    //
    // Generate:
    //
    //     if not int, jump $helper
    //     if src == 0     -- test for zero (must be handled by the runtime to preserve
    //     BEQ $helper     -- Difference between +0 and -0)
    // dst = RSB src, 0    -- do an inline NEG
    // dst = ADD dst, 2    -- restore the var tag on the result
    //     BVS $helper
    //     B $fallthru
    // $helper:
    //     (caller generates helper call)
    // $fallthru:

    IR::Instr * instr;
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru = nullptr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndDst;

    opndSrc1 = instrNeg->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Neg instruction");

    // Constant source: fold the negation at JIT time and turn the Neg into an assign.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        IR::Opnd *newOpnd;
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();

        if (value == 0)
        {
            // If the negate operand is zero, the result is -0.0, which is a Number rather than an Int31.
            newOpnd = m_lowerer->LoadLibraryValueOpnd(instrNeg, LibraryValue::ValueNegativeZero);
        }
        else
        {
            // negation below can overflow because max negative int32 value > max positive value by 1.
            newOpnd = IR::AddrOpnd::NewFromNumber(-(int64)value, m_func);
        }

        instrNeg->ClearBailOutInfo();
        instrNeg->FreeSrc1();
        instrNeg->SetSrc1(newOpnd);
        instrNeg = this->ChangeToAssign(instrNeg);

        // Skip lowering call to helper
        return false;
    }

    bool isInt = (opndSrc1->IsTaggedInt());

    // Known non-int: no fast path; caller generates the helper call.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return true;
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    // Load src's at the top so we don't have to do it repeatedly.
    if (!opndSrc1->IsRegOpnd())
    {
        opndSrc1 = IR::RegOpnd::New(opndSrc1->GetType(), this->m_func);
        LowererMD::CreateAssign(opndSrc1, instrNeg->GetSrc1(), instrNeg);
    }

    if (!isInt)
    {
        GenerateSmIntTest(opndSrc1, instrNeg, labelHelper);
    }

    // Zero must be handled by the runtime (+0 vs -0), so bail to $helper on zero.
    GenerateTaggedZeroTest(opndSrc1, instrNeg, labelHelper);

    opndDst = instrNeg->GetDst();
    if (!opndDst->IsRegOpnd())
    {
        opndDst = IR::RegOpnd::New(opndDst->GetType(), this->m_func);
    }

    // dst = RSB src, 0 -- reverse-subtract: dst = 0 - src (negates the tagged value).
    instr = IR::Instr::New(Js::OpCode::RSB, opndDst, opndSrc1, IR::IntConstOpnd::New(0, TyInt32, this->m_func), this->m_func);
    instrNeg->InsertBefore(instr);

    // dst = ADDS dst, 2 -- restore the var tag; flags are set for the overflow check below.
    instr = IR::Instr::New(Js::OpCode::ADDS, opndDst, opndDst, IR::IntConstOpnd::New(2, TyInt32, this->m_func), this->m_func);
    instrNeg->InsertBefore(instr);

    // BVS $helper
    instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
    instrNeg->InsertBefore(instr);

    if (opndDst != instrNeg->GetDst())
    {
        // Now store the result.
        LowererMD::CreateAssign(instrNeg->GetDst(), opndDst, instrNeg);
    }

    // B $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    instrNeg->InsertBefore(instr);

    // $helper:
    // (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNeg->InsertBefore(labelHelper);
    instrNeg->InsertAfter(labelFallThru);

    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastShiftLeft
///
/// No inline shift-left fast path is implemented here; this always returns
/// true so the caller lowers the original instruction via the helper. The
/// comment below sketches the intended fast path for a future implementation.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastShiftLeft(IR::Instr * instrShift)
{
    // Given:
    //
    // dst = Shl src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = SAR s1, Js::VarTag_Shift  -- Remove the var tag from the value to be shifted
    // s2 = MOV src2
    // s2 = SAR s2, Js::VarTag_Shift  -- extract the real shift amount from the var
    // s1 = SHL s1, s2                -- do the inline shift
    // s3 = MOV s1
    // s3 = SHL s3, Js::VarTag_Shift  -- restore the var tag on the result
    //      JO $ToVar
    // s3 = INC s3
    // dst = MOV s3
    //      JMP $fallthru
    // $ToVar:
    //      PUSH scriptContext
    //      PUSH s1
    // dst = ToVar()
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    // Return true to indicate the original instr must still be lowered.
    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastShiftRight
///
/// No inline shift-right fast path is implemented here; this always returns
/// true so the caller lowers the original instruction (Shr/Sar) via the
/// helper. The comment below sketches the intended fast path.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastShiftRight(IR::Instr * instrShift)
{
    // Given:
    //
    // dst = Shr/Sar src1, src2
    //
    // Generate:
    //
    // s1 = MOV src1
    //      TEST s1, 1
    //      JEQ $S1ToInt
    // s1 = SAR s1, VarTag_Shift  -- extract the real shift amount from the var
    //      JMP $src2
    // $S1ToInt:
    //      PUSH scriptContext
    //      PUSH s1
    // s1 = ToInt32()/ToUInt32
    // $src2:
    //      Load s2 in ECX
    //      TEST s2, 1
    //      JEQ $S2ToUInt
    // s2 = SAR s2, VarTag_Shift  -- extract the real shift amount from the var
    //      JMP $Shr
    // $S2ToUInt:
    //      PUSH scriptContext
    //      PUSH s2
    // s2 = ToUInt32()
    // $Shr:
    // s1 = SHR/SAR s1, s2        -- do the inline shift
    // s3 = MOV s1
    // s3 = SHL s3, s2            -- To tagInt
    //      JO $ToVar
    //      JS $ToVar
    // s3 = INC s3
    //      JMP $done
    // $ToVar:
    //      PUSH scriptContext
    //      PUSH s1
    // s3 = ToVar()
    // $Done:
    // dst = MOV s3

    // Return true to indicate the original instr must still be lowered.
    return true;
}
  3502. void
  3503. LowererMD::GenerateFastBrS(IR::BranchInstr *brInstr)
  3504. {
  3505. IR::Opnd *src1 = brInstr->UnlinkSrc1();
  3506. Assert(src1->IsIntConstOpnd() || src1->IsAddrOpnd() || src1->IsRegOpnd());
  3507. m_lowerer->InsertTest(
  3508. m_lowerer->LoadOptimizationOverridesValueOpnd(
  3509. brInstr, OptimizationOverridesValue::OptimizationOverridesSideEffects),
  3510. src1,
  3511. brInstr);
  3512. Js::OpCode opcode;
  3513. switch(brInstr->m_opcode)
  3514. {
  3515. case Js::OpCode::BrHasSideEffects:
  3516. opcode = Js::OpCode::BNE;
  3517. break;
  3518. case Js::OpCode::BrNotHasSideEffects:
  3519. opcode = Js::OpCode::BEQ;
  3520. break;
  3521. default:
  3522. Assert(UNREACHED);
  3523. __assume(false);
  3524. }
  3525. brInstr->m_opcode = opcode;
  3526. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateSmIntPairTest
///
/// Generate code to test whether the given operands are both Int31 vars
/// and branch to the given label if not. Returns the instruction that
/// preceded the emitted sequence (the caller's previous insertion point).
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * src1,
    IR::Opnd * src2,
    IR::LabelInstr * labelFail)
{
    IR::Opnd * opndReg;
    IR::Instr * instrPrev = instrInsert->m_prev;
    IR::Instr * instr;

    Assert(src1->GetType() == TyVar);
    Assert(src2->GetType() == TyVar);

    //src1 and src2 can either be RegOpnd or AddrOpnd at this point

    // Normalize so that a statically-known tagged int (if any) ends up in src2.
    if (src1->IsTaggedInt())
    {
        Swap(src1, src2);
    }

    if (src2->IsTaggedInt())
    {
        // src2 is known tagged: both tagged iff src1's tag bit is set.
        if (src1->IsTaggedInt())
        {
            // Both statically tagged: nothing to test at runtime.
            return instrPrev;
        }

        IR::RegOpnd *opndSrc1 = src1->AsRegOpnd();
        // TST src1, AtomTag
        // BEQ $fail
        instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
        instr->SetSrc1(opndSrc1);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyVar, this->m_func));
        instrInsert->InsertBefore(instr);
    }
    else
    {
        IR::RegOpnd *opndSrc1 = src1->AsRegOpnd();
        IR::RegOpnd *opndSrc2 = src2->AsRegOpnd();

        // Neither is statically known: (src1 & AtomTag) & src2 is nonzero only
        // when the tag bit is set in both operands.
        // s1 = AND src1, 1
        //      TST s1, src2
        //      BEQ $fail

        // s1 = AND src1, AtomTag
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(
            Js::OpCode::AND, opndReg, opndSrc1, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // TST s1, src2
        instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(opndSrc2);
        instrInsert->InsertBefore(instr);
    }

    // BEQ $fail -- zero result means at least one operand was not a tagged int.
    instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
  3589. void LowererMD::GenerateObjectPairTest(IR::Opnd * opndSrc1, IR::Opnd * opndSrc2, IR::Instr * insertInstr, IR::LabelInstr * labelTarget)
  3590. {
  3591. // opndOr = ORR opndSrc1, opndSrc2
  3592. // TST opndOr, AtomTag_Ptr
  3593. // BNE $labelTarget
  3594. IR::RegOpnd * opndOr = IR::RegOpnd::New(TyMachPtr, this->m_func);
  3595. IR::Instr * instr = IR::Instr::New(Js::OpCode::ORR, opndOr, opndSrc1, opndSrc2, this->m_func);
  3596. insertInstr->InsertBefore(instr);
  3597. instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
  3598. instr->SetSrc1(opndOr);
  3599. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag_IntPtr, TyMachReg, this->m_func));
  3600. insertInstr->InsertBefore(instr);
  3601. instr = IR::BranchInstr::New(Js::OpCode::BNE, labelTarget, this->m_func);
  3602. insertInstr->InsertBefore(instr);
  3603. }
bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    // Test whether opndSrc is a real object (no tag bits set) and branch
    // accordingly. Returns true if a runtime test was emitted, false when the
    // answer was statically known and no code was needed.
    //
    //   fContinueLabel == true:  labelTarget is the "is an object" continuation (BEQ).
    //   fContinueLabel == false: labelTarget is the "not an object" target (BNE).
    if (opndSrc->IsTaggedValue() && fContinueLabel)
    {
        // Statically known tagged: execution will fall through into the
        // non-object path; no test or branch needed.
        // Insert delete branch opcode to tell the dbChecks not to assert on the helper label we may fall through into
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);
        return false;
    }
    else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
    {
        // Statically known to be an object: the "not an object" branch can never
        // be taken, so emit nothing.
        return false;
    }

    // TST opndSrc, AtomTag_IntPtr -- check the tag bit(s) of the var
    IR::Instr * instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
    instr->SetSrc1(opndSrc);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag_IntPtr, TyMachReg, this->m_func));
    insertInstr->InsertBefore(instr);

    if (fContinueLabel)
    {
        // BEQ $labelTarget -- no tag bits set: it's an object
        instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        // Mark the fall-through with a label. NOTE(review): this label is not
        // created as an opHelper label — presumably it only delimits the
        // fall-through block; confirm against dbChecks expectations.
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // BNE $labelTarget -- a tag bit is set: it's not an object
        instr = IR::BranchInstr::New(Js::OpCode::BNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    return true;
}
  3638. IR::BranchInstr *
  3639. LowererMD::GenerateLocalInlineCacheCheck(
  3640. IR::Instr * instrLdSt,
  3641. IR::RegOpnd * opndType,
  3642. IR::RegOpnd * opndInlineCache,
  3643. IR::LabelInstr * labelNext,
  3644. bool checkTypeWithoutProperty)
  3645. {
  3646. // Generate:
  3647. //
  3648. // s3 = LDR inlineCache->u.local.type
  3649. // CMP type, s3
  3650. // BNE $next
  3651. IR::Instr * instr;
  3652. IR::IndirOpnd * typeOpnd;
  3653. // s3 = LDR [inlineCache, offset(u.local.type)]
  3654. IR::RegOpnd * s3 = IR::RegOpnd::New(TyMachReg, instrLdSt->m_func);
  3655. if (checkTypeWithoutProperty)
  3656. {
  3657. typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.typeWithoutProperty), TyMachPtr, instrLdSt->m_func);
  3658. }
  3659. else
  3660. {
  3661. typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachPtr, instrLdSt->m_func);
  3662. }
  3663. instr = IR::Instr::New(Js::OpCode::LDR, s3, typeOpnd, instrLdSt->m_func);
  3664. instrLdSt->InsertBefore(instr);
  3665. // CMP s1, s3
  3666. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3667. instr->SetSrc1(opndType);
  3668. instr->SetSrc2(s3);
  3669. instrLdSt->InsertBefore(instr);
  3670. // BNE $next
  3671. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::BNE, labelNext, instrLdSt->m_func);
  3672. instrLdSt->InsertBefore(branchInstr);
  3673. return branchInstr;
  3674. }
void
LowererMD::GenerateFlagInlineCacheCheckForGetterSetter(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelNext)
{
    // Test the accessor flag bits of the inline cache and jump to $next when none
    // of the accepted accessor flags are set. Which flags are accepted depends on
    // which accessor-inlining phases are enabled: with getter inlining off only
    // setter caches pass, and vice versa; with both enabled, either passes.
    uint accessorFlagMask;
    if (PHASE_OFF(Js::InlineGettersPhase, insertBeforeInstr->m_func))
    {
        accessorFlagMask = Js::InlineCache::GetSetterFlagMask();
    }
    else if (PHASE_OFF(Js::InlineSettersPhase, insertBeforeInstr->m_func))
    {
        accessorFlagMask = Js::InlineCache::GetGetterFlagMask();
    }
    else
    {
        accessorFlagMask = Js::InlineCache::GetGetterSetterFlagMask();
    }

    // Generate:
    //
    //      TST [&(inlineCache->u.accessor.flags)], accessorFlagMask
    //      BEQ $next
    IR::Instr * instr;
    IR::Opnd* flagsOpnd;
    // The flags are read with a byte-sized load from u.accessor.rawUInt16
    // (assumes the tested flag bits live in the low byte — TODO confirm).
    flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, this->m_func);
    instr = IR::Instr::New(Js::OpCode::TST,this->m_func);
    instr->SetSrc1(flagsOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New(accessorFlagMask, TyInt8, this->m_func));
    insertBeforeInstr->InsertBefore(instr);
    // A memory source may not be directly encodable for TST on this target; let
    // the legalizer rewrite the instruction as needed.
    LegalizeMD::LegalizeInstr(instr, false);

    // BEQ $next
    instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelNext, this->m_func);
    insertBeforeInstr->InsertBefore(instr);
}
  3711. IR::BranchInstr *
  3712. LowererMD::GenerateFlagInlineCacheCheck(
  3713. IR::Instr * instrLdSt,
  3714. IR::RegOpnd * opndType,
  3715. IR::RegOpnd * opndInlineCache,
  3716. IR::LabelInstr * labelNext)
  3717. {
  3718. // Generate:
  3719. //
  3720. // s3 = LDR inlineCache->u.flags.type
  3721. // CMP type, s3
  3722. // BNE $next
  3723. IR::Instr * instr;
  3724. // LDR s3, [inlineCache, offset(u.flags.type)]
  3725. IR::RegOpnd *s3 = IR::RegOpnd::New(TyMachReg, instrLdSt->m_func);
  3726. IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.accessor.type), TyMachPtr, instrLdSt->m_func);
  3727. instr = IR::Instr::New(Js::OpCode::LDR, s3, opndIndir, instrLdSt->m_func);
  3728. instrLdSt->InsertBefore(instr);
  3729. // CMP type, s3
  3730. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3731. instr->SetSrc1(opndType);
  3732. instr->SetSrc2(s3);
  3733. instrLdSt->InsertBefore(instr);
  3734. // BNE $next
  3735. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::BNE, labelNext, instrLdSt->m_func);
  3736. instrLdSt->InsertBefore(branchInstr);
  3737. return branchInstr;
  3738. }
  3739. IR::BranchInstr *
  3740. LowererMD::GenerateProtoInlineCacheCheck(
  3741. IR::Instr * instrLdSt,
  3742. IR::RegOpnd * opndType,
  3743. IR::RegOpnd * opndInlineCache,
  3744. IR::LabelInstr * labelNext)
  3745. {
  3746. // Generate:
  3747. //
  3748. // s3 = LDR inlineCache->u.proto.type
  3749. // CMP type, s3
  3750. // BNE $next
  3751. IR::Instr * instr;
  3752. // LDR s3, [inlineCache, offset(u.proto.type)]
  3753. IR::RegOpnd *s3 = IR::RegOpnd::New(TyMachReg, instrLdSt->m_func);
  3754. IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.proto.type), TyMachPtr, instrLdSt->m_func);
  3755. instr = IR::Instr::New(Js::OpCode::LDR, s3, opndIndir, instrLdSt->m_func);
  3756. instrLdSt->InsertBefore(instr);
  3757. // CMP type, s3
  3758. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3759. instr->SetSrc1(opndType);
  3760. instr->SetSrc2(s3);
  3761. instrLdSt->InsertBefore(instr);
  3762. // BNE $next
  3763. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::BNE, labelNext, instrLdSt->m_func);
  3764. instrLdSt->InsertBefore(branchInstr);
  3765. return branchInstr;
  3766. }
void
LowererMD::GenerateLdFldFromLocalInlineCache(
    IR::Instr * instrLdFld,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Load a property value through a matching local inline cache. The type check
    // has already been emitted by the caller; here we fetch the cached slot index
    // and load the value from either the object's inline slots (isInlineSlot) or
    // its aux slot array.
    //
    // Generate:
    //
    //     LDR s1, [base, offset(auxSlots)]                  -- aux-slot case only
    //     LDR s2, [inlineCache, offset(u.local.slotIndex)]  -- load the cached slot index
    //     LDR dst, [s1|base, s2, LSL #scale]                -- load the value from the slot
    //     B $fallthru
    IR::Instr * instr;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndSlotArray = nullptr;

    if (!isInlineSlot)
    {
        // LDR s1, [base, offset(auxSlots)] -- load the aux slot array
        opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
        opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndSlotArray, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // s2 = LDR [inlineCache, offset(u.local.slotIndex)] -- load the cached slot index
    IR::RegOpnd * s2 = IR::RegOpnd::New(TyUint16, instrLdFld->m_func);
    opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, s2, opndIndir, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (isInlineSlot)
    {
        // LDR dst, [base, s2, LSL #scale] -- inline slots live directly in the object
        opndIndir = IR::IndirOpnd::New(opndBase, s2, GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }
    else
    {
        // LDR dst, [s1, s2, LSL #scale] -- load the value from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndSlotArray, s2, GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);
}
void
LowererMD::GenerateLdFldFromProtoInlineCache(
    IR::Instr * instrLdFld,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Load a property value from the prototype object recorded in a matching
    // proto inline cache. Note that opndBase is unused here: the load goes
    // through the cached prototype object, not the instance.
    //
    // Generate:
    //
    //     LDR s1, [inlineCache, offset(u.proto.prototypeObject)]
    //     LDR s1, [s1, offset(auxSlots)]                    -- aux-slot case only
    //     LDR s2, [inlineCache, offset(u.proto.slotIndex)]
    //     LDR dst, [s1, s2, LSL #scale]
    //     B $fallthru
    IR::Instr * instr;
    IR::RegOpnd * opndProtoSlots = nullptr;

    // LDR s1, [inlineCache, offset(u.proto.prototypeObject)]
    IR::RegOpnd * opndProto = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
    IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.proto.prototypeObject), TyMachReg, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, opndProto, opndIndir, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (!isInlineSlot)
    {
        // LDR s1, [s1, offset(auxSlots)] -- load the prototype's aux slot array
        opndProtoSlots = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
        opndIndir = IR::IndirOpnd::New(opndProto, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndProtoSlots, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // LDR s2, [inlineCache, offset(u.proto.slotIndex)] -- cached slot index
    IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyUint16, instrLdFld->m_func);
    opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.proto.slotIndex), TyUint16, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, opndSlotIndex, opndIndir, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (isInlineSlot)
    {
        // LDR dst, [proto, slotIndex, LSL #scale] -- inline slots live in the prototype object
        opndIndir = IR::IndirOpnd::New(opndProto, opndSlotIndex, GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }
    else
    {
        // LDR dst, [protoSlots, slotIndex, LSL #scale] -- load from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndProtoSlots, opndSlotIndex, GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);
}
void
LowererMD::GenerateLdLocalFldFromFlagInlineCache(
    IR::Instr * instrLdFld,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Load a property value from the instance itself using the slot index stored
    // in the accessor ("flags") section of the inline cache. Mirrors
    // GenerateLdFldFromLocalInlineCache, but reads u.accessor.slotIndex.
    //
    // Generate:
    //
    //     LDR s1, [base, offset(auxSlots)]                     -- aux-slot case only
    //     LDR s2, [inlineCache, offset(u.accessor.slotIndex)]  -- load the cached slot index
    //     LDR dst, [s1|base, s2, LSL #scale]                   -- load the value from the slot
    //     B $fallthru
    IR::Instr * instr;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndSlotArray = nullptr;

    if (!isInlineSlot)
    {
        // LDR s1, [base, offset(auxSlots)] -- load the aux slot array
        opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
        opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndSlotArray, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // s2 = LDR [inlineCache, offset(u.accessor.slotIndex)] -- load the cached slot index
    IR::RegOpnd * s2 = IR::RegOpnd::New(TyUint16, instrLdFld->m_func);
    opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, s2, opndIndir, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (isInlineSlot)
    {
        // LDR dst, [base, s2, LSL #scale] -- inline slots live directly in the object
        opndIndir = IR::IndirOpnd::New(opndBase, s2, GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }
    else
    {
        // LDR dst, [s1, s2, LSL #scale] -- load the value from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndSlotArray, s2, GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);
}
void
LowererMD::GenerateLdFldFromFlagInlineCache(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndBase,
    IR::RegOpnd * opndInlineCache,
    IR::Opnd * opndDst,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Load a property value from the object recorded in the accessor ("flags")
    // section of the inline cache. Note that opndBase is unused here: the load
    // goes through the cached u.accessor.object, not the instance.
    //
    // Generate:
    //
    //     LDR s1, [inlineCache, offset(u.accessor.object)]
    //     LDR s1, [s1, offset(auxSlots)]                       -- aux-slot case only
    //     LDR s2, [inlineCache, offset(u.accessor.slotIndex)]
    //     LDR dst, [s1, s2, LSL #scale]
    //     B $fallthru
    IR::Instr * instr;
    IR::RegOpnd * opndObjSlots = nullptr;

    // LDR s1, [inlineCache, offset(u.accessor.object)]
    IR::RegOpnd * object = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, object, opndIndir, this->m_func);
    insertBeforeInstr->InsertBefore(instr);

    if (!isInlineSlot)
    {
        // LDR s1, [s1, offset(auxSlots)] -- load the cached object's aux slot array
        opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
        opndIndir = IR::IndirOpnd::New(object, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndObjSlots, opndIndir, this->m_func);
        insertBeforeInstr->InsertBefore(instr);
    }

    // LDR s2, [inlineCache, offset(u.accessor.slotIndex)] -- cached slot index
    IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyUint16, this->m_func);
    opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, opndSlotIndex, opndIndir, this->m_func);
    insertBeforeInstr->InsertBefore(instr);

    if (isInlineSlot)
    {
        // LDR dst, [object, slotIndex, LSL #scale] -- inline slots live in the cached object
        opndIndir = IR::IndirOpnd::New(object, opndSlotIndex, this->GetDefaultIndirScale(), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, this->m_func);
        insertBeforeInstr->InsertBefore(instr);
    }
    else
    {
        // LDR dst, [objSlots, slotIndex, LSL #scale] -- load from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, this->GetDefaultIndirScale(), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LDR, opndDst, opndIndir, this->m_func);
        insertBeforeInstr->InsertBefore(instr);
    }

    // B $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, this->m_func);
    insertBeforeInstr->InsertBefore(instr);
}
  3971. void
  3972. LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndTaggedType)
  3973. {
  3974. // taggedType = OR type, InlineCacheAuxSlotTypeTag
  3975. IR::IntConstOpnd * opndAuxSlotTag = IR::IntConstOpnd::New(InlineCacheAuxSlotTypeTag, TyInt8, instrLdSt->m_func);
  3976. IR::Instr * instr = IR::Instr::New(Js::OpCode::ORR, opndTaggedType, opndType, opndAuxSlotTag, instrLdSt->m_func);
  3977. instrLdSt->InsertBefore(instr);
  3978. }
void
LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
{
    // Index into a polymorphic inline cache: hash the type pointer into a slot
    // index and advance opndInlineCache (in place) to point at that slot.
    //
    // Conceptually:
    //
    //      MOV r1, type
    //      LSR r1, r1, #PolymorphicInlineCacheShift
    //      AND r1, r1, #(size - 1)
    //      LSL r1, r1, #log2(sizeof(Js::InlineCache))
    //      ADD inlineCache, inlineCache, r1
    IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, instrLdSt->m_func);
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndOffset, opndType, instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);

    IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
    IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
    // instead of generating
    //      LSR r1, r1, #PolymorphicInlineCacheShift
    //      AND r1, r1, #(size - 1)
    //      LSL r1, r1, #log2(sizeof(Js::InlineCache))
    //
    // we can fold the trailing LSL into the LSR and widen the mask, generating:
    //      LSR r1, r1, #(PolymorphicInlineCacheShift - log2(sizeof(Js::InlineCache))
    //      AND r1, r1, #(size - 1) << log2(sizeof(Js::InlineCache))
    // The fold requires a strictly positive net right shift.
    Assert(rightShiftAmount > leftShiftAmount);
    instr = IR::Instr::New(Js::OpCode::LSR, opndOffset, opndOffset, IR::IntConstOpnd::New(rightShiftAmount - leftShiftAmount, TyUint8, instrLdSt->m_func, true), instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);
    // polymorphicInlineCacheSize is assumed to be a power of two so that
    // (size - 1) is a valid index mask — TODO confirm against the allocator.
    instr = IR::Instr::New(Js::OpCode::AND, opndOffset, opndOffset, IR::IntConstOpnd::New((polymorphicInlineCacheSize - 1) << leftShiftAmount, TyMachPtr, instrLdSt->m_func, true), instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);

    // ADD inlineCache, inlineCache, r1 -- advance the cache pointer to the slot
    instr = IR::Instr::New(Js::OpCode::ADD, opndInlineCache, opndInlineCache, opndOffset, instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);
}
  4012. ///----------------------------------------------------------------------------
  4013. ///
  4014. /// LowererMD::GenerateFastLdMethodFromFlags
  4015. ///
  4016. /// Make use of the helper to cache the type and slot index used to do a LdFld
  4017. /// and do an inline load from the appropriate slot if the type hasn't changed
  4018. /// since the last time this LdFld was executed.
  4019. ///
  4020. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
{
    // Emit the inline fast path for LdMethodFromFlags: check the accessor flag
    // cache with both the untagged and tagged type, and load the method from the
    // cached slot on a hit; on a miss, bail out. Returns true (the caller's
    // original instruction is consumed by the bailout lowering here).
    IR::LabelInstr *   labelFallThru;
    IR::LabelInstr *   bailOutLabel;
    IR::Opnd *         opndSrc;
    IR::Opnd *         opndDst;
    IR::RegOpnd *      opndBase;
    IR::RegOpnd *      opndType;
    IR::RegOpnd *      opndInlineCache;
    intptr_t           inlineCache;

    opndSrc = instrLdFld->GetSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
              "Expected property sym operand as src of LdFldFlags");

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
    Assert(propertySymOpnd->m_runtimeInlineCache);
    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));

    // TODO: LdMethodFromFlags doesn't participate in object type specialization. We should be using a temporary
    // register without a type sym here.
    if (propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
        StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
        opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
    }
    else
    {
        opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    }

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    opndDst = instrLdFld->GetDst();

    inlineCache = propertySymOpnd->m_runtimeInlineCache;
    Assert(inlineCache != 0);

    opndInlineCache = IR::RegOpnd::New(TyMachReg, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Label to jump to (or fall through to) when bailing out
    bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);

    LowererMD::CreateAssign(opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), instrLdFld);
    IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Bail out if the base is a tagged value; otherwise load its type.
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);

    // Blindly do the check for getter flag first and then do the type check.
    // We avoid repeated check for getter flag when the function object may be in either
    // inline slots or auxiliary slots.
    GenerateFlagInlineCacheCheckForGetterSetter(instrLdFld, opndInlineCache, bailOutLabel);

    // Check the flag cache with the untagged type (inline-slot layout); on type
    // mismatch fall through to the tagged (aux-slot) check below.
    GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
    GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndInlineCache, opndDst, labelFallThru, true);

    // Check the flag cache with the tagged type (aux-slot layout).
    instrLdFld->InsertBefore(labelFlagAux);
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
    GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
    GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndInlineCache, opndDst, labelFallThru, false);

    instrLdFld->InsertBefore(bailOutLabel);
    instrLdFld->InsertAfter(labelFallThru);
    instrLdFld->UnlinkSrc1();
    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    this->m_lowerer->GenerateBailOut(instrLdFld);

    return true;
}
  4082. //----------------------------------------------------------------------------
  4083. //
// LowererMD::GenerateFastScopedFld
  4085. //
  4086. // This is a helper call which generates asm for both
  4087. // ScopedLdFld & ScopedStFld
  4088. //
  4089. //----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateFastScopedFld(IR::Instr * instrScopedFld, bool isLoad)
{
    // Shared fast path for ScopedLdFld (isLoad) and ScopedStFld (!isLoad):
    // when the frame display holds exactly one scope and that scope's type
    // matches the local inline cache, access the cached slot directly;
    // otherwise jump to $helper, which the caller lowers into the
    // PatchGet/SetPropertyScoped runtime call.
    //
    // Generated shape:
    //
    //     LDR s1, [base, offset(length)]
    //     CMP s1, 1                       -- fast path only when there is exactly one scope
    //     BNE $helper
    //     LDR s2, [base, offset(scopes)]  -- load the first (only) scope
    //     <object test + type load on s2, bail to $helper>
    //     LDIMM s4, inlineCache
    //     <tagged-type check against u.local.type, bail to $helper>
    //     LDR s6, [s2, offset(slots)]     -- load the slots array
    //     LDR s7, [s4, offset(u.local.slotIndex)]
    //     if (isLoad)  LDR dst, [s6, s7, LSL #scale]
    //     else         STR src, [s6, s7, LSL #scale]
    //     B $done
    //  $helper:
    //     (caller emits the runtime-helper call here)
    //  $done:
    IR::Instr *        instr;
    IR::Instr *        instrPrev = instrScopedFld->m_prev;
    IR::RegOpnd *      opndBase;
    IR::RegOpnd *      opndReg1; // s1
    IR::RegOpnd *      opndReg2; // s2
    IR::RegOpnd *      opndInlineCache; // s4
    IR::IndirOpnd *    indirOpnd;
    IR::Opnd *         propertyBase;
    IR::LabelInstr *   labelHelper;
    IR::LabelInstr *   labelFallThru;

    // For a load the property operand is the source; for a store it's the destination.
    if (isLoad)
    {
        propertyBase = instrScopedFld->GetSrc1();
    }
    else
    {
        propertyBase = instrScopedFld->GetDst();
    }

    AssertMsg(propertyBase->IsSymOpnd() && propertyBase->AsSymOpnd()->IsPropertySymOpnd() && propertyBase->AsSymOpnd()->m_sym->IsPropertySym(),
              "Expected property sym operand of ScopedLdFld or ScopedStFld");

    IR::PropertySymOpnd * propertySymOpnd = propertyBase->AsPropertySymOpnd();
    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    // LDR s1, [base, offset(length)] -- get the length of the frame display
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, opndReg1, indirOpnd, this->m_func);
    instrScopedFld->InsertBefore(instr);

    // CMP s1, 1 -- the fast path handles only a single-scope frame display
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndReg1);
    instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
    instrScopedFld->InsertBefore(instr);

    // BNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
    instrScopedFld->InsertBefore(instr);

    // LDR s2, [base, offset(scopes)] -- load the first scope
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyInt32,this->m_func);
    opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LDR, opndReg2, indirOpnd, this->m_func);
    instrScopedFld->InsertBefore(instr);

    // Verify the scope object is a real object and load its type; then check the
    // type against the local inline cache (tagged for aux slots) and bail to
    // $helper on any mismatch.
    opndInlineCache = IR::RegOpnd::New(TyInt32, this->m_func);
    // Mark the scope as known-not-int so downstream opts don't re-test the tag.
    opndReg2->m_sym->m_isNotInt = true;

    IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrScopedFld, opndReg2, opndType, labelHelper);
    LowererMD::CreateAssign(opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrScopedFld, propertySymOpnd), instrScopedFld);

    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Check the local cache with the tagged type (aux-slot layout only).
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrScopedFld, opndType, opndTaggedType);
    GenerateLocalInlineCacheCheck(instrScopedFld, opndTaggedType, opndInlineCache, labelHelper);

    if (isLoad)
    {
        // On a hit, load the value from the cached aux slot.
        IR::Opnd *opndDst = instrScopedFld->GetDst();
        GenerateLdFldFromLocalInlineCache(instrScopedFld, opndReg2, opndDst, opndInlineCache, labelFallThru, false);
    }
    else
    {
        // On a hit, store the value into the cached aux slot.
        IR::Opnd *opndSrc = instrScopedFld->GetSrc1();
        GenerateStFldFromLocalInlineCache(instrScopedFld, opndReg2, opndSrc, opndInlineCache, labelFallThru, false);
    }

    // $helper:
    //      if (isLoad) {
    //          dst = BLX PatchGetPropertyScoped(inlineCache, opndBase, propertyId, srcBase, scriptContext)
    //      }
    //      else {
    //          BLX PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
    //      }
    // $fallthru:
    // (The helper call itself is emitted by the caller; instrScopedFld sits
    // between these two labels and is lowered afterwards.)
    instrScopedFld->InsertBefore(labelHelper);
    instrScopedFld->InsertAfter(labelFallThru);

    return instrPrev;
}
  4194. //----------------------------------------------------------------------------
  4195. //
  4196. // LowererMD::GenerateFastScopedLdFld
  4197. //
  4198. // Make use of the helper to cache the type and slot index used to do a ScopedLdFld
  4199. // when the scope is an array of length 1.
  4200. // Extract the only element from array and do an inline load from the appropriate slot
  4201. // if the type hasn't changed since the last time this ScopedLdFld was executed.
  4202. //
  4203. //----------------------------------------------------------------------------
  4204. IR::Instr *
  4205. LowererMD::GenerateFastScopedLdFld(IR::Instr * instrLdScopedFld)
  4206. {
  4207. //Helper GenerateFastScopedFldLookup generates following:
  4208. //
  4209. // LDR s1, [base, offset(length)]
  4210. // CMP s1, 1 -- get the length on array and test if it is 1.
  4211. // BNE $helper
  4212. // LDR s2, [base, offset(scopes)] -- load the first scope
  4213. // LDR s3, [s2, offset(type)]
  4214. // LDIMM s4, inlineCache
  4215. // LDR s5, [s4, offset(u.local.type)]
  4216. // CMP s3, s5 -- check type
  4217. // BNE $helper
  4218. // LDR s6, [s2, offset(slots)] -- load the slots array
  4219. // LDR s7 , [s4, offset(u.local.slotIndex)] -- load the cached slot index
  4220. // LDR dst, [s6, s7, LSL #2] -- load the value from the slot
  4221. // B $done
  4222. //$helper:
  4223. // dst = BLX PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
  4224. //$done:
  4225. return GenerateFastScopedFld(instrLdScopedFld, true);
  4226. }
  4227. //----------------------------------------------------------------------------
  4228. //
  4229. // LowererMD::GenerateFastScopedStFld
  4230. //
  4231. // Make use of the helper to cache the type and slot index used to do a ScopedStFld
  4232. // when the scope is an array of length 1.
  4233. // Extract the only element from array and do an inline load from the appropriate slot
  4234. // if the type hasn't changed since the last time this ScopedStFld was executed.
  4235. //
  4236. //----------------------------------------------------------------------------
  4237. IR::Instr *
  4238. LowererMD::GenerateFastScopedStFld(IR::Instr * instrStScopedFld)
  4239. {
  4240. // LDR s1, [base, offset(length)]
  4241. // CMP s1, 1 -- get the length on array and test if it is 1.
  4242. // BNE $helper
  4243. // LDR s2, [base, offset(scopes)] -- load the first scope
  4244. // LDR s3, [s2, offset(type)]
  4245. // LDIMM s4, inlineCache
  4246. // LDR s5, [s4, offset(u.local.type)]
  4247. // CMP s3, s5 -- check type
  4248. // BNE $helper
  4249. // LDR s6, [s2, offset(slots)] -- load the slots array
  4250. // LDR s7 , [s4, offset(u.local.slotIndex)] -- load the cached slot index
  4251. // STR src, [s6, s7, LSL #2] -- store the value directly at the slot
  4252. // B $done
  4253. //$helper:
  4254. // BLX PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
  4255. //$done:
  4256. return GenerateFastScopedFld(instrStScopedFld, false);
  4257. }
  4258. void
  4259. LowererMD::GenerateStFldFromLocalInlineCache(
  4260. IR::Instr * instrStFld,
  4261. IR::RegOpnd * opndBase,
  4262. IR::Opnd * opndSrc,
  4263. IR::RegOpnd * opndInlineCache,
  4264. IR::LabelInstr * labelFallThru,
  4265. bool isInlineSlot)
  4266. {
  4267. IR::RegOpnd * opndSlotArray = nullptr;
  4268. IR::IndirOpnd * opndIndir;
  4269. IR::Instr * instr;
  4270. if (!isInlineSlot)
  4271. {
  4272. // s2 = MOV base->slots -- load the slot array
  4273. opndSlotArray = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  4274. opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrStFld->m_func);
  4275. LowererMD::CreateAssign(opndSlotArray, opndIndir, instrStFld);
  4276. }
  4277. // LDR s5, [s2, offset(u.local.slotIndex)] -- load the cached slot index
  4278. IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyUint16, instrStFld->m_func);
  4279. opndIndir = IR::IndirOpnd::New(opndInlineCache, offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrStFld->m_func);
  4280. instr = IR::Instr::New(Js::OpCode::LDR, opndSlotIndex, opndIndir, instrStFld->m_func);
  4281. instrStFld->InsertBefore(instr);
  4282. if (isInlineSlot)
  4283. {
  4284. // STR src, [base, s5, LSL #2] -- store the value directly to the slot [s4 + s5 * 4] = src
  4285. opndIndir = IR::IndirOpnd::New(opndBase, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  4286. instr = IR::Instr::New(Js::OpCode::STR, opndIndir, opndSrc, instrStFld->m_func);
  4287. instrStFld->InsertBefore(instr);
  4288. LegalizeMD::LegalizeInstr(instr, false);
  4289. }
  4290. else
  4291. {
  4292. // STR src, [s4, s5, LSL #2] -- store the value directly to the slot [s4 + s5 * 4] = src
  4293. opndIndir = IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  4294. instr = IR::Instr::New(Js::OpCode::STR, opndIndir, opndSrc, instrStFld->m_func);
  4295. instrStFld->InsertBefore(instr);
  4296. LegalizeMD::LegalizeInstr(instr, false);
  4297. }
  4298. // B $done
  4299. instr = IR::BranchInstr::New(Js::OpCode::B, labelFallThru, instrStFld->m_func);
  4300. instrStFld->InsertBefore(instr);
  4301. }
  4302. IR::Opnd *
  4303. LowererMD::CreateStackArgumentsSlotOpnd()
  4304. {
  4305. // Save the newly-created args object to its dedicated stack slot.
  4306. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(nullptr, FRAME_REG , TyMachReg, m_func),
  4307. -MachArgsSlotOffset, TyMachPtr, m_func);
  4308. return indirOpnd;
  4309. }
  4310. //
  4311. // jump to $labelHelper, based on the result of TST
  4312. //
  4313. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst, bool fContinueLabel /* = false */)
  4314. {
  4315. // TEST src1, AtomTag
  4316. IR::Instr* instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
  4317. instr->SetSrc1(opndSrc);
  4318. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func));
  4319. insertInstr->InsertBefore(instr);
  4320. if(fContinueLabel)
  4321. {
  4322. // BNE $labelHelper
  4323. instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
  4324. insertInstr->InsertBefore(instr);
  4325. }
  4326. else
  4327. {
  4328. // BEQ $labelHelper
  4329. instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
  4330. insertInstr->InsertBefore(instr);
  4331. }
  4332. }
  4333. void LowererMD::GenerateInt32ToVarConversion(IR::Opnd * opndSrc, IR::Instr * insertInstr )
  4334. {
  4335. AssertMsg(opndSrc->IsRegOpnd(), "NYI for other types");
  4336. // Shift left & tag.
  4337. // For now this is used only for actual arguments count can only be 24 bits long and non need to check for overflow
  4338. IR:: Instr* instr = IR::Instr::New(Js::OpCode::LSL, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  4339. insertInstr->InsertBefore(instr);
  4340. instr = IR::Instr::New(Js::OpCode::ADD, opndSrc, opndSrc,
  4341. IR::IntConstOpnd::New(Js::VarTag_Shift, TyMachReg, this->m_func),
  4342. this->m_func);
  4343. insertInstr->InsertBefore(instr);
  4344. }
  4345. IR::RegOpnd *
  4346. LowererMD::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * instrFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
  4347. {
  4348. // Generates:
  4349. // int32Opnd = ASRS opnd, Js::VarTag_Shift -- shift-out tag from opnd
  4350. // BCC $helper -- if not tagged int, jmp to $helper
  4351. // Returns: index32Opnd
  4352. Assert(opnd->IsVar());
  4353. IR::RegOpnd * int32Opnd = IR::RegOpnd::New(TyInt32, this->m_func);
  4354. // int32Opnd = ASRS opnd, Js::VarTag_Shift -- shift-out tag from indexOpnd
  4355. IR::Instr *instr = IR::Instr::New(Js::OpCode::ASRS, int32Opnd, opnd,
  4356. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  4357. insertBeforeInstr->InsertBefore(instr);
  4358. LegalizeMD::LegalizeInstr(instr, false);
  4359. // No need to check if we already know that it is a tagged int.
  4360. if (generateTagCheck)
  4361. {
  4362. Assert(!opnd->IsTaggedInt());
  4363. // BCC $helper -- if not tagged int, jmp to $helper
  4364. instr = IR::BranchInstr::New(Js::OpCode::BCC, instrFail, this->m_func);
  4365. insertBeforeInstr->InsertBefore(instr);
  4366. }
  4367. return int32Opnd;
  4368. }
// Lowers an index operand (var, int32, or uint32) to an int32 register.
// For a var index, branches to notTaggedIntLabel if it is not a tagged int;
// unless skipNegativeCheck, branches to negativeLabel if the index is negative.
// Returns the (possibly new) int-typed index operand.
IR::RegOpnd *LowererMD::LoadNonnegativeIndex(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notTaggedIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(indexOpnd);
    Assert(indexOpnd->IsVar() || indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
    // A uint32 index cannot be negative, so callers must skip the sign check.
    Assert(indexOpnd->GetType() != TyUint32 || skipNegativeCheck);
    Assert(!indexOpnd->IsVar() || notTaggedIntLabel);
    Assert(skipNegativeCheck || negativeLabel);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;
    IR::AutoReuseOpnd autoReuseIndexOpnd;

    if(indexOpnd->IsVar())
    {
        if (indexOpnd->GetValueType().IsLikelyFloat())
        {
            // Likely a float index: hand off entirely to the float path.
            return m_lowerer->LoadIndexFromLikelyFloat(indexOpnd, skipNegativeCheck, notTaggedIntLabel, negativeLabel, insertBeforeInstr);
        }

        //     asrs intIndex, index, 1
        //     bcc $notTaggedIntOrNegative
        IR::RegOpnd *const intIndexOpnd = IR::RegOpnd::New(TyInt32, func);
        if(skipNegativeCheck)
        {
            // No sign check required, so the result may be treated as unsigned.
            intIndexOpnd->SetType(TyUint32);
        }
        autoReuseIndexOpnd.Initialize(intIndexOpnd, func, false);
        const bool isTaggedInt = indexOpnd->IsTaggedInt();
        // Second argument presumably requests the flag-setting shift form,
        // needed for the BCC/BMI below unless the operand is known tagged and
        // the negative check is skipped -- TODO confirm against InsertShift.
        Lowerer::InsertShift(
            Js::OpCode::Shr_A,
            !(isTaggedInt && skipNegativeCheck),
            intIndexOpnd,
            indexOpnd,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func),
            insertBeforeInstr);
        if(!isTaggedInt)
        {
            // Carry clear after the shift => tag bit was 0 => not a tagged int.
            Lowerer::InsertBranch(Js::OpCode::BCC, notTaggedIntLabel, insertBeforeInstr);
        }
        indexOpnd = intIndexOpnd;
    }
    else if(!skipNegativeCheck)
    {
        // Already an int register; set flags for the sign test.
        //     tst index, index
        Lowerer::InsertTest(indexOpnd, indexOpnd, insertBeforeInstr);
    }

    if(!skipNegativeCheck)
    {
        // Negative (sign flag set) => out-of-range index.
        //     bmi $notTaggedIntOrNegative
        Lowerer::InsertBranch(Js::OpCode::BMI, negativeLabel, insertBeforeInstr);
    }
    return indexOpnd;
}
  4424. bool LowererMD::GenerateJSBooleanTest(IR::RegOpnd * regSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  4425. {
  4426. IR::Instr* instr;
  4427. if (regSrc->GetValueType().IsBoolean())
  4428. {
  4429. if (fContinueLabel)
  4430. {
  4431. // B $labelTarget
  4432. instr = IR::BranchInstr::New(Js::OpCode::B, labelTarget, this->m_func);
  4433. insertInstr->InsertBefore(instr);
  4434. #if DBG
  4435. if (labelTarget->isOpHelper)
  4436. {
  4437. labelTarget->m_noHelperAssert = true;
  4438. }
  4439. #endif
  4440. }
  4441. return false;
  4442. }
  4443. // CMP src1, vtable<JavaScriptBoolean>
  4444. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  4445. IR::IndirOpnd *vtablePtrOpnd = IR::IndirOpnd::New(regSrc, 0, TyMachPtr, this->m_func);
  4446. instr->SetSrc1(vtablePtrOpnd);
  4447. IR::Opnd *jsBooleanVTable = m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptBoolean);
  4448. instr->SetSrc2(jsBooleanVTable);
  4449. insertInstr->InsertBefore(instr);
  4450. LegalizeMD::LegalizeInstr(instr, false);
  4451. if (fContinueLabel)
  4452. {
  4453. // BEQ $labelTarget
  4454. instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelTarget, this->m_func);
  4455. insertInstr->InsertBefore(instr);
  4456. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4457. insertInstr->InsertBefore(labelHelper);
  4458. }
  4459. else
  4460. {
  4461. // BNE $labelTarget
  4462. instr = IR::BranchInstr::New(Js::OpCode::BNE, labelTarget, this->m_func);
  4463. insertInstr->InsertBefore(instr);
  4464. }
  4465. return true;
  4466. }
// Inlines fast-path for int Mul/Add or int Mul/Sub. If not int, call MulAdd/MulSub helper
//
// Pattern-matches a Mul_A immediately followed by an Add_A that consumes the
// Mul's result (a * b + c) and lowers the pair to a single SMLAL-based
// fast path plus a combined MulAdd helper call. Returns true if the pair was
// consumed (the Mul is removed and the Add becomes the helper call), false if
// the caller should lower the two instructions separately.
bool LowererMD::TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev)
{
    IR::Instr *instrMul = instrAdd->GetPrevRealInstrOrLabel();
    IR::Opnd *addSrc;
    IR::RegOpnd *addCommonSrcOpnd;

    Assert(instrAdd->m_opcode == Js::OpCode::Add_A || instrAdd->m_opcode == Js::OpCode::Sub_A);
    if (instrAdd->m_opcode != Js::OpCode::Add_A)
    {
        // For Add_A we can use SMLAL, but there is no analog of that for Sub_A.
        return false;
    }

    // Mul needs to be a single def reg
    if (instrMul->m_opcode != Js::OpCode::Mul_A || !instrMul->GetDst()->IsRegOpnd())
    {
        // Cannot generate MulAdd
        return false;
    }

    if (instrMul->HasBailOutInfo())
    {
        // Bailout will be generated for the Add, but not the Mul.
        // We could handle this, but this path isn't used that much anymore.
        return false;
    }

    IR::RegOpnd *regMulDst = instrMul->GetDst()->AsRegOpnd();

    if (!regMulDst->m_sym->m_isSingleDef)
    {
        // Cannot generate MulAdd
        return false;
    }

    // Only handle a * b + c, so dst of Mul needs to match left source of Add
    if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc1()))
    {
        addCommonSrcOpnd = instrAdd->GetSrc1()->AsRegOpnd();
        addSrc = instrAdd->GetSrc2();
    }
    else if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc2()))
    {
        addSrc = instrAdd->GetSrc1();
        addCommonSrcOpnd = instrAdd->GetSrc2()->AsRegOpnd();
    }
    else
    {
        return false;
    }

    // Only handle a * b + c where c != a * b
    if (instrAdd->GetSrc1()->IsEqual(instrAdd->GetSrc2()))
    {
        return false;
    }

    // The Mul result must be a temp dying at this Add; otherwise it is still
    // live elsewhere and the Mul cannot be folded away.
    if (!addCommonSrcOpnd->m_isTempLastUse)
    {
        return false;
    }

    IR::Opnd *mulSrc1 = instrMul->GetSrc1();
    IR::Opnd *mulSrc2 = instrMul->GetSrc2();

    // NOTE(review): bails out when BOTH mul operands are known tagged ints --
    // presumably that case is expected to be handled elsewhere; confirm.
    if (mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsTaggedInt()
        && mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsTaggedInt())
    {
        return false;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);

    // Save prevInstr for the main lower loop
    *pInstrPrev = instrMul->m_prev;

    // Generate int31 fast-path for Mul-Add, go to MulAdd helper if it fails, or one of the source is marked notInt
    if (!(addSrc->IsRegOpnd() && addSrc->AsRegOpnd()->m_sym->AsStackSym()->m_isNotInt)
        && !(mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->m_sym->AsStackSym()->m_isNotInt)
        && !(mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->m_sym->AsStackSym()->m_isNotInt))
    {
        // General idea:
        // - mulSrc1: clear 1 but keep *2 - need special test for tagged int
        // - mulSrc2: shift out the tag - test for overflow inplace
        // - addSrc: keep as is - need special test for tagged int
        //
        // Concerns
        // - we don't need to take care of negative zero/-0, here's why:
        //   - per ES5 spec, there are only way to get -0 with add/sub: -0 + -0, -0 -0.
        //   - first one is not applicable because -0 would not be a tagged int, so we'll use the helper.
        //   - second one is also not applicable because this fast path is only for mul-add, not mul-sub.
        //
        // Steps:
        // (If not mulSrc1 and addSrc are Int31's, jump to $helper)
        // s1 =  SUB  mulSrc1, 1     -- remove the tag from mulSrc1 but keep it as *2
        // s2 =  ASRS mulSrc2, 1     -- shift-out tag from mulSrc2
        //       BCC  $helper        -- if not tagged int, jmp to $helper
        // (Now: mulSrc1 in s1, mulSrc2 in s2)
        // r12 = ASR s3, 31          -- make r12 to be sign-extension of the addSrc.
        // r12:s3 = SMLAL s1, s2     -- note: the add source comes from r12:s3, result is already tagged int = mulSrc1Val*2 * mulSrc2Val + addSrcVal * 2 + 1
        // Overflow check:
        // (SMLAL doesn't set the flags but we don't have 32bit overflow <=> r12-unsigned ? r12==0 : all 33 bits of 64bit result are 1's
        //      CMP r12, s3, ASR #31 -- check for overflow (== means no overflow)
        //      BNE $helper          -- bail if the result overflowed
        // Copy the result into dst
        // dst = s3
        //      B $done
        // $helper:
        //      ...
        // $done:

        IR::Instr* instr;
        IR::RegOpnd* s1 = IR::RegOpnd::New(mulSrc1->GetType(), this->m_func);
        IR::RegOpnd* s2 = IR::RegOpnd::New(mulSrc2->GetType(), this->m_func);
        IR::RegOpnd* s3 = IR::RegOpnd::New(addSrc->GetType(), this->m_func);
        IR::RegOpnd* opndRegR12 = IR::RegOpnd::New(nullptr, RegR12, TyMachReg, this->m_func);

        // (Load mulSrc1 at the top so we don't have to do it repeatedly)
        if (!mulSrc1->IsRegOpnd())
        {
            LowererMD::CreateAssign(s1, mulSrc1, instrAdd);
            mulSrc1 = s1;
        }
        // Now: mulSrc1 is regOpnd (in case if it wasn't it's now s1).

        // Load addSrc into s3. We'll use it as source and destination of SMLAL.
        LowererMD::CreateAssign(s3, addSrc, instrAdd);

        // (If not mulSrc1 and addSrc are Int31's, jump to $helper)
        bool areTaggedInts = mulSrc1->IsTaggedInt() && s3->IsTaggedInt();
        if (!areTaggedInts)
        {
            this->GenerateSmIntPairTest(instrAdd, mulSrc1->AsRegOpnd(), s3->AsRegOpnd(), labelHelper);
        }

        // s1 = SUB mulSrc1, 1      -- remove the tag from mulSrc1 but keep it as *2
        instr = IR::Instr::New(Js::OpCode::SUB, s1, mulSrc1, IR::IntConstOpnd::New(Js::VarTag_Shift, TyVar, this->m_func), m_func);
        instrAdd->InsertBefore(instr);

        // s2 = ASRS mulSrc2, 1     -- shift-out tag from mulSrc2
        //      BCC $helper         -- if not tagged int, jmp to $helper
        instr = IR::Instr::New(Js::OpCode::ASRS, s2, mulSrc2, IR::IntConstOpnd::New(Js::VarTag_Shift, TyVar, this->m_func), m_func);
        instrAdd->InsertBefore(instr);
        LegalizeMD::LegalizeInstr(instr, false);
        if (!mulSrc2->IsTaggedInt())    // If we already pre-know it's tagged int, no need to check.
        {
            instr = IR::BranchInstr::New(Js::OpCode::BCC, labelHelper, this->m_func);
            instrAdd->InsertBefore(instr);
        }

        // Now: mulSrc1 in s1, mulSrc2 in s2.

        // r12 = ASR s3, 31      -- make r12 to be sign-extension of the addSrc.
        instr = IR::Instr::New(Js::OpCode::ASR, opndRegR12, s3, IR::IntConstOpnd::New(31, TyVar, this->m_func), m_func);
        instrAdd->InsertBefore(instr);

        // r12:s3 = SMLAL s1, s2  -- note: the add source comes from r12:s3, result is already tagged int = mulSrc1Val*2 * mulSrc2Val + addSrcVal * 2 + 1
        instr = IR::Instr::New(Js::OpCode::SMLAL, s3, s1, s2, this->m_func);
        instrAdd->InsertBefore(instr);

        // Overflow check:
        // (SMLAL doesn't set the flags but we don't have 32bit overflow <=> r12-unsigned ? r12==0 : all 33 bits of 64bit result are 1's
        //      CMP r12, s3, ASR #31 -- check for overflow (== means no overflow)
        //      BNE $helper       -- bail if the result overflowed
        instr = IR::Instr::New(Js::OpCode::CMP_ASR31, this->m_func);
        instr->SetSrc1(opndRegR12);
        instr->SetSrc2(s3);
        instrAdd->InsertBefore(instr);

        instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
        instrAdd->InsertBefore(instr);

        // Copy the result into dst
        // dst = s3
        LowererMD::CreateAssign(instrAdd->GetDst(), s3, instrAdd);
        // NOTE(review): 'instr' here is the BNE emitted above, so this call
        // legalizes that branch rather than the preceding assign -- confirm intent.
        LegalizeMD::LegalizeInstr(instr, false);

        // B $done
        instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
        instrAdd->InsertBefore(instr);

        instrAdd->InsertBefore(labelHelper);
        instrAdd->InsertAfter(labelDone);
    }

    // Generate code to call the Mul-Add helper.
    // Although for the case when one of the source is marked notInt we could just return false from here,
    // it seems that since we did all the checks to see that this is mul+add, it makes sense to use mul-add helper
    // rather than 2 separate helpers - one for mul and one for add (by returning false).
    if (instrAdd->dstIsTempNumber)
    {
        m_lowerer->LoadHelperTemp(instrAdd, instrAdd);
    }
    else
    {
        IR::Opnd *tempOpnd = IR::IntConstOpnd::New(0, TyMachPtr, this->m_func);
        this->LoadHelperArgument(instrAdd, tempOpnd);
    }
    this->m_lowerer->LoadScriptContext(instrAdd);

    IR::JnHelperMethod helper;
    // Pick the helper flavor matching which side of the Add the extra operand
    // was on, and detach that operand from the Add to pass it to the helper.
    if (addSrc == instrAdd->GetSrc2())
    {
        instrAdd->FreeSrc1();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc2();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = IR::HelperOp_MulAddRight;
    }
    else
    {
        AssertMsg(addSrc == instrAdd->GetSrc1(), "How did we get addSrc which not addInstr->Src1/2");
        instrAdd->FreeSrc2();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc1();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = IR::HelperOp_MulAddLeft;
    }

    // Arg2, Arg1: the Mul's operands; the Mul itself is subsumed and removed.
    IR::Opnd *src2 = instrMul->UnlinkSrc2();
    this->LoadHelperArgument(instrAdd, src2);
    IR::Opnd *src1 = instrMul->UnlinkSrc1();
    this->LoadHelperArgument(instrAdd, src1);
    this->ChangeToHelperCall(instrAdd, helper);

    instrMul->Remove();

    return true;
}
  4665. IR::Instr *
  4666. LowererMD::LoadCheckedFloat(
  4667. IR::RegOpnd *opndOrig,
  4668. IR::RegOpnd *opndFloat,
  4669. IR::LabelInstr *labelInline,
  4670. IR::LabelInstr *labelHelper,
  4671. IR::Instr *instrInsert,
  4672. const bool checkForNullInLoopBody)
  4673. {
  4674. // Load one floating-point var into a VFP register, inserting checks to see if it's really a float:
  4675. // Rx = ASRS src, 1
  4676. // BCC $non-int
  4677. // Dx = VMOV Rx
  4678. // flt = VCVT.F64.S32 Dx
  4679. // B $labelInline
  4680. // $non-int
  4681. // LDR Ry, [src]
  4682. // CMP Ry, JavascriptNumber::`vtable'
  4683. // BNE $labelHelper
  4684. // flt = VLDR [t0 + offset(value)]
  4685. IR::Instr * instr = nullptr;
  4686. IR::Opnd * opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  4687. IR::Instr * instrFirst = IR::Instr::New(Js::OpCode::ASRS, opnd, opndOrig,
  4688. IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func),
  4689. this->m_func);
  4690. instrInsert->InsertBefore(instrFirst);
  4691. LegalizeMD::LegalizeInstr(instrFirst, false);
  4692. IR::LabelInstr * labelVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4693. instr = IR::BranchInstr::New(Js::OpCode::BCC, labelVar, this->m_func);
  4694. instrInsert->InsertBefore(instr);
  4695. if (opndOrig->GetValueType().IsLikelyFloat())
  4696. {
  4697. // Make this path helper if value is likely a float
  4698. instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
  4699. }
  4700. //Convert integer to floating point
  4701. Assert(opndFloat->GetType() == TyMachDouble);
  4702. instr = IR::Instr::New(Js::OpCode::VMOVARMVFP, opndFloat, opnd, this->m_func);
  4703. instrInsert->InsertBefore(instr);
  4704. //VCVT.F64.S32 opndFloat, opndFloat
  4705. instr = IR::Instr::New(Js::OpCode::VCVTF64S32, opndFloat, opndFloat, this->m_func);
  4706. instrInsert->InsertBefore(instr);
  4707. instr = IR::BranchInstr::New(Js::OpCode::B, labelInline, this->m_func);
  4708. instrInsert->InsertBefore(instr);
  4709. instrInsert->InsertBefore(labelVar);
  4710. LoadFloatValue(opndOrig, opndFloat, labelHelper, instrInsert, checkForNullInLoopBody);
  4711. return instrFirst;
  4712. }
// Lowers a FromVar that converts a var to a float, including the bailout
// paths when the instruction carries bailout info. Delegates the common
// load/convert emission to EmitLoadFloatCommon; the remainder handles the
// BailOutPrimitiveButString conversion helper and the bailout itself.
void
LowererMD::EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, insertInstr->HasBailOutInfo());
    if (labelDone == nullptr)
    {
        // We're done
        insertInstr->Remove();
        return;
    }

    // $Done    note: insertAfter
    insertInstr->InsertAfter(labelDone);

    if (!insertInstr->HasBailOutInfo())
    {
        // $Done
        insertInstr->Remove();
        return;
    }

    IR::LabelInstr *labelNoBailOut = nullptr;
    IR::SymOpnd *tempSymOpnd = nullptr;

    if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
    {
        if (!this->m_func->tempSymDouble)
        {
            // Lazily allocate the shared stack temp that receives the
            // converted double from the helper.
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
        }

        // LEA r3, tempSymDouble
        IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        tempSymOpnd = IR::SymOpnd::New(this->m_func->tempSymDouble, TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);

        // regBoolResult = to_number_fromPrimitive(value, &dst, allowUndef, scriptContext);
        this->m_lowerer->LoadScriptContext(insertInstr);
        IR::IntConstOpnd *allowUndefOpnd;
        // NOTE(review): the enclosing 'if' already guarantees
        // BailOutPrimitiveButString here, so the else branch below
        // (BailOutNumberOnly) looks unreachable -- confirm.
        if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
        {
            allowUndefOpnd = IR::IntConstOpnd::New(true, TyInt32, this->m_func);
        }
        else
        {
            Assert(insertInstr->GetBailOutKind() == IR::BailOutNumberOnly);
            allowUndefOpnd = IR::IntConstOpnd::New(false, TyInt32, this->m_func);
        }
        this->LoadHelperArgument(insertInstr, allowUndefOpnd);
        this->LoadHelperArgument(insertInstr, reg3Opnd);
        this->LoadHelperArgument(insertInstr, src);
        IR::RegOpnd *regBoolResult = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::Call, regBoolResult, IR::HelperCallOpnd::New(IR::HelperOp_ConvNumber_FromPrimitive, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
        this->LowerCall(instr, 0);

        // TEST regBoolResult, regBoolResult -- zero means conversion failed; fall into the bailout
        instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
        instr->SetSrc1(regBoolResult);
        instr->SetSrc2(regBoolResult);
        insertInstr->InsertBefore(instr);

        // BNE $noBailOut
        labelNoBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::BNE, labelNoBailOut, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // Bailout code
    Assert(insertInstr->m_opcode == Js::OpCode::FromVar);
    insertInstr->UnlinkDst();
    insertInstr->FreeSrc1();
    IR::Instr *bailoutInstr = insertInstr;
    insertInstr = bailoutInstr->m_next;
    this->m_lowerer->GenerateBailOut(bailoutInstr);

    // $noBailOut
    if (labelNoBailOut)
    {
        insertInstr->InsertBefore(labelNoBailOut);

        Assert(dst->IsRegOpnd());

        // VLDR dst, [pResult].f64 -- pick up the converted double from the stack temp
        instr = IR::Instr::New(Js::OpCode::VLDR, dst, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
        LegalizeMD::LegalizeInstr(instr, false);
    }
}
  4794. IR::LabelInstr*
  4795. LowererMD::EmitLoadFloatCommon(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, bool needHelperLabel)
  4796. {
  4797. IR::Instr *instr;
  4798. Assert(src->GetType() == TyVar);
  4799. Assert(dst->GetType() == TyFloat64 || TyFloat32);
  4800. bool isFloatConst = false;
  4801. IR::RegOpnd *regFloatOpnd = nullptr;
  4802. if (src->IsRegOpnd() && src->AsRegOpnd()->m_sym->m_isFltConst)
  4803. {
  4804. IR::RegOpnd *regOpnd = src->AsRegOpnd();
  4805. Assert(regOpnd->m_sym->m_isSingleDef);
  4806. Js::Var value = regOpnd->m_sym->GetFloatConstValueAsVar_PostGlobOpt();
  4807. IR::MemRefOpnd *memRef = IR::MemRefOpnd::New((BYTE*)value + Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  4808. regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  4809. instr = IR::Instr::New(Js::OpCode::VLDR, regFloatOpnd, memRef, this->m_func);
  4810. insertInstr->InsertBefore(instr);
  4811. LegalizeMD::LegalizeInstr(instr, false);
  4812. isFloatConst = true;
  4813. }
  4814. // Src is constant?
  4815. if (src->IsImmediateOpnd() || src->IsFloatConstOpnd())
  4816. {
  4817. regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  4818. m_lowerer->LoadFloatFromNonReg(src, regFloatOpnd, insertInstr);
  4819. isFloatConst = true;
  4820. }
  4821. if (isFloatConst)
  4822. {
  4823. if (dst->GetType() == TyFloat32)
  4824. {
  4825. // VCVT.F32.F64 regOpnd32.f32, regOpnd.f64 -- Convert regOpnd from f64 to f32
  4826. IR::RegOpnd *regOpnd32 = regFloatOpnd->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
  4827. instr = IR::Instr::New(Js::OpCode::VCVTF32F64, regOpnd32, regFloatOpnd, this->m_func);
  4828. insertInstr->InsertBefore(instr);
  4829. // VSTR32 dst, regOpnd32
  4830. instr = IR::Instr::New(Js::OpCode::VMOV, dst, regOpnd32, this->m_func);
  4831. insertInstr->InsertBefore(instr);
  4832. }
  4833. else
  4834. {
  4835. instr = IR::Instr::New(Js::OpCode::VMOV, dst, regFloatOpnd, this->m_func);
  4836. insertInstr->InsertBefore(instr);
  4837. }
  4838. LegalizeMD::LegalizeInstr(instr, false);
  4839. return nullptr;
  4840. }
  4841. Assert(src->IsRegOpnd());
  4842. IR::LabelInstr *labelStore = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4843. IR::LabelInstr *labelHelper;
  4844. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4845. if (needHelperLabel)
  4846. {
  4847. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4848. }
  4849. else
  4850. {
  4851. labelHelper = labelDone;
  4852. }
  4853. IR::RegOpnd *reg2 = IR::RegOpnd::New(TyMachDouble, this->m_func);
  4854. // Load the float value in reg2
  4855. this->LoadCheckedFloat(src->AsRegOpnd(), reg2, labelStore, labelHelper, insertInstr, needHelperLabel);
  4856. // $Store
  4857. insertInstr->InsertBefore(labelStore);
  4858. if (dst->GetType() == TyFloat32)
  4859. {
  4860. IR::RegOpnd *reg2_32 = reg2->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
  4861. // VCVT.F32.F64 r2_32.f32, r2.f64 -- Convert regOpnd from f64 to f32
  4862. instr = IR::Instr::New(Js::OpCode::VCVTF32F64, reg2_32, reg2, this->m_func);
  4863. insertInstr->InsertBefore(instr);
  4864. // VMOV dst, r2_32
  4865. instr = IR::Instr::New(Js::OpCode::VMOV, dst, reg2_32, this->m_func);
  4866. insertInstr->InsertBefore(instr);
  4867. }
  4868. else
  4869. {
  4870. // VMOV dst, r2
  4871. instr = IR::Instr::New(Js::OpCode::VMOV, dst, reg2, this->m_func);
  4872. insertInstr->InsertBefore(instr);
  4873. }
  4874. LegalizeMD::LegalizeInstr(instr, false);
  4875. // B $Done
  4876. instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
  4877. insertInstr->InsertBefore(instr);
  4878. if (needHelperLabel)
  4879. {
  4880. // $Helper
  4881. insertInstr->InsertBefore(labelHelper);
  4882. }
  4883. return labelDone;
  4884. }
  4885. void
  4886. LowererMD::EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
  4887. {
  4888. IR::LabelInstr *labelDone;
  4889. IR::Instr *instr;
  4890. Assert(src->GetType() == TyVar);
  4891. Assert(dst->GetType() == TyFloat64 || TyFloat32);
  4892. Assert(src->IsRegOpnd());
  4893. if (dst->IsIndirOpnd())
  4894. {
  4895. LegalizeMD::LegalizeIndirOpndForVFP(insertInstr, dst->AsIndirOpnd(), false);
  4896. }
  4897. labelDone = EmitLoadFloatCommon(dst, src, insertInstr, true);
  4898. if (labelDone == nullptr)
  4899. {
  4900. // We're done
  4901. return;
  4902. }
  4903. IR::BailOutKind bailOutKind = instrBailOut && instrBailOut->HasBailOutInfo() ? instrBailOut->GetBailOutKind() : IR::BailOutInvalid;
  4904. if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  4905. {
  4906. // Bail out instead of making the helper call.
  4907. Assert(labelBailOut);
  4908. m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, insertInstr);
  4909. insertInstr->InsertBefore(labelDone);
  4910. return;
  4911. }
  4912. IR::Opnd *memAddress = dst;
  4913. if (dst->IsRegOpnd())
  4914. {
  4915. IR::SymOpnd *symOpnd = nullptr;
  4916. if (dst->GetType() == TyFloat32)
  4917. {
  4918. symOpnd = IR::SymOpnd::New(StackSym::New(TyFloat32, this->m_func), TyFloat32, this->m_func);
  4919. this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(float));
  4920. }
  4921. else
  4922. {
  4923. symOpnd = IR::SymOpnd::New(StackSym::New(TyFloat64,this->m_func), TyMachDouble, this->m_func);
  4924. this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(double));
  4925. }
  4926. memAddress = symOpnd;
  4927. }
  4928. // LEA r3, dst
  4929. IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  4930. instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, memAddress, this->m_func);
  4931. insertInstr->InsertBefore(instr);
  4932. // to_number_full(value, &dst, scriptContext);
  4933. // Create dummy binary op to convert into helper
  4934. instr = IR::Instr::New(Js::OpCode::Add_A, this->m_func);
  4935. instr->SetSrc1(src);
  4936. instr->SetSrc2(reg3Opnd);
  4937. insertInstr->InsertBefore(instr);
  4938. if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
  4939. {
  4940. _Analysis_assume_(instrBailOut != nullptr);
  4941. instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
  4942. if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
  4943. {
  4944. IR::Instr * instrShare = instrBailOut->ShareBailOut();
  4945. m_lowerer->LowerBailTarget(instrShare);
  4946. }
  4947. }
  4948. IR::JnHelperMethod helper;
  4949. if (dst->GetType() == TyFloat32)
  4950. {
  4951. helper = IR::HelperOp_ConvFloat_Helper;
  4952. }
  4953. else
  4954. {
  4955. helper = IR::HelperOp_ConvNumber_Helper;
  4956. }
  4957. this->m_lowerer->LowerBinaryHelperMem(instr, helper);
  4958. if (dst->IsRegOpnd())
  4959. {
  4960. Js::OpCode opcode = (dst->GetType() == TyFloat32)? Js::OpCode::VLDR32: Js::OpCode::VLDR;
  4961. instr = IR::Instr::New(opcode, dst , memAddress, this->m_func);
  4962. insertInstr->InsertBefore(instr);
  4963. LegalizeMD::LegalizeInstr(instr, false);
  4964. }
  4965. // $Done
  4966. insertInstr->InsertBefore(labelDone);
  4967. }
void
LowererMD::GenerateNumberAllocation(IR::RegOpnd * opndDst, IR::Instr * instrInsert, bool isHelper)
{
    // Emits an inline bump allocation of a JavascriptNumber from the script
    // context's number allocator:
    //   dst  = allocator->freeObjectList
    //   next = dst + alignedAllocSize
    //   if (next > allocator->endAddress) goto $helper   // out of space
    //   allocator->freeObjectList = next; goto $done
    //   $helper: dst = AllocUninitializedNumber(allocator)
    //   $done:
    // 'isHelper' marks the $done label as a helper label when this sequence is
    // itself emitted on a helper (cold) path.
    size_t alignedAllocSize = Js::RecyclerJavascriptNumberAllocator::GetAlignedAllocSize(
        m_func->GetScriptContextInfo()->IsRecyclerVerifyEnabled(),
        m_func->GetScriptContextInfo()->GetRecyclerVerifyPad());

    IR::RegOpnd * loadAllocatorAddressOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * loadAllocatorAddressInstr = IR::Instr::New(Js::OpCode::LDIMM, loadAllocatorAddressOpnd,
        m_lowerer->LoadScriptContextValueOpnd(instrInsert, ScriptContextValue::ScriptContextNumberAllocator), this->m_func);
    instrInsert->InsertBefore(loadAllocatorAddressInstr);

    IR::IndirOpnd * endAddressOpnd = IR::IndirOpnd::New(loadAllocatorAddressOpnd,
        Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, this->m_func);
    IR::IndirOpnd * freeObjectListOpnd = IR::IndirOpnd::New(loadAllocatorAddressOpnd,
        Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset(), TyMachPtr, this->m_func);

    // LDR dst, allocator->freeObjectList
    IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::LDR, opndDst, freeObjectListOpnd, this->m_func);
    instrInsert->InsertBefore(loadMemBlockInstr);

    // nextMemBlock = ADD dst, allocSize
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::ADD, nextMemBlockOpnd, opndDst,
        IR::IntConstOpnd::New(alignedAllocSize, TyInt32, this->m_func), this->m_func);
    instrInsert->InsertBefore(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    instrInsert->InsertBefore(checkInstr);
    LegalizeMD::LegalizeInstr(checkInstr, false);

    // BHI $helper -- unsigned "higher": the bump would pass the end address
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::BHI, helperLabel, this->m_func);
    instrInsert->InsertBefore(branchInstr);

    // LDR allocator->freeObjectList, nextMemBlock  (commit the bump)
    IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::LDR, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
    instrInsert->InsertBefore(setFreeObjectListInstr);

    // B $done
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    IR::BranchInstr * branchToDoneInstr = IR::BranchInstr::New(Js::OpCode::B, doneLabel, this->m_func);
    instrInsert->InsertBefore(branchToDoneInstr);

    // $helper:
    instrInsert->InsertBefore(helperLabel);

    // arg1 = allocator
    this->LoadHelperArgument(instrInsert, m_lowerer->LoadScriptContextValueOpnd(instrInsert, ScriptContextValue::ScriptContextNumberAllocator));

    // dst = Call AllocUninitializedNumber
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, opndDst,
        IR::HelperCallOpnd::New(IR::HelperAllocUninitializedNumber, this->m_func), this->m_func);
    instrInsert->InsertBefore(instrCall);
    this->LowerCall(instrCall, 0);

    // $done:
    instrInsert->InsertBefore(doneLabel);
}
  5019. void
  5020. LowererMD::GenerateFastRecyclerAlloc(size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, IR::LabelInstr* allocHelperLabel, IR::LabelInstr* allocDoneLabel)
  5021. {
  5022. ScriptContextInfo* scriptContext = this->m_func->GetScriptContextInfo();
  5023. void* allocatorAddress;
  5024. uint32 endAddressOffset;
  5025. uint32 freeListOffset;
  5026. size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
  5027. bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
  5028. Recycler::GetNormalHeapBlockAllocatorInfoForNativeAllocation((void*)scriptContext->GetRecyclerAddr(), alignedSize,
  5029. allocatorAddress, endAddressOffset, freeListOffset,
  5030. allowNativeCodeBumpAllocation, this->m_func->IsOOPJIT());
  5031. IR::RegOpnd * allocatorAddressRegOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  5032. // LDIMM allocatorAddressRegOpnd, allocator
  5033. IR::AddrOpnd* allocatorAddressOpnd = IR::AddrOpnd::New(allocatorAddress, IR::AddrOpndKindDynamicMisc, this->m_func);
  5034. IR::Instr * loadAllocatorAddressInstr = IR::Instr::New(Js::OpCode::LDIMM, allocatorAddressRegOpnd, allocatorAddressOpnd, this->m_func);
  5035. insertionPointInstr->InsertBefore(loadAllocatorAddressInstr);
  5036. IR::IndirOpnd * endAddressOpnd = IR::IndirOpnd::New(allocatorAddressRegOpnd, endAddressOffset, TyMachPtr, this->m_func);
  5037. IR::IndirOpnd * freeObjectListOpnd = IR::IndirOpnd::New(allocatorAddressRegOpnd, freeListOffset, TyMachPtr, this->m_func);
  5038. // LDR newObjDst, allocator->freeObjectList
  5039. IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::LDR, newObjDst, freeObjectListOpnd, this->m_func);
  5040. insertionPointInstr->InsertBefore(loadMemBlockInstr);
  5041. LegalizeMD::LegalizeInstr(loadMemBlockInstr, false);
  5042. // nextMemBlock = ADD newObjDst, allocSize
  5043. IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  5044. IR::IntConstOpnd* allocSizeOpnd = IR::IntConstOpnd::New((int32)allocSize, TyInt32, this->m_func);
  5045. IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::ADD, nextMemBlockOpnd, newObjDst, allocSizeOpnd, this->m_func);
  5046. insertionPointInstr->InsertBefore(loadNextMemBlockInstr);
  5047. LegalizeMD::LegalizeInstr(loadNextMemBlockInstr, false);
  5048. // CMP nextMemBlock, allocator->endAddress
  5049. IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  5050. checkInstr->SetSrc1(nextMemBlockOpnd);
  5051. checkInstr->SetSrc2(endAddressOpnd);
  5052. insertionPointInstr->InsertBefore(checkInstr);
  5053. LegalizeMD::LegalizeInstr(checkInstr, false);
  5054. // BHI $allocHelper
  5055. IR::BranchInstr * branchToAllocHelperInstr = IR::BranchInstr::New(Js::OpCode::BHI, allocHelperLabel, this->m_func);
  5056. insertionPointInstr->InsertBefore(branchToAllocHelperInstr);
  5057. // LDR allocator->freeObjectList, nextMemBlock
  5058. IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::LDR, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
  5059. insertionPointInstr->InsertBefore(setFreeObjectListInstr);
  5060. LegalizeMD::LegalizeInstr(setFreeObjectListInstr, false);
  5061. // B $allocDone
  5062. IR::BranchInstr * branchToAllocDoneInstr = IR::BranchInstr::New(Js::OpCode::B, allocDoneLabel, this->m_func);
  5063. insertionPointInstr->InsertBefore(branchToAllocDoneInstr);
  5064. }
  5065. void
  5066. LowererMD::GenerateClz(IR::Instr * instr)
  5067. {
  5068. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32());
  5069. Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
  5070. instr->m_opcode = Js::OpCode::CLZ;
  5071. LegalizeMD::LegalizeInstr(instr, false);
  5072. }
void
LowererMD::SaveDoubleToVar(IR::RegOpnd * dstOpnd, IR::RegOpnd *opndFloat, IR::Instr *instrOrig, IR::Instr *instrInsert, bool isHelper)
{
    // Call JSNumber::ToVar to save the float operand to the result of the original (var) instruction
    //
    // Boxes 'opndFloat' into a JavascriptNumber whose address ends up in
    // 'dstOpnd'. Two paths:
    //  - temp-number path: the original instruction's dst is a temp number, so
    //    write into a reusable stack-allocated JavascriptNumber (vtable/type
    //    initialized once, hoisted to the outermost loop top when in a loop);
    //  - allocation path: bump-allocate a fresh JavascriptNumber via
    //    GenerateNumberAllocation and initialize it in place.
    // In both cases the double value itself is stored at the insert point.
    IR::Opnd * symVTableDst;
    IR::Opnd * symDblDst;
    IR::Opnd * symTypeDst;
    IR::Instr *newInstr;
    IR::Instr * numberInitInsertInstr = nullptr;
    if (instrOrig->dstIsTempNumber)
    {
        // Use the original dst to get the temp number sym
        StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrOrig->GetDst(), instrOrig->dstIsTempNumberTransferred);

        // LEA dst, &tempSym
        IR::SymOpnd * symTempSrc = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        newInstr = IR::Instr::New(Js::OpCode::LEA, dstOpnd, symTempSrc, this->m_func);
        instrInsert->InsertBefore(newInstr);
        LegalizeMD::LegalizeInstr(newInstr, false);

        symVTableDst = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        symDblDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        if (this->m_lowerer->outerMostLoopLabel == nullptr)
        {
            // If we are not in loop, just insert in place
            numberInitInsertInstr = instrInsert;
        }
        else
        {
            // Otherwise, initialize in the outer most loop top if we haven't initialize it yet.
            // (TestAndSet returns whether it was already set; if so, skip init entirely.)
            numberInitInsertInstr = this->m_lowerer->initializedTempSym->TestAndSet(tempNumberSym->m_id) ?
                nullptr : this->m_lowerer->outerMostLoopLabel;
        }
    }
    else
    {
        this->GenerateNumberAllocation(dstOpnd, instrInsert, isHelper);
        symVTableDst = IR::IndirOpnd::New(dstOpnd, 0, TyMachPtr, this->m_func);
        symDblDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        numberInitInsertInstr = instrInsert;
    }

    if (numberInitInsertInstr)
    {
        IR::Opnd *jsNumberVTable = m_lowerer->LoadVTableValueOpnd(numberInitInsertInstr, VTableValue::VtableJavascriptNumber);

        // STR dst->vtable, JavascriptNumber::vtable
        newInstr = IR::Instr::New(Js::OpCode::STR, symVTableDst, jsNumberVTable, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
        LegalizeMD::LegalizeInstr(newInstr, false);

        // STR dst->type, JavascriptNumber_type
        IR::Opnd *typeOpnd = m_lowerer->LoadLibraryValueOpnd(numberInitInsertInstr, LibraryValue::ValueNumberTypeStatic);
        newInstr = IR::Instr::New(Js::OpCode::STR, symTypeDst, typeOpnd, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
        LegalizeMD::LegalizeInstr(newInstr, false);
    }

    // VSTR dst->value, opndFloat ; copy the float result to the temp JavascriptNumber
    newInstr = IR::Instr::New(Js::OpCode::VSTR, symDblDst, opndFloat, this->m_func);
    instrInsert->InsertBefore(newInstr);
    LegalizeMD::LegalizeInstr(newInstr, false);
}
  5132. void
  5133. LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *labelDone)
  5134. {
  5135. // src32 = ASRS src, VarShift
  5136. // BCC $helper <== float abs if emitFloatAbs
  5137. // dst32 = EOR src32, src32 ASR #31
  5138. // dst32 = SUB dst32, src32 ASR #31
  5139. // TIOFLW src32
  5140. // BMI $helper
  5141. // dst = LSL src32, VarShift
  5142. // dst = ADD dst, AtomTag
  5143. // B $done
  5144. // $float
  5145. // CMP [src], JavascriptNumber.vtable
  5146. // BNE $helper
  5147. // VLDR dx, [src + offsetof(value)]
  5148. // VABS.f64 dx, dx
  5149. // dst = DoubleToVar(dx)
  5150. // $helper:
  5151. // <call helper>
  5152. // $done:
  5153. bool isInt = false;
  5154. bool isNotInt = false;
  5155. IR::Instr *instr;
  5156. IR::LabelInstr *labelFloat = nullptr;
  5157. if (src->IsRegOpnd())
  5158. {
  5159. if (src->AsRegOpnd()->IsTaggedInt())
  5160. {
  5161. isInt = true;
  5162. }
  5163. else if (src->AsRegOpnd()->IsNotInt())
  5164. {
  5165. isNotInt = true;
  5166. }
  5167. }
  5168. else if (src->IsAddrOpnd())
  5169. {
  5170. IR::AddrOpnd *varOpnd = src->AsAddrOpnd();
  5171. Assert(varOpnd->IsVar() && Js::TaggedInt::Is(varOpnd->m_address));
  5172. int absValue = abs(Js::TaggedInt::ToInt32(varOpnd->m_address));
  5173. if (!Js::TaggedInt::IsOverflow(absValue))
  5174. {
  5175. varOpnd->SetAddress(Js::TaggedInt::ToVarUnchecked(absValue), IR::AddrOpndKindConstantVar);
  5176. LowererMD::CreateAssign(dst, varOpnd, insertInstr);
  5177. }
  5178. }
  5179. if (src->IsRegOpnd() == false)
  5180. {
  5181. //Lets legalize right away as floating point fast path works on the same src.
  5182. IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  5183. instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, this->m_func);
  5184. insertInstr->InsertBefore(instr);
  5185. LegalizeMD::LegalizeInstr(instr, false);
  5186. src = regOpnd;
  5187. }
  5188. bool emitFloatAbs = !isInt;
  5189. if (!isNotInt)
  5190. {
  5191. // src32 = ASRS src, VarTag_Shift
  5192. IR::RegOpnd *src32 = src32 = IR::RegOpnd::New(TyMachReg, this->m_func);
  5193. instr = IR::Instr::New(
  5194. Js::OpCode::ASRS, src32, src, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt32, this->m_func), this->m_func);
  5195. insertInstr->InsertBefore(instr);
  5196. if (!isInt)
  5197. {
  5198. if (emitFloatAbs)
  5199. {
  5200. // BCC $float
  5201. labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5202. instr = IR::BranchInstr::New(Js::OpCode::BCC, labelFloat, this->m_func);
  5203. insertInstr->InsertBefore(instr);
  5204. }
  5205. else
  5206. {
  5207. instr = IR::BranchInstr::New(Js::OpCode::BCC, labelHelper, this->m_func);
  5208. insertInstr->InsertBefore(instr);
  5209. }
  5210. }
  5211. // dst32 = EOR src32, src32 ASR #31
  5212. IR::RegOpnd *dst32 = IR::RegOpnd::New(TyMachReg, this->m_func);
  5213. instr = IR::Instr::New(Js::OpCode::CLRSIGN, dst32, src32, this->m_func);
  5214. insertInstr->InsertBefore(instr);
  5215. // dst32 = SUB dst32, src32 ASR #31
  5216. instr = IR::Instr::New(Js::OpCode::SBCMPLNT, dst32, dst32, src32, this->m_func);
  5217. insertInstr->InsertBefore(instr);
  5218. // TEQ dst32, dst32 LSL #1
  5219. instr = IR::Instr::New(Js::OpCode::TIOFLW, this->m_func);
  5220. instr->SetSrc1(dst32);
  5221. insertInstr->InsertBefore(instr);
  5222. // BMI $helper
  5223. instr = IR::BranchInstr::New(Js::OpCode::BMI, labelHelper, this->m_func);
  5224. insertInstr->InsertBefore(instr);
  5225. // dst32 = LSL dst32, VarShift
  5226. instr = IR::Instr::New(
  5227. Js::OpCode::LSL, dst32, dst32, IR::IntConstOpnd::New(Js::VarTag_Shift, TyMachReg, this->m_func), this->m_func);
  5228. insertInstr->InsertBefore(instr);
  5229. // dst = ADD dst, AtomTag
  5230. instr = IR::Instr::New(
  5231. Js::OpCode::ADD, dst, dst32, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
  5232. insertInstr->InsertBefore(instr);
  5233. LegalizeMD::LegalizeInstr(instr, false);
  5234. }
  5235. if (labelFloat)
  5236. {
  5237. // B $done
  5238. instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
  5239. insertInstr->InsertBefore(instr);
  5240. // $float
  5241. insertInstr->InsertBefore(labelFloat);
  5242. }
  5243. if (emitFloatAbs)
  5244. {
  5245. // CMP [src], JavascriptNumber.vtable
  5246. IR::Opnd *opnd = IR::IndirOpnd::New(src->AsRegOpnd(), (int32)0, TyMachPtr, this->m_func);
  5247. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  5248. instr->SetSrc1(opnd);
  5249. instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
  5250. insertInstr->InsertBefore(instr);
  5251. LegalizeMD::LegalizeInstr(instr, false);
  5252. // BNE $helper
  5253. instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
  5254. insertInstr->InsertBefore(instr);
  5255. // VLDR dx, [src + offsetof(value)]
  5256. opnd = IR::IndirOpnd::New(src->AsRegOpnd(), Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
  5257. IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyMachDouble, this->m_func);
  5258. instr = IR::Instr::New(Js::OpCode::VLDR, regOpnd, opnd, this->m_func);
  5259. insertInstr->InsertBefore(instr);
  5260. // VABS.f64 dy, dx
  5261. IR::RegOpnd *resultRegOpnd = IR::RegOpnd::New(TyMachDouble, this->m_func);
  5262. instr = IR::Instr::New(Js::OpCode::VABS, resultRegOpnd, regOpnd, this->m_func);
  5263. insertInstr->InsertBefore(instr);
  5264. // dst = DoubleToVar(dy)
  5265. SaveDoubleToVar(callInstr->GetDst()->AsRegOpnd(), resultRegOpnd, callInstr, insertInstr);
  5266. }
  5267. }
bool LowererMD::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *labelDone)
{
    // Emits the inline fast path for String.prototype charAt / charCodeAt /
    // codePointAt ('index' selects which). Returns false when the fast path
    // degenerates to an unconditional branch to $helper (source is a tagged
    // int, or the index is a negative constant); true otherwise.
    //
    // TST regSrc, AtomTag
    // BNE $helper
    // type = LDR [regSrc + offset(type)]
    // typeid = LDR [type + offset(typeid)]
    // CMP typeid, TypeIds_String
    // BNE $helper
    // psz = LDR [regSrc + offset(m_pszValue)]
    // CMP psz, 0
    // BEQ $helper
    // index32 = ASRS srcIndex, VarShift
    // BCC $helper
    // length = LDR [regSrc + offset(length)]
    // CMP length, index32
    // BLS $helper
    // char = LDRH [regSrc + index32, LSL #1]
    //
    // if (charAt)
    // (r1) = MOV char
    // (r0) = LDIMM scriptContext
    // dst = CALL GetStringFromChar
    //
    // else
    // if (codePointAt)
    // Lowerer.GenerateFastCodePointAt -- Common inline functions
    //
    // char = LSL char, VarShift
    // dst = ADD char, AtomTag

    bool isInt = false;
    IR::Instr *instr;
    IR::IndirOpnd *indirOpnd;
    IR::RegOpnd *regSrcStr;

    if (srcStr->IsRegOpnd())
    {
        if (srcStr->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
    }

    if (isInt)
    {
        // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);

        // The "string" is an int. Just bail out.
        instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
        return false;
    }

    // Bail out if index a constant and is less than zero.
    if (srcIndex->IsImmediateOpnd() && srcIndex->GetImmediateValue(this->m_func) < 0)
    {
        instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
        return false;
    }

    // Force the string into a reg at the top. Otherwise we'll be loading it over and over...
    if (srcStr->IsRegOpnd())
    {
        regSrcStr = srcStr->AsRegOpnd();
    }
    else
    {
        regSrcStr = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(regSrcStr, srcStr, insertInstr);
    }

    // Branches to $helper unless regSrcStr holds a JavascriptString.
    this->m_lowerer->GenerateStringTest(regSrcStr, insertInstr, labelHelper);

    // psz = LDR [regSrc + offset(m_pszValue)]
    IR::RegOpnd *psz = IR::RegOpnd::New(TyMachPtr, this->m_func);
    indirOpnd = IR::IndirOpnd::New(regSrcStr, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
    LowererMD::CreateAssign(psz, indirOpnd, insertInstr);

    // CMP psz, 0
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(psz);
    instr->SetSrc2(IR::IntConstOpnd::New(0, TyMachPtr, this->m_func));
    insertInstr->InsertBefore(instr);

    // BEQ $helper -- null m_pszValue (presumably no flat buffer yet); use the helper
    instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);

    // Arm should change to Uint32 for the length
    // length = LDR [regSrcStr + offsetof(length)]
    IR::RegOpnd *length = IR::RegOpnd::New(TyMachReg, this->m_func);
    indirOpnd = IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
    LowererMD::CreateAssign(length, indirOpnd, insertInstr);

    if (srcIndex->IsAddrOpnd())
    {
        // The index is a constant, so just use it.
        uint32 constIndex = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address);

        // CMP length, index32
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(length);
        instr->SetSrc2(IR::IntConstOpnd::New(constIndex, TyUint32, this->m_func));
        insertInstr->InsertBefore(instr);
        LegalizeMD::LegalizeInstr(instr, false);

        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // BLS $helper
        instr = IR::BranchInstr::New(Js::OpCode::BLS, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // indir = [psz + index32 * 2]
        indirOpnd = IR::IndirOpnd::New(psz, constIndex * sizeof(char16), TyUint16, this->m_func);
    }
    else
    {
        // index32 = ASRS srcIndex, VarShift
        IR::RegOpnd *index32 = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::ASRS, index32, srcIndex, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (!srcIndex->IsRegOpnd() || !srcIndex->AsRegOpnd()->IsTaggedInt())
        {
            // BCC $helper -- shifted-out bit wasn't the int tag: index not a tagged int
            instr = IR::BranchInstr::New(Js::OpCode::BCC, labelHelper, this->m_func);
            insertInstr->InsertBefore(instr);
        }

        // CMP length, index32
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(length);
        instr->SetSrc2(index32);
        insertInstr->InsertBefore(instr);

        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // BLS $helper
        instr = IR::BranchInstr::New(Js::OpCode::BLS, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // indir = [psz + index32 * 2]
        indirOpnd = IR::IndirOpnd::New(psz, index32, (byte)Math::Log2(sizeof(char16)), TyUint16, this->m_func);
    }

    // char = LDRH [regSrc + index32, LSL #1]
    IR::RegOpnd *charResult = IR::RegOpnd::New(TyUint32, this->m_func);
    LowererMD::CreateAssign(charResult, indirOpnd, insertInstr);

    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
    {
        IR::Opnd *resultOpnd;
        if (dst->IsEqual(srcStr))
        {
            // dst aliases the source string; build the result in a temp so the
            // helper path can still read the original srcStr.
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = dst;
        }
        this->m_lowerer->GenerateGetSingleCharString(charResult, resultOpnd, labelHelper, labelDone, insertInstr, false);
    }
    else
    {
        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);

        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
        {
            this->m_lowerer->GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, length, srcIndex, charResult, psz);
        }

        // result = LSL result, VarShift
        instr = IR::Instr::New(Js::OpCode::LSL, charResult, charResult, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        // dst = ADD result, AtomTag -- tag the char code as a var int
        instr = IR::Instr::New(Js::OpCode::ADD, dst, charResult, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
        LegalizeMD::LegalizeInstr(instr, false);
    }

    return true;
}
  5428. IR::Instr *
  5429. LowererMD::LoadStackAddress(StackSym *sym, IR::RegOpnd* regDst)
  5430. {
  5431. if (regDst == nullptr)
  5432. {
  5433. regDst = IR::RegOpnd::New(TyMachReg, this->m_func);
  5434. }
  5435. IR::SymOpnd * symSrc = IR::SymOpnd::New(sym, TyMachPtr, this->m_func);
  5436. IR::Instr * lea = IR::Instr::New(Js::OpCode::LEA, regDst, symSrc, this->m_func);
  5437. return lea;
  5438. }
void
LowererMD::EmitInt4Instr(IR::Instr *instr)
{
    // Lowers an int32 IR opcode to its ARM machine opcode in place.
    // Arithmetic/bitwise ops just swap the opcode (or delegate to ChangeTo*);
    // branch ops split into a CMP followed by the condition branch, sharing
    // the two goto tails below.
    IR::Instr * newInstr;
    IR::Opnd * src1;
    IR::Opnd * src2;

    switch (instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        // Negate as reverse-subtract from zero.
        instr->m_opcode = Js::OpCode::RSB;
        instr->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, instr->m_func));
        break;

    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::MVN;
        break;

    case Js::OpCode::Add_I4:
        ChangeToAdd(instr, false /* needFlags */);
        break;

    case Js::OpCode::Sub_I4:
        ChangeToSub(instr, false /* needFlags */);
        break;

    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::MUL;
        break;

    case Js::OpCode::DivU_I4:
        AssertMsg(UNREACHED, "Unsigned div NYI");
        // FALLTHROUGH (intentional): lower as signed divide after the NYI assert.
    case Js::OpCode::Div_I4:
        instr->m_opcode = Js::OpCode::SDIV;
        break;

    case Js::OpCode::RemU_I4:
        AssertMsg(UNREACHED, "Unsigned rem NYI");
        // FALLTHROUGH (intentional): lower as signed remainder after the NYI assert.
    case Js::OpCode::Rem_I4:
        instr->m_opcode = Js::OpCode::REM;
        break;

    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::ORR;
        break;

    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::EOR;
        break;

    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        ChangeToShift(instr, false /* needFlags */);
        break;

    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::BNE;
        goto br1_Common;

    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::BEQ;

        // Shared tail for one-operand branches: emit "CMP src1, #0" before the branch.
br1_Common:
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, instr->m_func));
        // We know this CMP is legal.
        return;

    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::BEQ;
        goto br2_Common;

    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::BNE;
        goto br2_Common;

    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::BGT;
        goto br2_Common;

    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::BGE;
        goto br2_Common;

    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::BLE;
        goto br2_Common;

    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::BLT;
        goto br2_Common;

    // Unsigned comparisons map to the unsigned ARM condition codes.
    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::BHI;
        goto br2_Common;

    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::BCS;
        goto br2_Common;

    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::BCC;
        goto br2_Common;

    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::BLS;
        goto br2_Common;

        // Shared tail for two-operand branches: emit "CMP src1, src2" before the branch.
br2_Common:
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        // Let instr point to the CMP so we can legalize it.
        instr = newInstr;
        break;

    default:
        AssertMsg(UNREACHED, "NYI I4 instr");
        break;
    }

    LegalizeMD::LegalizeInstr(instr, false);
}
void
LowererMD::LowerInt4NegWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Neg_I4 with result-condition bailouts. Negation is emitted as a
    // flag-setting reverse-subtract (dst = 0 - src1); overflow (V set: src was
    // INT_MIN) and a zero result (Z set: src was 0, so the JS result would be
    // -0) each branch to the bailout label when the corresponding bailout kind
    // is requested.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Neg_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    Assert(instr->GetDst()->IsInt32());
    Assert(instr->GetSrc1()->IsInt32());

    // RSBS dst, src1, #0
    // BVS $bailOutLabel
    // BEQ $bailOutLabel
    // B $skipBailOut
    // $bailOut:
    // ...
    // $skipBailOut:

    // Lower the instruction
    instr->m_opcode = Js::OpCode::RSBS;
    instr->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, instr->m_func));
    Legalize(instr);

    if(bailOutKind & IR::BailOutOnOverflow)
    {
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVS, bailOutLabel, instr->m_func));
    }

    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BEQ, bailOutLabel, instr->m_func));
    }

    // Skip bailout
    bailOutLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
void
LowererMD::LowerInt4AddWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Add_I4 with an overflow bailout: emit a flag-setting ADDS, then
    // branch past the (following) bailout block when V is clear.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Add_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    Assert(instr->GetDst()->IsInt32());
    Assert(instr->GetSrc1()->IsInt32());
    Assert(instr->GetSrc2()->IsInt32());

    // ADDS dst, src1, src2
    // BVC skipBailOutLabel
    // fallthrough to bailout

    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());

    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 || dstEquSrc2)
    {
        // dst aliases a source: compute into a temp and sink the copy to dst
        // past the bailout check, so the bailout still sees the original
        // source values.
        LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
    }

    // Lower the instruction
    ChangeToAdd(instr, true /* needFlags */);
    Legalize(instr);

    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVC, skipBailOutLabel, instr->m_func));

    // Fall through to bailOutLabel
}
  5620. void
  5621. LowererMD::LowerInt4SubWithBailOut(
  5622. IR::Instr *const instr,
  5623. const IR::BailOutKind bailOutKind,
  5624. IR::LabelInstr *const bailOutLabel,
  5625. IR::LabelInstr *const skipBailOutLabel)
  5626. {
  5627. Assert(instr);
  5628. Assert(instr->m_opcode == Js::OpCode::Sub_I4);
  5629. Assert(!instr->HasBailOutInfo());
  5630. Assert(
  5631. (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
  5632. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  5633. Assert(bailOutLabel);
  5634. Assert(instr->m_next == bailOutLabel);
  5635. Assert(skipBailOutLabel);
  5636. Assert(instr->GetDst()->IsInt32());
  5637. Assert(instr->GetSrc1()->IsInt32());
  5638. Assert(instr->GetSrc2()->IsInt32());
  5639. // SUBS dst, src1, src2
  5640. // BVC skipBailOutLabel
  5641. // fallthrough to bailout
  5642. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5643. Assert(dst->IsRegOpnd());
  5644. const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
  5645. if(dstEquSrc1 || dstEquSrc2)
  5646. {
  5647. LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
  5648. }
  5649. // Lower the instruction
  5650. ChangeToSub(instr, true /* needFlags */);
  5651. Legalize(instr);
  5652. // Skip bailout on no overflow
  5653. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVC, skipBailOutLabel, instr->m_func));
  5654. // Fall through to bailOutLabel
  5655. }
// Lowers Mul_I4 with result checks. The multiply is done as a 64-bit SMULL so
// that 32-bit overflow can be detected afterwards by comparing the high word
// against the sign-extension of the low word; a separate check handles a -0
// result when bailOutKind requests it.
void
LowererMD::LowerInt4MulWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Mul_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    IR::Instr *insertInstr;

    Assert(dst->IsInt32());
    Assert(src1->IsInt32());
    Assert(src2->IsInt32());

    // (r12:)dst = SMULL dst, (r12,) src1, src2 -- signed multiply producing the
    // full 64-bit product: high word in r12, low word in dst.
    instr->m_opcode = Js::OpCode::SMULL;
    Legalize(instr);

    // Negative-zero check:
    //
    // If the result is zero, we need to check and only bail out if it would be -0.
    // We know that if the result is 0/-0, at least one operand must be zero.
    // We should bail out if src1 + src2 < 0, as this proves that the other operand is negative.
    //
    //     CMN src1, src2
    //     BPL $skipBailOutLabel
    //
    // $bailOutLabel
    //     GenerateBailout
    //
    // $skipBailOutLabel
    IR::LabelInstr *checkForNegativeZeroLabel = nullptr;
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        bailOutLabel->InsertBefore(checkForNegativeZeroLabel);

        Assert(dst->IsRegOpnd());
        Assert(!src1->IsEqual(src2)); // cannot result in -0 if both operands are the same; GlobOpt should have figured that out

        // CMN src1, src2 -- add the operands, setting only the flags
        // BPL $skipBailOutLabel -- non-negative sum: the zero result is +0, no bailout
        insertInstr = IR::Instr::New(Js::OpCode::CMN, instr->m_func);
        insertInstr->SetSrc1(src1);
        insertInstr->SetSrc2(src2);
        bailOutLabel->InsertBefore(insertInstr);
        LegalizeMD::LegalizeInstr(insertInstr, false);
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::BPL, skipBailOutLabel, instr->m_func));
        // Fall through to bailOutLabel
    }

    // Screening checks go before the negative-zero block when one was emitted.
    const auto insertBeforeInstr = checkForNegativeZeroLabel ? checkForNegativeZeroLabel : bailOutLabel;

    // Overflow check:
    //     CMP_ASR31 r12, dst
    //     BNE $bailOutLabel
    if(bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
    {
        // SMULL does not set the flags. The 32-bit result did not overflow exactly
        // when the high word (r12) equals the sign-extension of the low word (dst).
        // CMP r12, dst, ASR #31 -- check for overflow (== means no overflow)
        IR::RegOpnd* opndRegR12 = IR::RegOpnd::New(nullptr, RegR12, TyMachReg, instr->m_func);
        insertInstr = IR::Instr::New(Js::OpCode::CMP_ASR31, instr->m_func);
        insertInstr->SetSrc1(opndRegR12);
        insertInstr->SetSrc2(dst);
        insertBeforeInstr->InsertBefore(insertInstr);

        // BNE $bailOutLabel -- bail if the result overflowed
        insertInstr = IR::BranchInstr::New(Js::OpCode::BNE, bailOutLabel, instr->m_func);
        insertBeforeInstr->InsertBefore(insertInstr);
    }

    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // TST dst, dst -- a zero low word may be -0: run the negative-zero check
        // BEQ $checkForNegativeZeroLabel
        insertInstr = IR::Instr::New(Js::OpCode::TST, instr->m_func);
        insertInstr->SetSrc1(dst);
        insertInstr->SetSrc2(dst);
        insertBeforeInstr->InsertBefore(insertInstr);
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BEQ, checkForNegativeZeroLabel, instr->m_func));
    }

    // Non-zero, non-overflowing result: skip the bailout entirely.
    insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, skipBailOutLabel, instr->m_func));
}
// Lowers Rem_I4 with a negative-zero bailout. EmitInt4Instr produces the
// remainder; afterwards a zero result combined with a negative dividend means
// the JavaScript result would be -0, which requires bailing out.
void
LowererMD::LowerInt4RemWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    Assert(dst->IsInt32());
    Assert(src1->IsInt32());
    Assert(src2->IsInt32());

    // Lower the instruction
    EmitInt4Instr(instr);

    // Check for negative zero:
    // We have dst = src1 % src2 and need to bail out if dst == 0 and src1 < 0.
    //     TST dst, dst
    //     BNE $skipBailOutLabel    -- non-zero remainder: -0 impossible
    //     TST src1, src1
    //     BPL $skipBailOutLabel    -- non-negative dividend: the zero is +0
    //
    // $bailOutLabel
    //     GenerateBailout();
    //
    // $skipBailOutLabel
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        IR::LabelInstr *checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        bailOutLabel->InsertBefore(checkForNegativeZeroLabel);

        // TST dst, dst / BNE $skipBailOutLabel
        IR::Instr *insertInstr = IR::Instr::New(Js::OpCode::TST, instr->m_func);
        insertInstr->SetSrc1(dst);
        insertInstr->SetSrc2(dst);
        bailOutLabel->InsertBefore(insertInstr);
        IR::Instr *branchInstr = IR::BranchInstr::New(Js::OpCode::BNE, skipBailOutLabel, instr->m_func);
        bailOutLabel->InsertBefore(branchInstr);

        // TST src1, src1 / BPL $skipBailOutLabel
        insertInstr = IR::Instr::New(Js::OpCode::TST, instr->m_func);
        insertInstr->SetSrc1(src1);
        insertInstr->SetSrc2(src1);
        bailOutLabel->InsertBefore(insertInstr);
        branchInstr = IR::BranchInstr::New(Js::OpCode::BPL, skipBailOutLabel, instr->m_func);
        bailOutLabel->InsertBefore(branchInstr);
    }
    // Fall through to bailOutLabel
}
// EmitLoadVar: convert an int32 (or uint32 when isFromUint32) in src1 into a
// tagged var in dst. Fast path (note: the tag is set with ADD, not OR):
//     s2  = LSL s1, Js::VarTag_Shift  -- shift the value into payload position
//     dst = ADD s2, 1                 -- set the atom tag bit
// When the value may not fit in a tagged int, a range test first branches to
// $ToVar, which boxes the value as a heap number via EmitLoadVarNoCheck:
//     (range test on s1)
//     BMI/BNE $ToVar
//     ...fast path...
//     B $done
// $ToVar:
//     EmitLoadVarNoCheck
// $Done:
void
LowererMD::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");

    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelToVar = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    IR::Instr *instr;

    if (src1->IsTaggedInt())
    {
        // Known to fit in a tagged int: no range test needed.
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not to be an int: only the boxing path is needed.
        isNotInt = true;
    }

    if (!isNotInt)
    {
        // Emit the tagged-int fast path.
        IR::Opnd * opnd32src1 = src1->UseWithNewType(TyInt32, this->m_func);
        IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Opnd * opnd32Reg2 = opndReg2->UseWithNewType(TyInt32, this->m_func);

        if (!isInt)
        {
            // Might not fit: emit a range test that branches to $ToVar.
            labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            if (!isFromUint32)
            {
                // TEQ src1,src1 LS_u(#1) - TIOFLW is an alias for this pattern.
                // XOR the src with itself shifted left one. If there's no overflow,
                // the result should be positive (top bit clear).
                instr = IR::Instr::New(Js::OpCode::TIOFLW, this->m_func);
                instr->SetSrc1(opnd32src1);
                instrLoad->InsertBefore(instr);

                // BMI $ToVar
                // Branch on negative result of the preceding test.
                instr = IR::BranchInstr::New(Js::OpCode::BMI, labelToVar, this->m_func);
                instrLoad->InsertBefore(instr);
            }
            else
            {
                // TST src1, 0xC0000000 -- test for length that is negative or overflows tagged int
                instr = IR::Instr::New(Js::OpCode::TST, this->m_func);
                instr->SetSrc1(opnd32src1);
                instr->SetSrc2(IR::IntConstOpnd::New((int32)0x80000000 >> Js::VarTag_Shift, TyInt32, this->m_func));
                instrLoad->InsertBefore(instr);
                LegalizeMD::LegalizeInstr(instr, false);

                // BNE $ToVar
                instr = IR::BranchInstr::New(Js::OpCode::BNE, labelToVar, this->m_func);
                instrLoad->InsertBefore(instr);
            }
        }

        // s2 = LSL s1, Js::VarTag_Shift -- restore the var tag on the result
        instr = IR::Instr::New(Js::OpCode::LSL, opnd32Reg2, opnd32src1,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            this->m_func);
        instrLoad->InsertBefore(instr);

        // dst = ADD s2, 1 -- set the atom tag bit
        instr = IR::Instr::New(Js::OpCode::ADD, instrLoad->GetDst(), opndReg2, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), this->m_func);
        instrLoad->InsertBefore(instr);

        if (!isInt)
        {
            // B $done -- fast path finished; jump over the boxing path.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            instr = IR::BranchInstr::New(Js::OpCode::B, labelDone, this->m_func);
            instrLoad->InsertBefore(instr);
        }
    }

    instr = instrLoad;
    if (!isInt)
    {
        //$ToVar:
        if (labelToVar)
        {
            instrLoad->InsertBefore(labelToVar);
        }

        // Box the value as a number var (float conversion + SaveDoubleToVar).
        this->EmitLoadVarNoCheck(instrLoad->GetDst()->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper);
    }
    //$Done:
    if (labelDone)
    {
        instr->InsertAfter(labelDone);
    }
    instrLoad->Remove();
}
  5884. void
  5885. LowererMD::EmitLoadVarNoCheck(IR::RegOpnd * dst, IR::RegOpnd * src, IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
  5886. {
  5887. IR::RegOpnd * floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
  5888. if (isFromUint32)
  5889. {
  5890. this->EmitUIntToFloat(floatReg, src, instrLoad);
  5891. }
  5892. else
  5893. {
  5894. this->EmitIntToFloat(floatReg, src, instrLoad);
  5895. }
  5896. this->SaveDoubleToVar(dst, floatReg, instrLoad, instrLoad, isHelper);
  5897. }
// EmitLoadInt32: emit code converting the var in src1 to an int32 in dst.
//
// isInt:
//     dst = ASR r1, AtomTag
// isNotInt:
//     dst = ToInt32(r1)
// else:
//     dst = ASRS r1, AtomTag
//     BCS $Done
//     dst = ToInt32(r1)
// $Done
//
// Returns true when the caller must generate the bailout itself (the
// int-only/expecting-integer bailout path); false when the conversion,
// including any helper call, was fully lowered here.
bool
LowererMD::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    AssertMsg(instrLoad->GetSrc1()->IsRegOpnd(), "Should be regOpnd");

    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    IR::LabelInstr *labelHelper = nullptr;
    IR::Instr *instr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }

    if (isInt)
    {
        // Known tagged int: just shift the tag out, in place.
        instrLoad->m_opcode = Js::OpCode::ASR;
        instrLoad->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func));
    }
    else
    {
        // Use a float-to-int fast path for likely-float sources, unless the bailout
        // kind demands an integer (then the helper/bailout path handles non-ints).
        const ValueType src1ValueType(src1->GetValueType());
        const bool doFloatToIntFastPath =
            (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
            !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger));

        if (isNotInt)
        {
            // Known to be non-integer. If we are required to bail out on helper call, just re-jit.
            if (!doFloatToIntFastPath && bailOutOnHelper)
            {
                if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
                {
                    // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                    // because it won't help and the same thing will happen again. Just abort jitting this function.
                    if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                    {
                        Output::Print(_u(" Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                        Output::Flush();
                    }
                    throw Js::OperationAbortedException();
                }
                throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
            }
        }
        else
        {
            // Could be an integer in this case.
            if (!isInt)
            {
                if(doFloatToIntFastPath)
                {
                    labelFloat = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, false);
                }
                else
                {
                    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
                }

                // Branch to the float/helper path when src1 is not a tagged int.
                this->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : labelHelper);
            }

            // dst = ASRS src1, AtomTag -- shift the tag out; the tag bit lands in carry.
            instr = IR::Instr::New(
                Js::OpCode::ASRS, instrLoad->GetDst(), src1, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
            instrLoad->InsertBefore(instr);

            // BCS $Done -- carry set means the tag bit was present: it was an int.
            labelDone = instrLoad->GetOrCreateContinueLabel();
            instr = IR::BranchInstr::New(Js::OpCode::BCS, labelDone, this->m_func);
            instrLoad->InsertBefore(instr);
        }

        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!labelHelper)
            {
                labelHelper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }
            if(!labelDone)
            {
                labelDone = instrLoad->GetOrCreateContinueLabel();
            }

            // Unbox the double and convert to int32; both steps fall to $helper
            // when the value is not a convertible double.
            IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
            this->LoadFloatValue(src1, floatReg, labelHelper, instrLoad, instrLoad->HasBailOutInfo());
            this->ConvertFloatToInt32(instrLoad->GetDst(), floatReg, labelHelper, labelDone, instrLoad);
        }

        if(labelHelper)
        {
            instrLoad->InsertBefore(labelHelper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            m_lowerer->LoadInt32FromUntaggedVar(instrLoad);

            // Need to bail out instead of calling a helper
            return true;
        }

        if (bailOutOnHelper)
        {
            // Helper calls disallowed: branch straight to the bailout label.
            Assert(labelBailOut);
            this->m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrLoad);
            instrLoad->Remove();
        }
        else if (conversionFromObjectAllowed)
        {
            this->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
        }
        else
        {
            this->m_lowerer->LowerUnaryHelperMemWithBoolReference(instrLoad, IR::HelperConv_ToInt32_NoObjects, true /*useBoolForBailout*/);
        }
    }

    return false;
}
  6024. IR::Instr *
  6025. LowererMD::LowerGetCachedFunc(IR::Instr *instr)
  6026. {
  6027. // src1 is an ActivationObjectEx, and we want to get the function object identified by the index (src2)
  6028. // dst = MOV (src1)->GetFuncCacheEntry(src2)->func
  6029. //
  6030. // => [src1 + (offsetof(src1, cache) + (src2 * sizeof(FuncCacheEntry)) + offsetof(FuncCacheEntry, func))]
  6031. IR::IntConstOpnd *src2Opnd = instr->UnlinkSrc2()->AsIntConstOpnd();
  6032. IR::RegOpnd *src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
  6033. IR::Instr *instrPrev = instr->m_prev;
  6034. instr->SetSrc1(IR::IndirOpnd::New(src1Opnd, (src2Opnd->GetValue() * sizeof(Js::FuncCacheEntry)) + Js::ActivationObjectEx::GetOffsetOfCache() + offsetof(Js::FuncCacheEntry, func), TyVar, this->m_func));
  6035. this->ChangeToAssign(instr);
  6036. src2Opnd->Free(this->m_func);
  6037. return instrPrev;
  6038. }
// LowerCommitScope: set the activation object's commit flag and store
// undefined into each of its local var slots. Returns the instruction that
// preceded instrCommit (for the caller's iteration).
IR::Instr *
LowererMD::LowerCommitScope(IR::Instr *instrCommit)
{
    IR::Instr *instrPrev = instrCommit->m_prev;
    IR::RegOpnd *baseOpnd = instrCommit->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd *opnd;
    IR::Instr * insertInstr = instrCommit->m_next;

    // Set the commit flag: [base + offsetof(commitFlag)] = 1
    opnd = IR::IndirOpnd::New(baseOpnd, Js::ActivationObjectEx::GetOffsetOfCommitFlag(), TyInt8, this->m_func);
    instrCommit->SetDst(opnd);
    instrCommit->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, this->m_func));
    LowererMD::ChangeToAssign(instrCommit);

    const Js::PropertyIdArray *propIds = instrCommit->m_func->GetJITFunctionBody()->GetFormalsPropIdArray();

    // Write undef to all the local var slots.
    uint firstVarSlot = (uint)Js::ActivationObjectEx::GetFirstVarSlot(propIds);
    if (firstVarSlot < propIds->count)
    {
        // On ARM, instead of re-using the address of "undefined" for each store, put the address in a register
        // and re-use that. (Would that be good for x86/amd64 as well?)
        IR::RegOpnd *undefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(undefOpnd, m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueUndefined), insertInstr);

        IR::RegOpnd *slotBaseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        // Load a pointer to the aux slots. We assume that all ActivationObject's have only aux slots.
        opnd = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        this->CreateAssign(slotBaseOpnd, opnd, insertInstr);

        // [slotBase + i * pointer-size] = undefined, for each var slot.
        for (uint i = firstVarSlot; i < propIds->count; i++)
        {
            opnd = IR::IndirOpnd::New(slotBaseOpnd, i << this->GetDefaultIndirScale(), TyMachReg, this->m_func);
            LowererMD::CreateAssign(opnd, undefOpnd, insertInstr);
        }
    }

    return instrPrev;
}
  6071. void
  6072. LowererMD::ImmedSrcToReg(IR::Instr * instr, IR::Opnd * newOpnd, int srcNum)
  6073. {
  6074. if (srcNum == 2)
  6075. {
  6076. instr->SetSrc2(newOpnd);
  6077. }
  6078. else
  6079. {
  6080. Assert(srcNum == 1);
  6081. instr->SetSrc1(newOpnd);
  6082. }
  6083. switch (instr->m_opcode)
  6084. {
  6085. case Js::OpCode::LDIMM:
  6086. instr->m_opcode = Js::OpCode::MOV;
  6087. break;
  6088. default:
  6089. // Nothing to do (unless we have immed/reg variations for other instructions).
  6090. break;
  6091. }
  6092. }
  6093. void
  6094. LowererMD::MarkOneFltTmpSym(StackSym *sym, BVSparse<JitArenaAllocator> *bvTmps, bool fFltPrefOp)
  6095. {
  6096. // Nothing to do here. It may be called when lowering a switch if fast paths are on.
  6097. }
  6098. IR::LabelInstr *
  6099. LowererMD::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  6100. {
  6101. return exitTargetInstr;
  6102. }
  6103. bool
  6104. LowererMD::AnyFloatTmps()
  6105. {
  6106. // no float preferencing for ARM yet
  6107. return false;
  6108. }
  6109. IR::LabelInstr*
  6110. LowererMD::InsertBeforeRecoveryForFloatTemps(
  6111. IR::Instr * insertBefore,
  6112. IR::LabelInstr * labelRecover,
  6113. const bool isInHelperBlock)
  6114. {
  6115. AssertMsg(0, "NYI");
  6116. return nullptr;
  6117. }
  6118. StackSym *
  6119. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot)
  6120. {
  6121. return GetImplicitParamSlotSym(argSlot, this->m_func);
  6122. }
  6123. StackSym *
  6124. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot, Func * func)
  6125. {
  6126. // For ARM, offset for implicit params always start at 0
  6127. // TODO: Consider not to use the argSlot number for the param slot sym, which can
  6128. // be confused with arg slot number from javascript
  6129. StackSym * stackSym = StackSym::NewParamSlotSym(argSlot, func);
  6130. func->SetArgOffset(stackSym, argSlot * MachPtr);
  6131. func->SetHasImplicitParamLoad();
  6132. return stackSym;
  6133. }
  6134. IR::LabelInstr *
  6135. LowererMD::EnsureEpilogLabel()
  6136. {
  6137. if (this->m_func->m_epilogLabel)
  6138. {
  6139. return this->m_func->m_epilogLabel;
  6140. }
  6141. IR::Instr *exitInstr = this->m_func->m_exitInstr;
  6142. IR::Instr *prevInstr = exitInstr->GetPrevRealInstrOrLabel();
  6143. if (prevInstr->IsLabelInstr())
  6144. {
  6145. this->m_func->m_epilogLabel = prevInstr->AsLabelInstr();
  6146. return prevInstr->AsLabelInstr();
  6147. }
  6148. IR::LabelInstr *labelInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6149. exitInstr->InsertBefore(labelInstr);
  6150. this->m_func->m_epilogLabel = labelInstr;
  6151. return labelInstr;
  6152. }
// GenerateLdThisStrict: inline fast path for loading 'this' in strict mode.
// Tagged ints and ordinary objects evaluate to themselves (copied to dst when
// a distinct dst exists). ActivationObjects take the helper path, where dst is
// preloaded with undefined before falling through to the original instruction
// (the caller presumably emits the helper call there — confirm at call site).
// Always returns true.
bool
LowererMD::GenerateLdThisStrict(IR::Instr* insertInstr)
{
    IR::RegOpnd * src1 = insertInstr->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * typeId = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::RegOpnd * type = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*helper*/ true);
    IR::Instr* instr;

    // A copy is only needed when a dst exists and differs from src1.
    bool assign = insertInstr->GetDst() && !insertInstr->GetDst()->IsEqual(src1);

    if (!src1->m_sym->m_isNotInt)
    {
        // Tagged ints are their own 'this': jump straight past the type checks.
        GenerateObjectTest(src1, insertInstr, assign ? done : fallthrough);
    }

    // LDR type, [src1 + offset(type)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        this->CreateAssign(type, indirOpnd, insertInstr);
    }

    // LDR typeId, [type + offset(typeId)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(type, Js::Type::GetOffsetOfTypeId(), TyMachReg, this->m_func);
        this->CreateAssign(typeId, indirOpnd, insertInstr);
    }

    // CMP typeId, TypeIds_ActivationObject
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(typeId);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::TypeIds_ActivationObject, TyMachReg, this->m_func));
    insertInstr->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr, false);

    // BEQ $helper -- activation objects are not returned as 'this'
    instr = IR::BranchInstr::New(Js::OpCode::BEQ, helper, this->m_func);
    insertInstr->InsertBefore(instr);

    if(assign)
    {
        //$done:
        insertInstr->InsertBefore(done);

        // LDR dst, src1 -- the object itself is the result
        LowererMD::CreateAssign(insertInstr->GetDst(), insertInstr->GetSrc1(), insertInstr);
    }

    // B $fallthrough
    instr = IR::BranchInstr::New(Js::OpCode::B, fallthrough, this->m_func);
    insertInstr->InsertBefore(instr);

    //$helper:
    insertInstr->InsertBefore(helper);
    if(insertInstr->GetDst())
    {
        // LDR dst, undefined -- default result on the helper path
        LowererMD::CreateAssign(insertInstr->GetDst(), m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueUndefined), insertInstr);
    }

    // $fallthrough:
    insertInstr->InsertAfter(fallthrough);
    return true;
}
  6207. void LowererMD::GenerateIsDynamicObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool fContinueLabel)
  6208. {
  6209. // CMP [srcReg], Js::DynamicObject::`vtable'
  6210. IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  6211. cmp->SetSrc1(IR::IndirOpnd::New(regOpnd, 0, TyMachPtr, m_func));
  6212. cmp->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableDynamicObject));
  6213. insertInstr->InsertBefore(cmp);
  6214. LegalizeMD::LegalizeInstr(cmp, false);
  6215. if (fContinueLabel)
  6216. {
  6217. // BEQ $continue
  6218. IR::Instr * jne = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
  6219. insertInstr->InsertBefore(jne);
  6220. }
  6221. else
  6222. {
  6223. // BNE $helper
  6224. IR::Instr * jne = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
  6225. insertInstr->InsertBefore(jne);
  6226. }
  6227. }
// GenerateIsRecyclableObject: branch to labelHelper unless regOpnd holds a
// "true" JavaScript object, i.e. its typeId lies in
// (TypeIds_LastJavascriptPrimitiveType, TypeIds_LastTrueJavascriptObjectType].
// DynamicObjects are accepted early via a vtable check. Falls through on success.
//
//     CMP [srcReg], Js::DynamicObject::`vtable'
//     BEQ $fallThrough
//     LDR r1, [src1 + offset(type)]
//     LDR r1, [r1 + offset(typeId)]
//     SUB r1, -(~TypeIds_LastJavascriptPrimitiveType) -- if (typeId > TypeIds_LastJavascriptPrimitiveType && typeId <= TypeIds_LastTrueJavascriptObjectType)
//     CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
//     BHI $helper
// fallThrough:
void LowererMD::GenerateIsRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject)
{
    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    if (checkObjectAndDynamicObject)
    {
        if (!regOpnd->m_sym->m_isNotInt)
        {
            // Tagged ints are not objects: send them to the helper.
            GenerateObjectTest(regOpnd, insertInstr, labelHelper);
        }

        // A DynamicObject vtable match is an immediate accept.
        this->GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
    }

    IR::RegOpnd * r1 = IR::RegOpnd::New(TyMachReg, this->m_func);

    // LDR r1, [src1 + offset(type)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        this->CreateAssign(r1, indirOpnd, insertInstr);
    }

    // LDR r1, [r1 + offset(typeId)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyMachReg, this->m_func);
        this->CreateAssign(r1, indirOpnd, insertInstr);
    }

    // SUB r1, -(~TypeIds_LastJavascriptPrimitiveType)
    // i.e. r1 -= (TypeIds_LastJavascriptPrimitiveType + 1): bias the typeId so
    // the valid object range starts at 0.
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::SUB, r1, r1, IR::IntConstOpnd::New(-(~Js::TypeIds_LastJavascriptPrimitiveType), TyInt32, this->m_func, true), this->m_func);
        insertInstr->InsertBefore(add);
        LegalizeMD::LegalizeInstr(add, false);
    }

    // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
    // A single unsigned compare covers both range bounds at once.
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        cmp->SetSrc1(r1);
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeIds_LastTrueJavascriptObjectType - Js::TypeIds_LastJavascriptPrimitiveType - 1, TyInt32, this->m_func));
        insertInstr->InsertBefore(cmp);
        LegalizeMD::LegalizeInstr(cmp, false);
    }

    // BHI $helper -- unsigned-above means the typeId was outside the object range.
    {
        IR::Instr * jbe = IR::BranchInstr::New(Js::OpCode::BHI, labelHelper, this->m_func);
        insertInstr->InsertBefore(jbe);
    }

    // $fallThrough
    insertInstr->InsertBefore(labelFallthrough);
}
  6280. bool
  6281. LowererMD::GenerateLdThisCheck(IR::Instr * instr)
  6282. {
  6283. //
  6284. // If not an object, jump to $helper
  6285. // MOV dst, src1 -- return the object itself
  6286. // B $fallthrough
  6287. // $helper:
  6288. // (caller generates helper call)
  6289. // $fallthrough:
  6290. //
  6291. IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
  6292. IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  6293. IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  6294. this->GenerateIsRecyclableObject(src1, instr, helper);
  6295. // MOV dst, src1
  6296. if (instr->GetDst() && !instr->GetDst()->IsEqual(src1))
  6297. {
  6298. this->CreateAssign(instr->GetDst(), src1, instr);
  6299. }
  6300. // B $fallthrough
  6301. {
  6302. IR::Instr * jmp = IR::BranchInstr::New(Js::OpCode::B, fallthrough, this->m_func);
  6303. instr->InsertBefore(jmp);
  6304. }
  6305. // $helper:
  6306. // (caller generates helper call)
  6307. // $fallthrough:
  6308. instr->InsertBefore(helper);
  6309. instr->InsertAfter(fallthrough);
  6310. return true;
  6311. }
  6312. // given object instanceof function, functionReg is a register with function,
  6313. // objectReg is a register with instance and inlineCache is an InstIsInlineCache.
  6314. // We want to generate:
  6315. //
  6316. // fallback on helper (will patch the inline cache) if function does not match the cache
  6317. // LDIMM dst, Js::false
  6318. // LDR cache, [&(inlineCache->function)]
  6319. // CMP functionReg, cache
  6320. // BNE helper
  6321. //
  6322. // fallback if object is a tagged int
  6323. // TST objectReg, Js::AtomTag
  6324. // BNE done
  6325. //
  6326. // return false if object is a primitive
  6327. // LDR typeReg, objectSrc + offsetof(RecyclableObject::type)
  6328. // LDR typeID, [typeReg + offsetof(Type::typeid)]
  6329. // CMP typeID, TypeIds_LastJavascriptPrimitiveType
  6330. // BLE done
  6331. //
  6332. // fallback if object's type is not the cached type
// CMP typeReg, [&(inlineCache->type)]
  6334. // BNE helper
  6335. //
  6336. // use the cached result and fallthrough
  6337. // LDR dst, [&(inlineCache->result)]
  6338. // B done
  6339. //
  6340. // $helper
  6341. // $done
// Emits the inline fast path for "instanceof" using the IsInstInlineCache
// (see the instruction sequence sketched in the comment block above).
// Returns true when the fast path was emitted; control falls through to the
// $helper label on a cache miss, where the caller generates the helper call
// (which also patches the inline cache).
bool
LowererMD::GenerateFastIsInst(IR::Instr * instr)
{
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd * objectSrc;
    IR::RegOpnd * objectReg;
    IR::Opnd * functionSrc;
    IR::RegOpnd * functionReg;
    intptr_t inlineCache;
    IR::Instr * instrArg;

    // We are going to use the extra ArgOut_A instructions to lower the helper call later,
    // so we leave them alone here and clean them up then.
    inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(instr->GetSrc1()->AsIntConstOpnd()->GetValue());
    Assert(instr->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;

    // Walk the single-def ArgOut chain: the first ArgOut carries the instance ...
    objectSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;

    // ... and the second (last) ArgOut carries the constructor function.
    functionSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2() == nullptr);

    // If the destination is not a register, compute into a temporary register
    // and copy it out at the end.
    IR::Opnd* opndDst = instr->GetDst();
    if (!opndDst->IsRegOpnd())
    {
        opndDst = IR::RegOpnd::New(opndDst->GetType(), this->m_func);
    }

    // LDR dst, ValueFalse -- assume "false" until the cached result is loaded
    instr->InsertBefore(IR::Instr::New(Js::OpCode::LDR, opndDst,
        m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), m_func));

    if (functionSrc->IsRegOpnd())
    {
        functionReg = functionSrc->AsRegOpnd();
    }
    else
    {
        functionReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(functionReg, functionSrc, instr);
    }

    // CMP functionReg, [&(inlineCache->function)] -- cache miss if the function differs
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(functionReg);
        cmp->SetSrc2(IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfFunction(), TyMachReg, m_func,
            IR::AddrOpndKindDynamicIsInstInlineCacheFunctionRef));
        instr->InsertBefore(cmp);
        LegalizeMD::LegalizeInstr(cmp, false);
    }

    // BNE helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BNE, helper, m_func));

    if (objectSrc->IsRegOpnd())
    {
        objectReg = objectSrc->AsRegOpnd();
    }
    else
    {
        objectReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(objectReg, objectSrc, instr);
    }

    // TST objectReg, Js::AtomTag
    // BNE done -- a tagged int is never an instance; dst already holds false
    if (!objectReg->m_sym->m_isNotInt)
    {
        GenerateObjectTest(objectReg, instr, done);
    }

    // LDR typeReg, objectSrc + offsetof(RecyclableObject::type)
    instr->InsertBefore(IR::Instr::New(Js::OpCode::LDR, typeReg,
        IR::IndirOpnd::New(objectReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func));
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, m_func));
        instr->InsertBefore(cmp);
        LegalizeMD::LegalizeInstr(cmp, false);
    }

    // BLE done -- primitives are never instances; dst already holds false
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BLE, done, m_func));

    // CMP typeReg, [&(inlineCache->type)] -- cache miss if the object's type differs
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(typeReg);
        cmp->SetSrc2(IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfType(), TyMachReg, m_func));
        instr->InsertBefore(cmp);
        LegalizeMD::LegalizeInstr(cmp, false);
    }

    // BNE helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BNE, helper, m_func));

    // LDR dst, [&(inlineCache->result)] -- cache hit: use the cached boolean
    IR::Instr *result = IR::Instr::New(Js::OpCode::LDR, opndDst,
        IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfResult(), TyMachReg, m_func), m_func);
    instr->InsertBefore(result);
    LegalizeMD::LegalizeInstr(result, false);

    if (opndDst != instr->GetDst())
    {
        LowererMD::CreateAssign(instr->GetDst(), opndDst, instr);
    }

    // B done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, done, m_func));

    // LABEL helper
    instr->InsertBefore(helper);
    instr->InsertAfter(done);

    return true;
}
  6447. // Helper method: inserts legalized assign for given srcOpnd into RegD0 in front of given instr in the following way:
  6448. // dstReg = CreateAssign srcOpnd
  6449. // Used to put args of inline built-in call into RegD0 and RegD1 before we call actual CRT function.
  6450. void LowererMD::GenerateAssignForBuiltinArg(RegNum dstReg, IR::Opnd* srcOpnd, IR::Instr* instr)
  6451. {
  6452. IR::RegOpnd* tempDst = IR::RegOpnd::New(nullptr, dstReg, TyMachDouble, this->m_func);
  6453. tempDst->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  6454. this->CreateAssign(tempDst, srcOpnd, instr);
  6455. }
// For given InlineMathXXX instr, generate the call to the actual CRT function
// or the equivalent CPU instruction. Sqrt/Abs/Floor/Ceil/Round and int/float
// Min/Max are emitted inline; everything else goes through a float helper call
// with args passed in d0/d1 and the result returned in d0.
void LowererMD::GenerateFastInlineBuiltInCall(IR::Instr* instr, IR::JnHelperMethod helperMethod)
{
    switch (instr->m_opcode)
    {
    case Js::OpCode::InlineMathSqrt:
        // Sqrt maps directly to the VFP instruction.
        // src and dst are already float, all we need is just change the opcode and legalize.
        // Before:
        // dst = InlineMathSqrt src1
        // After:
        // <potential VSTR by legalizer if src1 is not a register>
        // dst = VSQRT src1
        Assert(helperMethod == (IR::JnHelperMethod)0);
        Assert(instr->GetSrc2() == nullptr);
        instr->m_opcode = Js::OpCode::VSQRT;
        LegalizeMD::LegalizeInstr(instr, /* fPostRegAlloc = */ false);
        break;

    case Js::OpCode::InlineMathAbs:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathAbs(instr);

    case Js::OpCode::InlineMathFloor:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathFloor(instr);

    case Js::OpCode::InlineMathCeil:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathCeil(instr);

    case Js::OpCode::InlineMathRound:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathRound(instr);

    case Js::OpCode::InlineMathMin:
    case Js::OpCode::InlineMathMax:
    {
        IR::Opnd* src1 = instr->GetSrc1();
        IR::Opnd* src2 = instr->GetSrc2();
        IR::Opnd* dst = instr->GetDst();
        IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr* labelNaNHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::LabelInstr* labelNegZeroCheckHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::Instr* branchInstr;
        bool min = instr->m_opcode == Js::OpCode::InlineMathMin ? true : false;

        // Start with dst = src1, then overwrite with src2 if src2 wins the comparison.
        //(V)MOV dst, src1;
        Assert(!dst->IsEqual(src1));
        this->m_lowerer->InsertMove(dst, src1, instr);

        if(dst->IsInt32())
        {
            // Integer case: a single compare-and-branch suffices (no NaN / -0 concerns).
            // CMP src1, src2
            if(min)
            {
                // BLT $continueLabel
                branchInstr = IR::BranchInstr::New(Js::OpCode::BrLt_I4, doneLabel, src1, src2, instr->m_func);
                instr->InsertBefore(branchInstr);
                this->EmitInt4Instr(branchInstr);
            }
            else
            {
                // BGT $continueLabel
                branchInstr = IR::BranchInstr::New(Js::OpCode::BrGt_I4, doneLabel, src1, src2, instr->m_func);
                instr->InsertBefore(branchInstr);
                this->EmitInt4Instr(branchInstr);
            }
            // MOV dst, src2
            this->m_lowerer->InsertMove(dst, src2, instr);
        }
        else if(dst->IsFloat64())
        {
            // Float case must special-case NaN (result is NaN) and ±0
            // (min(-0,+0) == -0, max(-0,+0) == +0):
            // VCMPF64 src1, src2
            // BCC (min)/ BGT (max) $doneLabel
            // BVS $labelNaNHelper
            // BEQ $labelNegZeroCheckHelper
            // VMOV dst, src2
            // B $doneLabel
            //
            // $labelNegZeroCheckHelper
            //     if(min)
            //     {
            //         if(src2 == -0.0)
            //             VMOV dst, src2
            //     }
            //     else
            //     {
            //         if(src1 == -0.0)
            //             VMOV dst, src2
            //     }
            //     B $doneLabel
            //
            // $labelNaNHelper
            //     VMOV dst, NaN
            //
            // $doneLabel
            if(min)
            {
                this->m_lowerer->InsertCompareBranch(src1, src2, Js::OpCode::BrLt_A, doneLabel, instr); // Lowering of BrLt_A for floats is done to JA with operands swapped
            }
            else
            {
                this->m_lowerer->InsertCompareBranch(src1, src2, Js::OpCode::BrGt_A, doneLabel, instr);
            }
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVS, labelNaNHelper, instr->m_func));
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BEQ, labelNegZeroCheckHelper, instr->m_func));
            this->m_lowerer->InsertMove(dst, src2, instr);
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, doneLabel, instr->m_func));

            instr->InsertBefore(labelNegZeroCheckHelper);
            // Operands compared equal; they may still be -0 vs +0.
            IR::Opnd* isNegZero;
            if(min)
            {
                isNegZero = IsOpndNegZero(src2, instr);
            }
            else
            {
                isNegZero = IsOpndNegZero(src1, instr);
            }
            this->m_lowerer->InsertCompareBranch(isNegZero, IR::IntConstOpnd::New(0x00000000, IRType::TyInt32, this->m_func), Js::OpCode::BrEq_A, doneLabel, instr);
            this->m_lowerer->InsertMove(dst, src2, instr);
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, doneLabel, instr->m_func));

            instr->InsertBefore(labelNaNHelper);
            // Either operand was NaN: the result is NaN.
            IR::Opnd * opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), IRType::TyFloat64, this->m_func,
                IR::AddrOpndKindDynamicDoubleRef);
            this->m_lowerer->InsertMove(dst, opndNaN, instr);
        }
        instr->InsertBefore(doneLabel);

        instr->Remove();
        break;
    }

    default:
        // Generic path: call the CRT helper with float args in d0/d1.
        // Before:
        // dst = <Built-in call> src1, src2
        // After:
        //       d0 = CreateAssign src1
        //       lr = MOV helperAddr
        //            BLX lr
        //      dst = CreateAssign call->dst (d0)

        // Src1
        AssertMsg(instr->GetDst()->IsFloat(), "Currently accepting only float args for math helpers -- dst.");
        AssertMsg(instr->GetSrc1()->IsFloat(), "Currently accepting only float args for math helpers -- src1.");
        AssertMsg(!instr->GetSrc2() || instr->GetSrc2()->IsFloat(), "Currently accepting only float args for math helpers -- src2.");

        this->GenerateAssignForBuiltinArg((RegNum)FIRST_FLOAT_REG, instr->UnlinkSrc1(), instr);

        // Src2
        if (instr->GetSrc2() != nullptr)
        {
            this->GenerateAssignForBuiltinArg((RegNum)(FIRST_FLOAT_REG + 1), instr->UnlinkSrc2(), instr);
        }

        // Call CRT.
        IR::RegOpnd* floatCallDst = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_REG), TyMachDouble, this->m_func);   // Dst in d0.
        IR::Instr* floatCall = IR::Instr::New(Js::OpCode::BLX, floatCallDst, this->m_func);
        instr->InsertBefore(floatCall);

        // lr = MOV helperAddr
        //      BLX lr
        IR::AddrOpnd* targetAddr = IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func);
        IR::RegOpnd *targetOpnd = IR::RegOpnd::New(nullptr, RegLR, TyMachPtr, this->m_func);
        IR::Instr *movInstr = IR::Instr::New(Js::OpCode::LDIMM, targetOpnd, targetAddr, this->m_func);
        targetOpnd->m_isCallArg = true;
        floatCall->SetSrc1(targetOpnd);
        floatCall->InsertBefore(movInstr);

        // Save the result.
        this->CreateAssign(instr->UnlinkDst(), floatCall->GetDst(), instr);
        instr->Remove();
        break;
    }
}
// Generates the inline sequence for Math.abs on int32 or float64 input.
// For int32, uses the branchless identity abs(x) = (x ^ (x >> 31)) - (x >> 31);
// LowerBailOnIntMin has already inserted a bailout for x == INT_MIN (whose abs
// does not fit in int32). For float64, emits a single VABS.
void
LowererMD::GenerateFastInlineBuiltInMathAbs(IR::Instr *inlineInstr)
{
    IR::Opnd* src = inlineInstr->GetSrc1()->Copy(this->m_func);
    IR::Opnd* dst = inlineInstr->UnlinkDst();
    Assert(src);
    IR::Instr* tmpInstr;
    IRType srcType = src->GetType();

    // Insertion point label placed right after the int-min bailout check.
    IR::Instr* nextInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::Instr* continueInstr = m_lowerer->LowerBailOnIntMin(inlineInstr);
    continueInstr->InsertAfter(nextInstr);
    if (srcType == IRType::TyInt32)
    {
        // Note: if execution gets so far, we always get (untagged) int32 here.
        // Since -x = ~x + 1, abs(x) = x, abs(-x) = -x, sign-extend(x) = 0, sign_extend(-x) = -1, where 0 <= x.
        // Then: abs(x) = sign-extend(x) XOR x - sign-extend(x)
        // Expected input (otherwise bailout):
        // - src1 is (untagged) int, not equal to int_min (abs(int_min) would produce overflow, as there's no corresponding positive int).
        Assert(src->IsRegOpnd());
        // tmpDst = EOR src, src ASR #31
        IR::RegOpnd *tmpDst = IR::RegOpnd::New(TyMachReg, this->m_func);
        tmpInstr = IR::Instr::New(Js::OpCode::CLRSIGN, tmpDst, src, this->m_func);
        nextInstr->InsertBefore(tmpInstr);

        // tmpDst = SUB tmpDst, src ASR #31
        tmpInstr = IR::Instr::New(Js::OpCode::SBCMPLNT, tmpDst, tmpDst, src, this->m_func);
        nextInstr->InsertBefore(tmpInstr);

        // MOV dst, tmpDst
        tmpInstr = IR::Instr::New(Js::OpCode::MOV, dst, tmpDst, this->m_func);
        nextInstr->InsertBefore(tmpInstr);
    }
    else if (srcType == IRType::TyFloat64)
    {
        // VABS dst, src
        tmpInstr = IR::Instr::New(Js::OpCode::VABS, dst, src, this->m_func);
        nextInstr->InsertBefore(tmpInstr);
    }
    else
    {
        AssertMsg(FALSE, "GenerateFastInlineBuiltInMathAbs: unexpected type of the src!");
    }
}
  6657. void
  6658. LowererMD::GenerateFastInlineBuiltInMathFloor(IR::Instr* instr)
  6659. {
  6660. Assert(instr->GetDst()->IsInt32());
  6661. IR::LabelInstr * checkNegZeroLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  6662. IR::LabelInstr * checkOverflowLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6663. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6664. // VMOV floatOpnd, src
  6665. IR::Opnd * src = instr->UnlinkSrc1();
  6666. IR::RegOpnd* floatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  6667. this->m_lowerer->InsertMove(floatOpnd, src, instr);
  6668. IR::LabelInstr * bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);;
  6669. bool sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  6670. // NaN check
  6671. IR::Instr *instrCmp = IR::Instr::New(Js::OpCode::VCMPF64, this->m_func);
  6672. instrCmp->SetSrc1(floatOpnd);
  6673. instrCmp->SetSrc2(floatOpnd);
  6674. instr->InsertBefore(instrCmp);
  6675. LegalizeMD::LegalizeInstr(instrCmp, false);
  6676. // VMRS APSR, FPSCR
  6677. // BVS $bailoutLabel
  6678. instr->InsertBefore(IR::Instr::New(Js::OpCode::VMRS, this->m_func));
  6679. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVS, bailoutLabel, this->m_func));
  6680. IR::Opnd * zeroReg = IR::RegOpnd::New(TyFloat64, this->m_func);
  6681. this->LoadFloatZero(zeroReg, instr);
  6682. // VMRS Rorig, FPSCR
  6683. // VMRS Rt, FPSCR
  6684. // BIC Rt, Rt, 0x400000
  6685. // ORR Rt, Rt, 0x800000
  6686. // VMSR FPSCR, Rt
  6687. IR::Opnd* regOrig = IR::RegOpnd::New(TyInt32, this->m_func);
  6688. IR::Opnd* reg = IR::RegOpnd::New(TyInt32, this->m_func);
  6689. instr->InsertBefore(
  6690. IR::Instr::New(Js::OpCode::VMRSR, regOrig, instr->m_func));
  6691. instr->InsertBefore(
  6692. IR::Instr::New(Js::OpCode::VMRSR, reg, instr->m_func));
  6693. instr->InsertBefore(
  6694. IR::Instr::New(Js::OpCode::BIC, reg, reg, IR::IntConstOpnd::New(0x400000, IRType::TyInt32, this->m_func), instr->m_func));
  6695. instr->InsertBefore(
  6696. IR::Instr::New(Js::OpCode::ORR, reg, reg, IR::IntConstOpnd::New(0x800000, IRType::TyInt32, this->m_func), instr->m_func));
  6697. IR::Instr* setFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, instr->m_func);
  6698. setFPSCRInstr->SetSrc1(reg);
  6699. instr->InsertBefore(setFPSCRInstr);
  6700. // VCVTRS32F64 floatreg, floatOpnd
  6701. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat32, this->m_func);
  6702. IR::Opnd * intOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  6703. instr->InsertBefore(
  6704. IR::Instr::New(Js::OpCode::VCVTRS32F64, floatReg, floatOpnd, instr->m_func));
  6705. // VMOVARMVFP intOpnd, floatReg
  6706. instr->InsertBefore(IR::Instr::New(Js::OpCode::VMOVARMVFP, intOpnd, floatReg, this->m_func));
  6707. // VMSR FPSCR, Rorig
  6708. IR::Instr* restoreFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, instr->m_func);
  6709. restoreFPSCRInstr->SetSrc1(regOrig);
  6710. instr->InsertBefore(restoreFPSCRInstr);
  6711. //negZero bailout
  6712. // TST intOpnd, intOpnd
  6713. // BNE checkOverflowLabel
  6714. this->m_lowerer->InsertTestBranch(intOpnd, intOpnd, Js::OpCode::BNE, checkOverflowLabel, instr);
  6715. instr->InsertBefore(checkNegZeroLabelHelper);
  6716. if(instr->ShouldCheckForNegativeZero())
  6717. {
  6718. IR::Opnd * isNegZero = IR::RegOpnd::New(TyInt32, this->m_func);
  6719. isNegZero = this->IsOpndNegZero(src, instr);
  6720. this->m_lowerer->InsertCompareBranch(isNegZero, IR::IntConstOpnd::New(0x00000000, IRType::TyInt32, this->m_func), Js::OpCode::BrNeq_A, bailoutLabel, instr);
  6721. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, doneLabel, instr->m_func));
  6722. }
  6723. instr->InsertBefore(checkOverflowLabel);
  6724. CheckOverflowOnFloatToInt32(instr, intOpnd, bailoutLabel, doneLabel);
  6725. IR::Opnd * dst = instr->UnlinkDst();
  6726. instr->InsertAfter(doneLabel);
  6727. if(!sharedBailout)
  6728. {
  6729. instr->InsertBefore(bailoutLabel);
  6730. }
  6731. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  6732. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  6733. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  6734. // MOV dst, intOpnd
  6735. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV, dst, intOpnd, this->m_func);
  6736. doneLabel->InsertAfter(movInstr);
  6737. }
  6738. void
  6739. LowererMD::GenerateFastInlineBuiltInMathCeil(IR::Instr* instr)
  6740. {
  6741. Assert(instr->GetDst()->IsInt32());
  6742. IR::LabelInstr * checkNegZeroLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  6743. IR::LabelInstr * checkOverflowLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6744. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6745. // VMOV floatOpnd, src
  6746. IR::Opnd * src = instr->UnlinkSrc1();
  6747. IR::RegOpnd* floatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  6748. this->m_lowerer->InsertMove(floatOpnd, src, instr);
  6749. IR::LabelInstr * bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);;
  6750. bool sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  6751. // NaN check
  6752. IR::Instr *instrCmp = IR::Instr::New(Js::OpCode::VCMPF64, this->m_func);
  6753. instrCmp->SetSrc1(floatOpnd);
  6754. instrCmp->SetSrc2(floatOpnd);
  6755. instr->InsertBefore(instrCmp);
  6756. LegalizeMD::LegalizeInstr(instrCmp, false);
  6757. // VMRS APSR, FPSCR
  6758. // BVS $bailoutLabel
  6759. instr->InsertBefore(IR::Instr::New(Js::OpCode::VMRS, this->m_func));
  6760. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVS, bailoutLabel, this->m_func));
  6761. IR::Opnd * zeroReg = IR::RegOpnd::New(TyFloat64, this->m_func);
  6762. this->LoadFloatZero(zeroReg, instr);
  6763. // VMRS Rorig, FPSCR
  6764. // VMRS Rt, FPSCR
  6765. // BIC Rt, Rt, 0x800000
  6766. // ORR Rt, Rt, 0x400000
  6767. // VMSR FPSCR, Rt
  6768. IR::Opnd* regOrig = IR::RegOpnd::New(TyInt32, this->m_func);
  6769. IR::Opnd* reg = IR::RegOpnd::New(TyInt32, this->m_func);
  6770. instr->InsertBefore(
  6771. IR::Instr::New(Js::OpCode::VMRSR, regOrig, instr->m_func));
  6772. instr->InsertBefore(
  6773. IR::Instr::New(Js::OpCode::VMRSR, reg, instr->m_func));
  6774. instr->InsertBefore(
  6775. IR::Instr::New(Js::OpCode::BIC, reg, reg, IR::IntConstOpnd::New(0x800000, IRType::TyInt32, this->m_func), instr->m_func));
  6776. instr->InsertBefore(
  6777. IR::Instr::New(Js::OpCode::ORR, reg, reg, IR::IntConstOpnd::New(0x400000, IRType::TyInt32, this->m_func), instr->m_func));
  6778. IR::Instr* setFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, instr->m_func);
  6779. setFPSCRInstr->SetSrc1(reg);
  6780. instr->InsertBefore(setFPSCRInstr);
  6781. // VCVTRS32F64 floatreg, floatOpnd
  6782. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat32, this->m_func);
  6783. IR::Opnd * intOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  6784. instr->InsertBefore(
  6785. IR::Instr::New(Js::OpCode::VCVTRS32F64, floatReg, floatOpnd, instr->m_func));
  6786. // VMOVARMVFP intOpnd, floatReg
  6787. instr->InsertBefore(IR::Instr::New(Js::OpCode::VMOVARMVFP, intOpnd, floatReg, this->m_func));
  6788. // VMSR FPSCR, Rorig
  6789. IR::Instr* restoreFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, instr->m_func);
  6790. restoreFPSCRInstr->SetSrc1(regOrig);
  6791. instr->InsertBefore(restoreFPSCRInstr);
  6792. //negZero bailout
  6793. // TST intOpnd, intOpnd
  6794. // BNE checkOverflowLabel
  6795. this->m_lowerer->InsertTestBranch(intOpnd, intOpnd, Js::OpCode::BNE, checkOverflowLabel, instr);
  6796. instr->InsertBefore(checkNegZeroLabelHelper);
  6797. if(instr->ShouldCheckForNegativeZero())
  6798. {
  6799. IR::Opnd * isNegZero = IR::RegOpnd::New(TyInt32, this->m_func);
  6800. IR::Opnd * negOne = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegOneAddr(), IRType::TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6801. this->m_lowerer->InsertCompareBranch(floatOpnd, negOne, Js::OpCode::BrNotGe_A, doneLabel, instr);
  6802. this->m_lowerer->InsertCompareBranch(floatOpnd, zeroReg, Js::OpCode::BrNotGe_A, bailoutLabel, instr);
  6803. isNegZero = this->IsOpndNegZero(src, instr);
  6804. this->m_lowerer->InsertCompareBranch(isNegZero, IR::IntConstOpnd::New(0x00000000, IRType::TyInt32, this->m_func), Js::OpCode::BrNeq_A, bailoutLabel, instr);
  6805. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, doneLabel, instr->m_func));
  6806. }
  6807. instr->InsertBefore(checkOverflowLabel);
  6808. CheckOverflowOnFloatToInt32(instr, intOpnd, bailoutLabel, doneLabel);
  6809. IR::Opnd * dst = instr->UnlinkDst();
  6810. instr->InsertAfter(doneLabel);
  6811. if(!sharedBailout)
  6812. {
  6813. instr->InsertBefore(bailoutLabel);
  6814. }
  6815. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  6816. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  6817. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  6818. // MOV dst, intOpnd
  6819. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV, dst, intOpnd, this->m_func);
  6820. doneLabel->InsertAfter(movInstr);
  6821. }
  6822. void
  6823. LowererMD::GenerateFastInlineBuiltInMathRound(IR::Instr* instr)
  6824. {
  6825. Assert(instr->GetDst()->IsInt32());
  6826. IR::LabelInstr * checkNegZeroLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  6827. IR::LabelInstr * checkOverflowLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6828. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6829. // VMOV floatOpnd, src
  6830. IR::Opnd * src = instr->UnlinkSrc1();
  6831. IR::RegOpnd* floatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
  6832. this->m_lowerer->InsertMove(floatOpnd, src, instr);
  6833. IR::LabelInstr * bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);;
  6834. bool sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  6835. // NaN check
  6836. IR::Instr *instrCmp = IR::Instr::New(Js::OpCode::VCMPF64, this->m_func);
  6837. instrCmp->SetSrc1(floatOpnd);
  6838. instrCmp->SetSrc2(floatOpnd);
  6839. instr->InsertBefore(instrCmp);
  6840. LegalizeMD::LegalizeInstr(instrCmp, false);
  6841. // VMRS APSR, FPSCR
  6842. // BVS $bailoutLabel
  6843. instr->InsertBefore(IR::Instr::New(Js::OpCode::VMRS, this->m_func));
  6844. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::BVS, bailoutLabel, this->m_func));
  6845. IR::Opnd * zeroReg = IR::RegOpnd::New(TyFloat64, this->m_func);
  6846. this->LoadFloatZero(zeroReg, instr);
  6847. // Add 0.5
  6848. IR::Opnd * pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), IRType::TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6849. this->m_lowerer->InsertAdd(false, floatOpnd, floatOpnd, pointFive, instr);
  6850. // VMRS Rorig, FPSCR
  6851. // VMRS Rt, FPSCR
  6852. // BIC Rt, Rt, 0x400000
  6853. // ORR Rt, Rt, 0x800000
  6854. // VMSR FPSCR, Rt
  6855. IR::Opnd* regOrig = IR::RegOpnd::New(TyInt32, this->m_func);
  6856. IR::Opnd* reg = IR::RegOpnd::New(TyInt32, this->m_func);
  6857. instr->InsertBefore(
  6858. IR::Instr::New(Js::OpCode::VMRSR, regOrig, instr->m_func));
  6859. instr->InsertBefore(
  6860. IR::Instr::New(Js::OpCode::VMRSR, reg, instr->m_func));
  6861. instr->InsertBefore(
  6862. IR::Instr::New(Js::OpCode::BIC, reg, reg, IR::IntConstOpnd::New(0x400000, IRType::TyInt32, this->m_func), instr->m_func));
  6863. instr->InsertBefore(
  6864. IR::Instr::New(Js::OpCode::ORR, reg, reg, IR::IntConstOpnd::New(0x800000, IRType::TyInt32, this->m_func), instr->m_func));
  6865. IR::Instr* setFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, instr->m_func);
  6866. setFPSCRInstr->SetSrc1(reg);
  6867. instr->InsertBefore(setFPSCRInstr);
  6868. // VCVTRS32F64 floatreg, floatOpnd
  6869. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat32, this->m_func);
  6870. IR::Opnd * intOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  6871. instr->InsertBefore(
  6872. IR::Instr::New(Js::OpCode::VCVTRS32F64, floatReg, floatOpnd, instr->m_func));
  6873. // VMOVARMVFP intOpnd, floatReg
  6874. instr->InsertBefore(IR::Instr::New(Js::OpCode::VMOVARMVFP, intOpnd, floatReg, this->m_func));
  6875. // VMSR FPSCR, Rorig
  6876. IR::Instr* restoreFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, instr->m_func);
  6877. restoreFPSCRInstr->SetSrc1(regOrig);
  6878. instr->InsertBefore(restoreFPSCRInstr);
  6879. //negZero bailout
  6880. // TST intOpnd, intOpnd
  6881. // BNE checkOverflowLabel
  6882. this->m_lowerer->InsertTestBranch(intOpnd, intOpnd, Js::OpCode::BNE, checkOverflowLabel, instr);
  6883. instr->InsertBefore(checkNegZeroLabelHelper);
  6884. if(instr->ShouldCheckForNegativeZero())
  6885. {
  6886. IR::Opnd * isNegZero = IR::RegOpnd::New(TyInt32, this->m_func);
  6887. IR::Opnd * negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegPointFiveAddr(), IRType::TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6888. this->m_lowerer->InsertCompareBranch(src, negPointFive, Js::OpCode::BrNotGe_A, doneLabel, instr);
  6889. this->m_lowerer->InsertCompareBranch(src, zeroReg, Js::OpCode::BrNotGe_A, bailoutLabel, instr);
  6890. isNegZero = this->IsOpndNegZero(src, instr);
  6891. this->m_lowerer->InsertCompareBranch(isNegZero, IR::IntConstOpnd::New(0x00000000, IRType::TyInt32, this->m_func), Js::OpCode::BrNeq_A, bailoutLabel, instr);
  6892. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::B, doneLabel, instr->m_func));
  6893. }
  6894. instr->InsertBefore(checkOverflowLabel);
  6895. CheckOverflowOnFloatToInt32(instr, intOpnd, bailoutLabel, doneLabel);
  6896. IR::Opnd * dst = instr->UnlinkDst();
  6897. instr->InsertAfter(doneLabel);
  6898. if(!sharedBailout)
  6899. {
  6900. instr->InsertBefore(bailoutLabel);
  6901. }
  6902. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  6903. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  6904. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  6905. // MOV dst, intOpnd
  6906. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV, dst, intOpnd, this->m_func);
  6907. doneLabel->InsertAfter(movInstr);
  6908. }
  6909. IR::Opnd* LowererMD::IsOpndNegZero(IR::Opnd* opnd, IR::Instr* instr)
  6910. {
  6911. IR::Opnd * isNegZero = IR::RegOpnd::New(TyInt32, this->m_func);
  6912. LoadDoubleHelperArgument(instr, opnd);
  6913. IR::Instr * helperCallInstr = IR::Instr::New(Js::OpCode::Call, isNegZero, this->m_func);
  6914. instr->InsertBefore(helperCallInstr);
  6915. this->ChangeToHelperCall(helperCallInstr, IR::HelperIsNegZero);
  6916. return isNegZero;
  6917. }
  6918. IR::Instr *
  6919. LowererMD::LowerToFloat(IR::Instr *instr)
  6920. {
  6921. switch (instr->m_opcode)
  6922. {
  6923. case Js::OpCode::Add_A:
  6924. instr->m_opcode = Js::OpCode::VADDF64;
  6925. break;
  6926. case Js::OpCode::Sub_A:
  6927. instr->m_opcode = Js::OpCode::VSUBF64;
  6928. break;
  6929. case Js::OpCode::Mul_A:
  6930. instr->m_opcode = Js::OpCode::VMULF64;
  6931. break;
  6932. case Js::OpCode::Div_A:
  6933. instr->m_opcode = Js::OpCode::VDIVF64;
  6934. break;
  6935. case Js::OpCode::Neg_A:
  6936. instr->m_opcode = Js::OpCode::VNEGF64;
  6937. break;
  6938. case Js::OpCode::BrEq_A:
  6939. case Js::OpCode::BrNeq_A:
  6940. case Js::OpCode::BrSrEq_A:
  6941. case Js::OpCode::BrSrNeq_A:
  6942. case Js::OpCode::BrGt_A:
  6943. case Js::OpCode::BrGe_A:
  6944. case Js::OpCode::BrLt_A:
  6945. case Js::OpCode::BrLe_A:
  6946. case Js::OpCode::BrNotEq_A:
  6947. case Js::OpCode::BrNotNeq_A:
  6948. case Js::OpCode::BrSrNotEq_A:
  6949. case Js::OpCode::BrSrNotNeq_A:
  6950. case Js::OpCode::BrNotGt_A:
  6951. case Js::OpCode::BrNotGe_A:
  6952. case Js::OpCode::BrNotLt_A:
  6953. case Js::OpCode::BrNotLe_A:
  6954. return this->LowerFloatCondBranch(instr->AsBranchInstr());
  6955. default:
  6956. Assume(UNREACHED);
  6957. }
  6958. LegalizeMD::LegalizeInstr(instr, false);
  6959. return instr;
  6960. }
  6961. IR::BranchInstr *
  6962. LowererMD::LowerFloatCondBranch(IR::BranchInstr *instrBranch, bool ignoreNaN)
  6963. {
  6964. IR::Instr *instr;
  6965. Js::OpCode brOpcode = Js::OpCode::InvalidOpCode;
  6966. bool addNaNCheck = false;
  6967. Func * func = instrBranch->m_func;
  6968. IR::Opnd *src1 = instrBranch->UnlinkSrc1();
  6969. IR::Opnd *src2 = instrBranch->UnlinkSrc2();
  6970. IR::Instr *instrCmp = IR::Instr::New(Js::OpCode::VCMPF64, func);
  6971. instrCmp->SetSrc1(src1);
  6972. instrCmp->SetSrc2(src2);
  6973. instrBranch->InsertBefore(instrCmp);
  6974. LegalizeMD::LegalizeInstr(instrCmp, false);
  6975. instrBranch->InsertBefore(IR::Instr::New(Js::OpCode::VMRS, func));
  6976. switch (instrBranch->m_opcode)
  6977. {
  6978. case Js::OpCode::BrSrEq_A:
  6979. case Js::OpCode::BrEq_A:
  6980. case Js::OpCode::BrNotNeq_A:
  6981. case Js::OpCode::BrSrNotNeq_A:
  6982. brOpcode = Js::OpCode::BEQ;
  6983. break;
  6984. case Js::OpCode::BrNeq_A:
  6985. case Js::OpCode::BrSrNeq_A:
  6986. case Js::OpCode::BrSrNotEq_A:
  6987. case Js::OpCode::BrNotEq_A:
  6988. brOpcode = Js::OpCode::BNE;
  6989. addNaNCheck = !ignoreNaN; //Special check for BNE as it is set when the operands are unordered (NaN).
  6990. break;
  6991. case Js::OpCode::BrLe_A:
  6992. brOpcode = Js::OpCode::BLS; //Can't use BLE as it is set when the operands are unordered (NaN).
  6993. break;
  6994. case Js::OpCode::BrLt_A:
  6995. brOpcode = Js::OpCode::BCC; //Can't use BLT as is set when the operands are unordered (NaN).
  6996. break;
  6997. case Js::OpCode::BrGe_A:
  6998. brOpcode = Js::OpCode::BGE;
  6999. break;
  7000. case Js::OpCode::BrGt_A:
  7001. brOpcode = Js::OpCode::BGT;
  7002. break;
  7003. case Js::OpCode::BrNotLe_A:
  7004. brOpcode = Js::OpCode::BHI;
  7005. break;
  7006. case Js::OpCode::BrNotLt_A:
  7007. brOpcode = Js::OpCode::BPL;
  7008. break;
  7009. case Js::OpCode::BrNotGe_A:
  7010. brOpcode = Js::OpCode::BLT;
  7011. break;
  7012. case Js::OpCode::BrNotGt_A:
  7013. brOpcode = Js::OpCode::BLE;
  7014. break;
  7015. default:
  7016. Assert(false);
  7017. break;
  7018. }
  7019. if (addNaNCheck)
  7020. {
  7021. instr = IR::BranchInstr::New(Js::OpCode::BVS, instrBranch->GetTarget(), func);
  7022. instrBranch->InsertBefore(instr);
  7023. }
  7024. instr = IR::BranchInstr::New(brOpcode, instrBranch->GetTarget(), func);
  7025. instrBranch->InsertBefore(instr);
  7026. instrBranch->Remove();
  7027. return instr->AsBranchInstr();
  7028. }
  7029. void
  7030. LowererMD::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  7031. {
  7032. IR::Instr *instr;
  7033. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
  7034. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  7035. Assert(src->IsRegOpnd() && src->IsInt32());
  7036. instr = IR::Instr::New(Js::OpCode::VMOVARMVFP, floatReg, src, this->m_func);
  7037. instrInsert->InsertBefore(instr);
  7038. // Convert to Float
  7039. instr = IR::Instr::New(Js::OpCode::VCVTF64S32, dst, floatReg, this->m_func);
  7040. instrInsert->InsertBefore(instr);
  7041. }
  7042. void
  7043. LowererMD::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  7044. {
  7045. IR::Instr *instr;
  7046. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
  7047. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  7048. Assert(src->IsRegOpnd() && src->IsInt32());
  7049. instr = IR::Instr::New(Js::OpCode::VMOVARMVFP, floatReg, src, this->m_func);
  7050. instrInsert->InsertBefore(instr);
  7051. // Convert to Float
  7052. instr = IR::Instr::New(Js::OpCode::VCVTF64U32, dst, floatReg, this->m_func);
  7053. instrInsert->InsertBefore(instr);
  7054. }
  7055. void LowererMD::ConvertFloatToInt32(IR::Opnd* intOpnd, IR::Opnd* floatOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone, IR::Instr * instrInsert)
  7056. {
  7057. Assert(floatOpnd->IsFloat64());
  7058. Assert(intOpnd->IsInt32());
  7059. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat32, this->m_func);
  7060. // VCVTS32F64 dst.i32, src.f64
  7061. // Convert to int
  7062. IR::Instr * instr = IR::Instr::New(Js::OpCode::VCVTS32F64, floatReg, floatOpnd, this->m_func);
  7063. instrInsert->InsertBefore(instr);
  7064. Legalize(instr);
  7065. //Move to integer reg
  7066. instr = IR::Instr::New(Js::OpCode::VMOVARMVFP, intOpnd, floatReg, this->m_func);
  7067. instrInsert->InsertBefore(instr);
  7068. Legalize(instr);
  7069. this->CheckOverflowOnFloatToInt32(instrInsert, intOpnd, labelHelper, labelDone);
  7070. }
// int32 -> int64 widening is not implemented in this (32-bit ARM) lowerer;
// callers must never emit it here.
void
LowererMD::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(UNREACHED);
}
// uint32 -> int64 widening is not implemented in this (32-bit ARM) lowerer;
// callers must never emit it here.
void
LowererMD::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(UNREACHED);
}
// int64 -> int32 narrowing is not implemented in this (32-bit ARM) lowerer;
// callers must never emit it here.
void
LowererMD::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    Assert(UNREACHED);
}
// After a float->int32 VCVT, a saturated result (0x80000000 or 0x7FFFFFFF)
// may mean the source did not fit in int32. Branch to labelHelper on either
// saturation value; branch to labelDone for any other (known-good) result.
// Falling through (value == 0x7FFFFFFF) also reaches the helper.
void
LowererMD::CheckOverflowOnFloatToInt32(IR::Instr* instrInsert, IR::Opnd* intOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone)
{
    // CMP intOpnd, 0x80000000 -- Check for overflow
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(intOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New(0x80000000, TyInt32, this->m_func, true));
    instrInsert->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr, false);

    // BEQ $helper
    instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
    instrInsert->InsertBefore(instr);

    // CMP intOpnd, 0x7fffffff -- Check for overflow
    // 0x7FFFFFFF is materialized as MVN reg, 0x80000000 (bitwise NOT) since it
    // is not encodable as an immediate.
    IR::RegOpnd *regOpnd= IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MVN,
        regOpnd,
        IR::IntConstOpnd::New(0x80000000, TyInt32, this->m_func, true),
        this->m_func);
    instrInsert->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr, false);

    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(intOpnd);
    instr->SetSrc2(regOpnd);
    instrInsert->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr, false);

    // BNE $done
    instr = IR::BranchInstr::New(Js::OpCode::BNE, labelDone, this->m_func);
    instrInsert->InsertBefore(instr);
}
// Convert the double in src to an int32 in dst. The inline path uses
// ConvertFloatToInt32; values that saturate fall back to a call to the
// Conv_ToInt32Core helper. When the caller's bailout kind requests
// BailOutOnArrayAccessHelperCall, no helper is emitted at all — control jumps
// straight to labelBailOut instead.
void
LowererMD::EmitFloatToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
{
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    if (instrBailOut && instrBailOut->HasBailOutInfo())
    {
        bailOutKind = instrBailOut->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
        {
            // Bail out instead of calling the helper. If this is happening
            // unconditionally, the caller should instead throw a rejit exception.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrInsert);
            return;
        }
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *instr;

    // Inline fast path; branches to $helper on saturation, $done on success.
    ConvertFloatToInt32(dst, src, labelHelper, labelDone, instrInsert);

    // $helper
    instrInsert->InsertBefore(labelHelper);
    instr = IR::Instr::New(Js::OpCode::Call, dst, this->m_func);
    instrInsert->InsertBefore(instr);

    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        // The helper call may trigger implicit calls; attach bailout info so it
        // can bail out, sharing the bailout target if this instr owns the info.
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // dst = ToInt32Core(src);
    LoadDoubleHelperArgument(instr, src);
    this->ChangeToHelperCall(instr, IR::HelperConv_ToInt32Core);

    // $done
    instrInsert->InsertBefore(labelDone);
}
// Emit a float64 -> int32 conversion honoring roundMode, returning the last
// instruction inserted. Only RoundModeTowardInteger (round-half-away via
// "+0.5 then truncate") and RoundModeHalfToEven (via FPSCR manipulation) are
// supported here.
IR::Instr *
LowererMD::InsertConvertFloat64ToInt32(const RoundMode roundMode, IR::Opnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
{
    Assert(dst);
    Assert(dst->IsInt32());
    Assert(src);
    Assert(src->IsFloat64());
    Assert(insertBeforeInstr);

    // The caller is expected to check for overflow. To have that work be done
    // automatically, use LowererMD::EmitFloatToInt.
    Func *const func = insertBeforeInstr->m_func;
    IR::AutoReuseOpnd autoReuseSrcPlusHalf;
    IR::Instr *instr = nullptr;

    switch (roundMode)
    {
    case RoundModeTowardInteger:
    {
        // Conversion with rounding towards nearest integer is not supported by
        // the architecture. Add 0.5 and do a round-toward-zero conversion instead.
        IR::RegOpnd *const srcPlusHalf = IR::RegOpnd::New(TyFloat64, func);
        autoReuseSrcPlusHalf.Initialize(srcPlusHalf, func);
        Lowerer::InsertAdd(
            false /* needFlags */,
            srcPlusHalf,
            src,
            IR::MemRefOpnd::New(insertBeforeInstr->m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, func,
                IR::AddrOpndKindDynamicDoubleRef),
            insertBeforeInstr);
        instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeTowardZero), dst, srcPlusHalf, func);
        insertBeforeInstr->InsertBefore(instr);
        LowererMD::Legalize(instr);
        return instr;
    }
    case RoundModeHalfToEven:
    {
        // On ARM we need to set the rounding mode bits of the FPSCR.
        // These are bits 22 and 23 and we need them to be off for "Round to Nearest (RN) mode".
        // After doing the convert (via VCVTRS32F64) we need to restore the original FPSCR state.
        //     VMRS Rorig, FPSCR
        //     VMRS Rt, FPSCR
        //     BIC Rt, Rt, 0xC00000
        //     VMSR FPSCR, Rt
        IR::Opnd* regOrig = IR::RegOpnd::New(TyInt32, func);
        IR::Opnd* reg = IR::RegOpnd::New(TyInt32, func);
        insertBeforeInstr->InsertBefore(
            IR::Instr::New(Js::OpCode::VMRSR, regOrig, func));
        insertBeforeInstr->InsertBefore(
            IR::Instr::New(Js::OpCode::VMRSR, reg, func));
        insertBeforeInstr->InsertBefore(
            IR::Instr::New(Js::OpCode::BIC, reg, reg, IR::IntConstOpnd::New(0xC00000, IRType::TyInt32, func), func));
        IR::Instr* setFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, func);
        setFPSCRInstr->SetSrc1(reg);
        insertBeforeInstr->InsertBefore(setFPSCRInstr);

        // VCVTRS32F64 floatReg, src -- convert using the current (RN) rounding mode.
        IR::RegOpnd *floatReg = IR::RegOpnd::New(TyFloat32, func);
        insertBeforeInstr->InsertBefore(
            IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeHalfToEven), floatReg, src, func));

        // VMOVARMVFP dst, floatReg -- copy the int result into a core register.
        insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::VMOVARMVFP, dst, floatReg, func));

        // VMSR FPSCR, Rorig -- restore the caller's rounding mode.
        IR::Instr* restoreFPSCRInstr = IR::Instr::New(Js::OpCode::VMSR, func);
        restoreFPSCRInstr->SetSrc1(regOrig);
        insertBeforeInstr->InsertBefore(restoreFPSCRInstr);
        return restoreFPSCRInstr;
    }
    default:
        AssertMsg(0, "RoundMode not supported.");
        return nullptr;
    }
}
  7223. IR::Instr *
  7224. LowererMD::LoadFloatZero(IR::Opnd * opndDst, IR::Instr * instrInsert)
  7225. {
  7226. Assert(opndDst->GetType() == TyFloat64);
  7227. IR::Opnd * zero = IR::MemRefOpnd::New(instrInsert->m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, instrInsert->m_func, IR::AddrOpndKindDynamicDoubleRef);
  7228. return Lowerer::InsertMove(opndDst, zero, instrInsert);
  7229. }
// Load the double constant `value` into opndDst via a VLDR from native code
// data. +0.0 short-circuits to the shared zero constant. For OOP JIT the
// constant is addressed indirectly through the work item's native data block;
// in-proc JIT references its address directly.
IR::Instr *
LowererMD::LoadFloatValue(IR::Opnd * opndDst, double value, IR::Instr * instrInsert)
{
    // Floating point zero is a common value to load. Use the single shared
    // memory location instead of allocating new memory for each.
    const bool isFloatZero = value == 0.0 && !Js::JavascriptNumber::IsNegZero(value); // (-0.0 == 0.0) yields true
    if (isFloatZero)
    {
        return LowererMD::LoadFloatZero(opndDst, instrInsert);
    }

    // Allocate the constant in the function's native code data.
    void * pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_LoadFloatValue_Double>, value);
    IR::Opnd * opnd;
    if (instrInsert->m_func->IsOOPJIT())
    {
        // OOP JIT: load the native-data base pointer, then address the constant
        // at its offset within that block.
        int offset = NativeCodeData::GetDataTotalOffset(pValue);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
        Lowerer::InsertMove(
            addressRegOpnd,
            IR::MemRefOpnd::New(instrInsert->m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, instrInsert->m_func, IR::AddrOpndKindDynamicNativeCodeDataRef),
            instrInsert);
        opnd = IR::IndirOpnd::New(addressRegOpnd, offset, TyMachDouble,
#if DBG
            NativeCodeData::GetDataDescription(pValue, instrInsert->m_func->m_alloc),
#endif
            instrInsert->m_func, true);
    }
    else
    {
        // In-proc JIT: the constant's address is directly addressable.
        opnd = IR::MemRefOpnd::New((void*)pValue, TyMachDouble, instrInsert->m_func);
    }
    IR::Instr * instr = IR::Instr::New(Js::OpCode::VLDR, opndDst, opnd, instrInsert->m_func);
    instrInsert->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr,false);
    return instr;
}
// Branch to labelHelper unless opndSrc holds a boxed JavascriptNumber
// (vtable compare). No code is emitted when the value type already proves
// the operand is a float.
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }

    if(checkForNullInLoopBody && m_func->IsLoopBody())
    {
        // It's possible that the value was determined dead by the jitted function and was not restored. The jitted loop
        // body may not realize that it's dead and may try to use it. Check for null in loop bodies.
        //     test src1, src1
        //     jz $helper (bail out)
        m_lowerer->InsertCompareBranch(
            opndSrc,
            IR::AddrOpnd::NewNull(m_func),
            Js::OpCode::BrEq_A,
            labelHelper,
            insertInstr);
    }

    // Load the object's vtable pointer (first machine word of the object).
    IR::RegOpnd *vt = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Opnd* opnd = IR::IndirOpnd::New(opndSrc, (int32)0, TyMachPtr, this->m_func);
    LowererMD::CreateAssign(vt, opnd, insertInstr);

    // CMP [number], JavascriptNumber::vtable
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(vt);
    instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
    insertInstr->InsertBefore(instr);
    LegalizeMD::LegalizeInstr(instr,false);

    // BNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::BNE, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
  7296. void LowererMD::LoadFloatValue(IR::RegOpnd * javascriptNumber, IR::RegOpnd * opndFloat, IR::LabelInstr * labelHelper, IR::Instr * instrInsert, const bool checkForNullInLoopBody)
  7297. {
  7298. IR::Instr* instr;
  7299. IR::Opnd* opnd;
  7300. // Make sure it is float
  7301. this->GenerateFloatTest(javascriptNumber, instrInsert, labelHelper, checkForNullInLoopBody);
  7302. // VLDR opndFloat, [number + offsetof(value)]
  7303. opnd = IR::IndirOpnd::New(javascriptNumber, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
  7304. instr = IR::Instr::New(Js::OpCode::VLDR, opndFloat, opnd, this->m_func);
  7305. instrInsert->InsertBefore(instr);
  7306. }
// Machine-specific legalization entry point. The one ARM-specific case handled
// here is VCVTS32F64 with an int32 (core register) dst: the convert is split
// into VCVTS32F64 into an S register followed by VMOVARMVFP into the core
// register. Everything else is delegated to LegalizeMD::LegalizeInstr.
// When `verify` is true this only asserts that legalization was already done
// (debug checking) and must not mutate the instruction.
template <bool verify>
void
LowererMD::Legalize(IR::Instr *const instr, bool fPostRegAlloc)
{
    Func *const func = instr->m_func;

    if(instr->m_opcode == Js::OpCode::VCVTS32F64 && instr->GetDst()->IsInt32())
    {
        if (verify)
        {
            AssertMsg(false, "Missing legalization");
            return;
        }
        // This needs to be split into two steps.
        IR::RegOpnd *const float32Reg = IR::RegOpnd::New(TyFloat32, func);
        const IR::AutoReuseOpnd autoReuseFloat32Reg(float32Reg, func);
        IR::Instr *const newInstr = IR::Instr::New(Js::OpCode::VCVTS32F64, float32Reg, instr->GetSrc1(), func);
        instr->InsertBefore(newInstr);
        LegalizeMD::LegalizeInstr(newInstr, false);
        instr->m_opcode = Js::OpCode::VMOVARMVFP;
        instr->ReplaceSrc1(float32Reg);
    }

    if (verify)
    {
        // NYI for the rest of legalization
        return;
    }
    LegalizeMD::LegalizeInstr(instr, fPostRegAlloc);
}
// Explicit instantiations: the mutating legalizer is always available; the
// verify-only flavor is compiled in debug builds only.
template void LowererMD::Legalize<false>(IR::Instr *const instr, bool fPostRegalloc);
#if DBG
template void LowererMD::Legalize<true>(IR::Instr *const instr, bool fPostRegalloc);
#endif
// Final lowering pass. Walks the instruction list backwards (tail to head) to:
//   - expand remaining pseudo-ops (Ret, Leave),
//   - give each label a conservative offset, counting MachMaxInstrSize bytes
//     per lowered instruction (real offsets are assigned by the encoder),
//   - legalize direct branches whose conservative distance is out of encoding
//     range; branches whose target has no offset yet are queued in a reloc
//     list and legalized after the walk,
//   - expand assign/LEA/LDARGOUTSZ/REM pseudo instructions via
//     FinalLowerAssign, adding the extra instruction count into the offset.
void
LowererMD::FinalLower()
{
    NoRecoverMemoryArenaAllocator tempAlloc(_u("BE-ARMFinalLower"), m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    EncodeReloc *pRelocList = nullptr;
    uint32 instrOffset = 0;
    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
    {
        if (instr->IsLowered() == false)
        {
            if (instr->IsLabelInstr())
            {
                // This is not the real offset; the real offset gets set in the encoder.
                IR::LabelInstr *labelInstr = instr->AsLabelInstr();
                labelInstr->SetOffset(instrOffset);
            }
            switch (instr->m_opcode)
            {
            case Js::OpCode::Ret:
                instr->Remove();
                break;
            case Js::OpCode::Leave:
                Assert(this->m_func->DoOptimizeTry() && !this->m_func->IsLoopBodyInTry());
                instrPrev = this->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
                break;
            }
        }
        else
        {
            // We are conservative here; assume each instruction takes 4 bytes.
            instrOffset = instrOffset + MachMaxInstrSize;
            if (instr->IsBranchInstr())
            {
                IR::BranchInstr *branchInstr = instr->AsBranchInstr();
                if (branchInstr->GetTarget() && !LowererMD::IsUnconditionalBranch(branchInstr)) // Ignore BX register-based branches & B
                {
                    uint32 targetOffset = branchInstr->GetTarget()->GetOffset();
                    if (targetOffset != 0)
                    {
                        // Target label already visited in this (backward) walk:
                        // its conservative offset is known, legalize now.
                        if (LegalizeMD::LegalizeDirectBranch(branchInstr, instrOffset))
                        {
                            // There might be an instruction inserted for legalizing a conditional branch.
                            instrOffset = instrOffset + MachMaxInstrSize;
                        }
                    }
                    else
                    {
                        EncodeReloc::New(&pRelocList, RelocTypeBranch20, (BYTE*)instrOffset, branchInstr, &tempAlloc);
                        // Assume this is a forward long branch; fix up after the complete pass. Be conservative here.
                        instrOffset = instrOffset + MachMaxInstrSize;
                    }
                }
            }
            else if (LowererMD::IsAssign(instr) || instr->m_opcode == Js::OpCode::LEA || instr->m_opcode == Js::OpCode::LDARGOUTSZ || instr->m_opcode == Js::OpCode::REM)
            {
                // Cleanup spill code.
                // INSTR_BACKWARD_EDITING_IN_RANGE implies that the next loop iteration will use instrPrev
                // (instr->m_prev computed before entering the current loop iteration).
                IR::Instr* instrNext = instr->m_next;
                bool canExpand = this->FinalLowerAssign(instr);
                if (canExpand)
                {
                    uint32 expandedInstrCount = 0; // The number of instrs the LDIMM expands into.
                    FOREACH_INSTR_IN_RANGE(instrCount, instrPrev->m_next, instrNext)
                    {
                        ++expandedInstrCount;
                    }
                    NEXT_INSTR_IN_RANGE;
                    Assert(expandedInstrCount > 0);
                    // Adjust the offset for expanded instrs.
                    instrOffset += (expandedInstrCount - 1) * MachMaxInstrSize; // We already accounted for one MachMaxInstrSize.
                }
            }
        }
    } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;

    // Fix up all the branches whose targets were unresolved during the walk.
    for (EncodeReloc *reloc = pRelocList; reloc; reloc = reloc->m_next)
    {
        AssertMsg((uint32)reloc->m_consumerOffset < reloc->m_relocInstr->AsBranchInstr()->GetTarget()->GetOffset(), "Only forward branches require fixup");
        LegalizeMD::LegalizeDirectBranch(reloc->m_relocInstr->AsBranchInstr(), (uint32)reloc->m_consumerOffset);
    }
    return;
}
// Expand one pseudo assign-like instruction into real machine instruction(s).
// Returns true if, and only if, the assign may expand into multiple instrs
// (so the caller can recount bytes for offset bookkeeping).
bool
LowererMD::FinalLowerAssign(IR::Instr * instr)
{
    if (instr->m_opcode == Js::OpCode::LDIMM)
    {
        LegalizeMD::LegalizeInstr(instr, true);
        // LDIMM can expand into MOV/MOVT when the immediate is more than 16 bits;
        // it can also expand into multiple different no-op (normally MOV) instrs when we obfuscate it, which is random.
        return true;
    }
    else if (EncoderMD::IsLoad(instr) || instr->m_opcode == Js::OpCode::LEA)
    {
        // Register-to-register loads collapse to MOV/VMOV; memory sources are
        // legalized (which may emit extra instrs).
        Assert(instr->GetDst()->IsRegOpnd());
        if (!instr->GetSrc1()->IsRegOpnd())
        {
            LegalizeMD::LegalizeSrc(instr, instr->GetSrc1(), 1, true);
            return true;
        }
        instr->m_opcode = (instr->GetSrc1()->GetType() == TyMachDouble) ? Js::OpCode::VMOV : Js::OpCode::MOV;
    }
    else if (EncoderMD::IsStore(instr))
    {
        // Same for stores: register dst becomes MOV/VMOV, else legalize the dst.
        Assert(instr->GetSrc1()->IsRegOpnd());
        if (!instr->GetDst()->IsRegOpnd())
        {
            LegalizeMD::LegalizeDst(instr, true);
            return true;
        }
        instr->m_opcode = (instr->GetDst()->GetType() == TyMachDouble) ? Js::OpCode::VMOV : Js::OpCode::MOV;
    }
    else if (instr->m_opcode == Js::OpCode::LDARGOUTSZ)
    {
        Assert(instr->GetDst()->IsRegOpnd());
        Assert((instr->GetSrc1() == nullptr) && (instr->GetSrc2() == nullptr));
        // dst = LDARGOUTSZ
        // This loads the function's arg out area size into the dst operand. We need a pseudo-op,
        // because we generate the instruction during Lower but don't yet know the value of the constant it needs
        // to load. Change it to the appropriate LDIMM here.
        uint32 argOutSize = UInt32Math::Mul(this->m_func->m_argSlotsForFunctionsCalled, MachRegInt, Js::Throw::OutOfMemory);
        instr->SetSrc1(IR::IntConstOpnd::New(argOutSize, TyMachReg, this->m_func));
        instr->m_opcode = Js::OpCode::LDIMM;
        LegalizeMD::LegalizeInstr(instr, true);
        return true;
    }
    else if (instr->m_opcode == Js::OpCode::REM)
    {
        // ARM has no integer remainder instruction; compute it as
        // rem = src1 - (src1 / src2) * src2 using SDIV + MLS with r12 as scratch.
        IR::Opnd* dst = instr->GetDst();
        IR::Opnd* src1 = instr->GetSrc1();
        IR::Opnd* src2 = instr->GetSrc2();
        Assert(src1->IsRegOpnd() && src1->AsRegOpnd()->GetReg() != RegR12);
        Assert(src2->IsRegOpnd() && src2->AsRegOpnd()->GetReg() != RegR12);
        // r12 = SDIV src1, src2
        IR::RegOpnd *regR12 = IR::RegOpnd::New(nullptr, RegR12, TyMachReg, instr->m_func);
        IR::Instr *insertInstr = IR::Instr::New(Js::OpCode::SDIV, regR12, src1, src2, instr->m_func);
        instr->InsertBefore(insertInstr);
        // dst = MLS (r12,) src2, src1
        insertInstr = IR::Instr::New(Js::OpCode::MLS, dst, src2, src1, instr->m_func);
        instr->InsertBefore(insertInstr);
        instr->Remove();
        return true;
    }
    return false;
}
  7486. IR::Opnd *
  7487. LowererMD::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  7488. {
  7489. return this->m_lowerer->GenerateArgOutForStackArgs(callInstr, stackArgsInstr);
  7490. }
// Lower an int32 division that must be exact (remainder zero), bailing out
// otherwise. ARM SDIV produces only the quotient, so the quotient is
// multiplied back and compared against the numerator:
//     result = SDIV numerator, denominator
//     mulResult = MUL result, denominator
//     CMP mulResult, numerator
//     BNE bailout
//     <Caller inserts more checks here>
//     dst = MOV result            <-- returned insertBeforeInstr
IR::Instr *
LowererMD::LowerDivI4AndBailOnReminder(IR::Instr * instr, IR::LabelInstr * bailOutLabel)
{
    instr->m_opcode = Js::OpCode::SDIV;

    // Delay assigning to the final dst.
    IR::Instr * sinkedInstr = instr->SinkDst(Js::OpCode::MOV);
    LegalizeMD::LegalizeInstr(instr, false);
    LegalizeMD::LegalizeInstr(sinkedInstr, false);

    IR::Opnd * resultOpnd = instr->GetDst();
    IR::Opnd * numerator = instr->GetSrc1();
    IR::Opnd * denominatorOpnd = instr->GetSrc2();

    // Insert all checks before the assignment to the actual dst.
    IR::Instr * insertBeforeInstr = instr->m_next;

    // Jump to bailout if the remainder is not 0 (i.e. divResult * denominator
    // is not the same as the numerator).
    IR::RegOpnd * mulResult = IR::RegOpnd::New(TyInt32, m_func);
    IR::Instr * mulInstr = IR::Instr::New(Js::OpCode::MUL, mulResult, resultOpnd, denominatorOpnd, m_func);
    insertBeforeInstr->InsertBefore(mulInstr);
    LegalizeMD::LegalizeInstr(mulInstr, false);

    this->m_lowerer->InsertCompareBranch(mulResult, numerator, Js::OpCode::BrNeq_A, bailOutLabel, insertBeforeInstr);
    return insertBeforeInstr;
}
// No ARM-specific handling for the inline-spread arg-out loop; forward to the
// shared register-based implementation in the common lowerer.
void
LowererMD::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    this->m_lowerer->LowerInlineSpreadArgOutLoopUsingRegisters(callInstr, indexOpnd, arrayElementsStartOpnd);
}
  7523. void
  7524. LowererMD::LowerTypeof(IR::Instr* typeOfInstr)
  7525. {
  7526. Func * func = typeOfInstr->m_func;
  7527. IR::Opnd * src1 = typeOfInstr->GetSrc1();
  7528. IR::Opnd * dst = typeOfInstr->GetDst();
  7529. Assert(src1->IsRegOpnd() && dst->IsRegOpnd());
  7530. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  7531. IR::LabelInstr * taggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  7532. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  7533. // MOV typeDisplayStringsArray, &javascriptLibrary->typeDisplayStrings
  7534. IR::RegOpnd * typeDisplayStringsArrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
  7535. m_lowerer->InsertMove(typeDisplayStringsArrayOpnd, IR::AddrOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetTypeDisplayStringsOffset(), IR::AddrOpndKindConstantAddress, this->m_func), typeOfInstr);
  7536. GenerateObjectTest(src1, typeOfInstr, taggedIntLabel);
  7537. // MOV typeRegOpnd, [src1 + offset(Type)]
  7538. IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, func);
  7539. m_lowerer->InsertMove(typeRegOpnd,
  7540. IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func),
  7541. typeOfInstr);
  7542. IR::LabelInstr * falsyLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  7543. m_lowerer->GenerateFalsyObjectTest(typeOfInstr, typeRegOpnd, falsyLabel);
  7544. // <$not falsy>
  7545. // MOV typeId, TypeIds_Object
  7546. // MOV objTypeId, [typeRegOpnd + offsetof(typeId)]
  7547. // CMP objTypeId, TypeIds_Limit /*external object test*/
  7548. // BCS $externalObjectLabel
  7549. // MOV typeId, objTypeId
  7550. // $loadTypeDisplayStringLabel:
  7551. // MOV dst, typeDisplayStrings[typeId]
  7552. // TEST dst, dst
  7553. // BEQ $helper
  7554. // B $done
  7555. IR::RegOpnd * typeIdOpnd = IR::RegOpnd::New(TyUint32, func);
  7556. m_lowerer->InsertMove(typeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Object, TyUint32, func), typeOfInstr);
  7557. IR::RegOpnd * objTypeIdOpnd = IR::RegOpnd::New(TyUint32, func);
  7558. m_lowerer->InsertMove(objTypeIdOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, func), typeOfInstr);
  7559. IR::LabelInstr * loadTypeDisplayStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  7560. m_lowerer->InsertCompareBranch(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Limit, TyUint32, func), Js::OpCode::BrGe_A, true /*unsigned*/, loadTypeDisplayStringLabel, typeOfInstr);
  7561. m_lowerer->InsertMove(typeIdOpnd, objTypeIdOpnd, typeOfInstr);
  7562. typeOfInstr->InsertBefore(loadTypeDisplayStringLabel);
  7563. if (dst->IsEqual(src1))
  7564. {
  7565. ChangeToAssign(typeOfInstr->HoistSrc1(Js::OpCode::Ld_A));
  7566. }
  7567. m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, typeIdOpnd, this->GetDefaultIndirScale(), TyMachPtr, func), typeOfInstr);
  7568. m_lowerer->InsertTestBranch(dst, dst, Js::OpCode::BrEq_A, helperLabel, typeOfInstr);
  7569. m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
  7570. // $taggedInt:
  7571. // MOV dst, typeDisplayStrings[TypeIds_Number]
  7572. // B $done
  7573. typeOfInstr->InsertBefore(taggedIntLabel);
  7574. m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Number * sizeof(Js::Var), TyMachPtr, func), typeOfInstr);
  7575. m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
  7576. // $falsy:
  7577. // MOV dst, "undefined"
  7578. // B $done
  7579. typeOfInstr->InsertBefore(falsyLabel);
  7580. IR::Opnd * undefinedDisplayStringOpnd = IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Undefined, TyMachPtr, func);
  7581. m_lowerer->InsertMove(dst, undefinedDisplayStringOpnd, typeOfInstr);
  7582. m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
  7583. // $helper
  7584. // CALL OP_TypeOf
  7585. // $done
  7586. typeOfInstr->InsertBefore(helperLabel);
  7587. typeOfInstr->InsertAfter(doneLabel);
  7588. m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
  7589. }
  7590. #if DBG
  7591. //
  7592. // Helps in debugging of fast paths.
  7593. //
  7594. void LowererMD::GenerateDebugBreak( IR::Instr * insertInstr )
  7595. {
  7596. IR::Instr *int3 = IR::Instr::New(Js::OpCode::DEBUGBREAK, insertInstr->m_func);
  7597. insertInstr->InsertBefore(int3);
  7598. }
  7599. #endif