LowerMDShared.cpp 340 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
7977897799780978197829783
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Language/JavascriptFunctionArgIndex.h"
// Machine-specific opcode aliases consumed by the shared (machine-independent)
// lowerer; on this target they map directly onto x86/x64 instructions.
const Js::OpCode LowererMD::MDUncondBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDTestOpcode = Js::OpCode::TEST;
const Js::OpCode LowererMD::MDOrOpcode = Js::OpCode::OR;
const Js::OpCode LowererMD::MDXorOpcode = Js::OpCode::XOR;
#if _M_X64
// 64-bit GPR -> XMM bit copy; only available on x64.
const Js::OpCode LowererMD::MDMovUint64ToFloat64Opcode = Js::OpCode::MOVQ;
#endif
const Js::OpCode LowererMD::MDOverflowBranchOpcode = Js::OpCode::JO;
const Js::OpCode LowererMD::MDNotOverflowBranchOpcode = Js::OpCode::JNO;
const Js::OpCode LowererMD::MDConvertFloat32ToFloat64Opcode = Js::OpCode::CVTSS2SD;
const Js::OpCode LowererMD::MDConvertFloat64ToFloat32Opcode = Js::OpCode::CVTSD2SS;
const Js::OpCode LowererMD::MDCallOpcode = Js::OpCode::CALL;
const Js::OpCode LowererMD::MDImulOpcode = Js::OpCode::IMUL2;

// IEEE-754 single-precision bit patterns:
//   0x4f000000 == (float)2^31, 0xcf000000 == (float)-2^31 (INT32_MIN).
// Used when range-checking float -> int32 conversions.
static const int TWO_31_FLOAT = 0x4f000000;
static const int FLOAT_INT_MIN = 0xcf000000;
  22. //
  23. // Static utility fn()
  24. //
  25. bool
  26. LowererMD::IsAssign(IR::Instr *instr)
  27. {
  28. return instr->GetDst() && instr->m_opcode == LowererMDArch::GetAssignOp(instr->GetDst()->GetType());
  29. }
  30. ///----------------------------------------------------------------------------
  31. ///
  32. /// LowererMD::IsCall
  33. ///
  34. ///----------------------------------------------------------------------------
  35. bool
  36. LowererMD::IsCall(IR::Instr *instr)
  37. {
  38. return instr->m_opcode == Js::OpCode::CALL;
  39. }
  40. ///----------------------------------------------------------------------------
  41. ///
  42. /// LowererMD::IsUnconditionalBranch
  43. ///
  44. ///----------------------------------------------------------------------------
  45. bool
  46. LowererMD::IsUnconditionalBranch(const IR::Instr *instr)
  47. {
  48. return (instr->m_opcode == Js::OpCode::JMP);
  49. }
// GenerateMemRef: Return an opnd that can be used to access the given address.
// On x86/x64 an absolute address can be encoded directly in a memory operand,
// so 'instr' and 'dontEncode' are unused here (other targets may need them).
IR::Opnd *
LowererMD::GenerateMemRef(intptr_t addr, IRType type, IR::Instr *instr, bool dontEncode)
{
    return IR::MemRefOpnd::New(addr, type, this->m_func);
}
  56. ///----------------------------------------------------------------------------
  57. ///
  58. /// LowererMD::InvertBranch
  59. ///
  60. ///----------------------------------------------------------------------------
  61. void
  62. LowererMD::InvertBranch(IR::BranchInstr *branchInstr)
  63. {
  64. switch (branchInstr->m_opcode)
  65. {
  66. case Js::OpCode::JA:
  67. branchInstr->m_opcode = Js::OpCode::JBE;
  68. break;
  69. case Js::OpCode::JAE:
  70. branchInstr->m_opcode = Js::OpCode::JB;
  71. break;
  72. case Js::OpCode::JB:
  73. branchInstr->m_opcode = Js::OpCode::JAE;
  74. break;
  75. case Js::OpCode::JBE:
  76. branchInstr->m_opcode = Js::OpCode::JA;
  77. break;
  78. case Js::OpCode::JEQ:
  79. branchInstr->m_opcode = Js::OpCode::JNE;
  80. break;
  81. case Js::OpCode::JNE:
  82. branchInstr->m_opcode = Js::OpCode::JEQ;
  83. break;
  84. case Js::OpCode::JGE:
  85. branchInstr->m_opcode = Js::OpCode::JLT;
  86. break;
  87. case Js::OpCode::JGT:
  88. branchInstr->m_opcode = Js::OpCode::JLE;
  89. break;
  90. case Js::OpCode::JLT:
  91. branchInstr->m_opcode = Js::OpCode::JGE;
  92. break;
  93. case Js::OpCode::JLE:
  94. branchInstr->m_opcode = Js::OpCode::JGT;
  95. break;
  96. case Js::OpCode::JO:
  97. branchInstr->m_opcode = Js::OpCode::JNO;
  98. break;
  99. case Js::OpCode::JNO:
  100. branchInstr->m_opcode = Js::OpCode::JO;
  101. break;
  102. case Js::OpCode::JP:
  103. branchInstr->m_opcode = Js::OpCode::JNP;
  104. break;
  105. case Js::OpCode::JNP:
  106. branchInstr->m_opcode = Js::OpCode::JP;
  107. break;
  108. case Js::OpCode::JSB:
  109. branchInstr->m_opcode = Js::OpCode::JNSB;
  110. break;
  111. case Js::OpCode::JNSB:
  112. branchInstr->m_opcode = Js::OpCode::JSB;
  113. break;
  114. default:
  115. AssertMsg(UNREACHED, "JCC missing in InvertBranch()");
  116. }
  117. }
  118. void
  119. LowererMD::ReverseBranch(IR::BranchInstr *branchInstr)
  120. {
  121. switch (branchInstr->m_opcode)
  122. {
  123. case Js::OpCode::JA:
  124. branchInstr->m_opcode = Js::OpCode::JB;
  125. break;
  126. case Js::OpCode::JAE:
  127. branchInstr->m_opcode = Js::OpCode::JBE;
  128. break;
  129. case Js::OpCode::JB:
  130. branchInstr->m_opcode = Js::OpCode::JA;
  131. break;
  132. case Js::OpCode::JBE:
  133. branchInstr->m_opcode = Js::OpCode::JAE;
  134. break;
  135. case Js::OpCode::JGE:
  136. branchInstr->m_opcode = Js::OpCode::JLE;
  137. break;
  138. case Js::OpCode::JGT:
  139. branchInstr->m_opcode = Js::OpCode::JLT;
  140. break;
  141. case Js::OpCode::JLT:
  142. branchInstr->m_opcode = Js::OpCode::JGT;
  143. break;
  144. case Js::OpCode::JLE:
  145. branchInstr->m_opcode = Js::OpCode::JGE;
  146. break;
  147. case Js::OpCode::JEQ:
  148. case Js::OpCode::JNE:
  149. case Js::OpCode::JO:
  150. case Js::OpCode::JNO:
  151. case Js::OpCode::JP:
  152. case Js::OpCode::JNP:
  153. case Js::OpCode::JSB:
  154. case Js::OpCode::JNSB:
  155. break;
  156. default:
  157. AssertMsg(UNREACHED, "JCC missing in ReverseBranch()");
  158. }
  159. }
  160. IR::Instr *
  161. LowererMD::LowerCallHelper(IR::Instr *instrCall)
  162. {
  163. IR::Opnd *argOpnd = instrCall->UnlinkSrc2();
  164. IR::Instr *prevInstr = nullptr;
  165. IR::JnHelperMethod helperMethod = instrCall->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
  166. instrCall->FreeSrc1();
  167. #ifndef _M_X64
  168. prevInstr = ChangeToHelperCall(instrCall, helperMethod);
  169. #endif
  170. prevInstr = instrCall;
  171. while (argOpnd)
  172. {
  173. Assert(argOpnd->IsRegOpnd());
  174. IR::RegOpnd *regArg = argOpnd->AsRegOpnd();
  175. Assert(regArg->m_sym->m_isSingleDef);
  176. IR::Instr *instrArg = regArg->m_sym->m_instrDef;
  177. Assert(instrArg->m_opcode == Js::OpCode::ArgOut_A ||
  178. (helperMethod == IR::JnHelperMethod::HelperOP_InitCachedScope && instrArg->m_opcode == Js::OpCode::ExtendArg_A));
  179. prevInstr = LoadHelperArgument(prevInstr, instrArg->GetSrc1());
  180. argOpnd = instrArg->GetSrc2();
  181. if (prevInstr == instrArg)
  182. {
  183. prevInstr = prevInstr->m_prev;
  184. }
  185. if (instrArg->m_opcode == Js::OpCode::ArgOut_A)
  186. {
  187. instrArg->UnlinkSrc1();
  188. if (argOpnd)
  189. {
  190. instrArg->UnlinkSrc2();
  191. }
  192. regArg->Free(this->m_func);
  193. instrArg->Remove();
  194. }
  195. }
  196. prevInstr = m_lowerer->LoadScriptContext(prevInstr);
  197. #ifdef _M_X64
  198. FlipHelperCallArgsOrder();
  199. ChangeToHelperCall(instrCall, helperMethod);
  200. #else
  201. this->lowererMDArch.ResetHelperArgsCount();
  202. #endif
  203. // There might be ToVar in between the ArgOut, need to continue lower from the call still
  204. return instrCall;
  205. }
//
// forwarding functions
//
// These are thin forwarders: the shared lowerer delegates the actual work to
// the per-architecture lowererMDArch implementation.
//

IR::Instr *
LowererMD::LowerCall(IR::Instr * callInstr, Js::ArgSlot argCount)
{
    return this->lowererMDArch.LowerCall(callInstr, argCount);
}

IR::Instr *
LowererMD::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    return this->lowererMDArch.LowerCallI(callInstr, callFlags, isHelper, insertBeforeInstrForCFG);
}

IR::Instr *
LowererMD::LowerAsmJsCallI(IR::Instr * callInstr)
{
#if DBG
    // Debug aid: optionally emit a break after each asm.js call.
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(callInstr->m_next);
    }
#endif
    return this->lowererMDArch.LowerAsmJsCallI(callInstr);
}

IR::Instr *
LowererMD::LowerAsmJsCallE(IR::Instr * callInstr)
{
#if DBG
    // Same debug aid as LowerAsmJsCallI.
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(callInstr->m_next);
    }
#endif
    return this->lowererMDArch.LowerAsmJsCallE(callInstr);
}

IR::Instr *
LowererMD::LowerWasmMemOp(IR::Instr * instr, IR::Opnd *addrOpnd)
{
    return this->lowererMDArch.LowerWasmMemOp(instr, addrOpnd);
}

IR::Instr *
LowererMD::LowerAsmJsLdElemHelper(IR::Instr * callInstr)
{
    return this->lowererMDArch.LowerAsmJsLdElemHelper(callInstr);
}

IR::Instr *
LowererMD::LowerAsmJsStElemHelper(IR::Instr * callInstr)
{
    return this->lowererMDArch.LowerAsmJsStElemHelper(callInstr);
}
// LowerCallPut: rewrite the call as an invocation of the Op_InvokePut helper.
// The user arguments are lowered first (with an extra-arg count of 2), then
// the function wrapper and script context are loaded as helper arguments, and
// finally the call target is replaced by the helper opnd.
IR::Instr *
LowererMD::LowerCallPut(IR::Instr * callInstr)
{
    int32 argCount = this->lowererMDArch.LowerCallArgs(callInstr, Js::CallFlags_None, 2);

    // load native entry point from script function into eax
    IR::Opnd * functionWrapOpnd = callInstr->UnlinkSrc1();
    AssertMsg(functionWrapOpnd->IsRegOpnd() && functionWrapOpnd->AsRegOpnd()->m_sym->IsStackSym(),
        "Expected call src to be stackSym");
    this->LoadHelperArgument(callInstr, functionWrapOpnd);
    this->m_lowerer->LoadScriptContext(callInstr);

    IR::HelperCallOpnd *helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_InvokePut, this->m_func);
    callInstr->SetSrc1(helperCallOpnd);

    return this->lowererMDArch.LowerCall(callInstr, argCount);
}
// More thin forwarders to the per-architecture implementation: helper-argument
// loading and prolog/epilog lowering.

IR::Instr *
LowererMD::LoadInt64HelperArgument(IR::Instr * instr, IR::Opnd* opnd)
{
    return this->lowererMDArch.LoadInt64HelperArgument(instr, opnd);
}

IR::Instr *
LowererMD::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
{
    return this->lowererMDArch.LoadHelperArgument(instr, opndArg);
}

IR::Instr *
LowererMD::LoadDoubleHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
{
    return this->lowererMDArch.LoadDoubleHelperArgument(instr, opndArg);
}

IR::Instr *
LowererMD::LoadFloatHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
{
    return this->lowererMDArch.LoadFloatHelperArgument(instr, opndArg);
}

IR::Instr *
LowererMD::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    return this->lowererMDArch.LowerEntryInstr(entryInstr);
}

IR::Instr *
LowererMD::LowerExitInstr(IR::ExitInstr * exitInstr)
{
    return this->lowererMDArch.LowerExitInstr(exitInstr);
}

IR::Instr *
LowererMD::LowerEntryInstrAsmJs(IR::EntryInstr * entryInstr)
{
    return this->lowererMDArch.LowerEntryInstrAsmJs(entryInstr);
}

IR::Instr *
LowererMD::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
{
    return this->lowererMDArch.LowerExitInstrAsmJs(exitInstr);
}

IR::Instr *
LowererMD::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
{
    return this->lowererMDArch.LoadNewScObjFirstArg(instr, dst, extraArgs);
}
// LowerTry: lower a TryCatch/TryFinally entry. A call to 'helperMethod' is
// emitted at the label that starts the try region; the helper receives the
// try address, the handler address, the frame pointer (plus spill/args sizes
// on x64), optionally the has-bailed-out stack offset, and the script
// context. The helper returns a continuation address, which we then JMP to.
IR::Instr *
LowererMD::LowerTry(IR::Instr *tryInstr, IR::JnHelperMethod helperMethod)
{
    // Mark the entry to the try
    IR::Instr *instr = tryInstr->GetNextRealInstrOrLabel();
    AssertMsg(instr->IsLabelInstr(), "No label at the entry to a try?");
    IR::LabelInstr *tryAddr = instr->AsLabelInstr();

    // Arg 5: ScriptContext
    this->m_lowerer->LoadScriptContext(tryAddr);

    if (tryInstr->m_opcode == Js::OpCode::TryCatch || this->m_func->DoOptimizeTry())
    {
        // Arg 4 : hasBailedOutOffset
        IR::Opnd * hasBailedOutOffset = IR::IntConstOpnd::New(this->m_func->m_hasBailedOutSym->m_offset, TyInt32, this->m_func);
        this->LoadHelperArgument(tryAddr, hasBailedOutOffset);
    }

#ifdef _M_X64
    // LdArgSize/LdSpillSize are pseudo-ops resolved in a later lowering pass.
    // Arg: args size
    IR::RegOpnd *argsSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdArgSize, argsSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, argsSizeOpnd);

    // Arg: spill size
    IR::RegOpnd *spillSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdSpillSize, spillSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, spillSizeOpnd);
#endif

    // Arg 3: frame pointer
    IR::RegOpnd *ebpOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegBlockPointer(), TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, ebpOpnd);

    // Arg 2: handler address
    IR::LabelInstr *helperAddr = tryInstr->AsBranchInstr()->GetTarget();
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(helperAddr, this->m_func));

    // Arg 1: try address
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(tryAddr, this->m_func));

    // Call the helper; its return value (in the return register) is the
    // continuation address.
    IR::RegOpnd *continuationAddr =
        IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr *callInstr = IR::Instr::New(
        Js::OpCode::Call, continuationAddr, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    tryAddr->InsertBefore(callInstr);
    this->LowerCall(callInstr, 0);

#ifdef _M_X64
    {
        // Emit some instruction to separate the CALL from the JMP following it. The OS stack unwinder
        // mistakes the JMP for the start of the epilog otherwise.
        IR::Instr *nop = IR::Instr::New(Js::OpCode::NOP, m_func);
        tryAddr->InsertBefore(nop);
    }
#endif

    // Jump to the continuation address supplied by the helper
    IR::BranchInstr *branchInstr = IR::MultiBranchInstr::New(Js::OpCode::JMP, continuationAddr, this->m_func);
    tryAddr->InsertBefore(branchInstr);

    return tryInstr->m_prev;
}
// LowerLeave: lower a Leave exiting an EH region. An orphaned leave (a jitted
// loop body inside a try, per the Assert below) becomes a plain JMP;
// otherwise the arch-specific EH-region return to 'targetInstr' is emitted
// and the Leave itself is removed.
IR::Instr *
LowererMD::LowerLeave(IR::Instr *leaveInstr, IR::LabelInstr *targetInstr, bool fromFinalLower, bool isOrphanedLeave)
{
    if (isOrphanedLeave)
    {
        Assert(this->m_func->IsLoopBodyInTry());
        leaveInstr->m_opcode = Js::OpCode::JMP;
        return leaveInstr->m_prev;
    }

    IR::Instr *instrPrev = leaveInstr->m_prev;
    IR::LabelOpnd *labelOpnd = IR::LabelOpnd::New(targetInstr, this->m_func);

    lowererMDArch.LowerEHRegionReturn(leaveInstr, labelOpnd);

    if (fromFinalLower)
    {
        instrPrev = leaveInstr->m_prev; // Need to lower LdArgSize and LdSpillSize
    }

    leaveInstr->Remove();
    return instrPrev;
}
// LowerEHRegionReturn: thin forwarder; the return-from-EH-region sequence is
// architecture-specific.
IR::Instr *
LowererMD::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
{
    return lowererMDArch.LowerEHRegionReturn(insertBeforeInstr, targetOpnd);
}
// LowerLeaveNull: lower the end of a finally. The return register is zeroed
// (XOR reg, reg) so the EH helper sees a null continuation address, then a
// RET is emitted. On x64, spill/args sizes are materialized and the
// ReturnFromCallWithFakeFrame thunk address is pushed so the RET transfers
// control into that thunk.
IR::Instr *
LowererMD::LowerLeaveNull(IR::Instr *finallyEndInstr)
{
    IR::Instr *instrPrev = finallyEndInstr->m_prev;
    IR::Instr *instr = nullptr;

    // Return a null continuation address to the helper: execution will resume at the point determined by the try
    // or the exception handler.
    // XOR retReg, retReg  (zero the return register)
    IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg,this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::XOR, retReg, this->m_func);
    IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr->SetSrc1(eaxOpnd);
    instr->SetSrc2(eaxOpnd);
    finallyEndInstr->InsertBefore(instr);

#if _M_X64
    {
        // amd64_ReturnFromCallWithFakeFrame expects to find the spill size and args size
        // in REG_EH_SPILL_SIZE and REG_EH_ARGS_SIZE.

        // MOV REG_EH_SPILL_SIZE, spillSize
        IR::Instr *movR8 = IR::Instr::New(Js::OpCode::LdSpillSize,
            IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR8);

        // MOV REG_EH_ARGS_SIZE, argsSize
        IR::Instr *movR9 = IR::Instr::New(Js::OpCode::LdArgSize,
            IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR9);

        // Load the thunk address and push it: the RET below will "return"
        // into the thunk.
        IR::Opnd *targetOpnd = IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
        IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
            targetOpnd,
            IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movTarget);

        IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
        push->SetSrc1(targetOpnd);
        finallyEndInstr->InsertBefore(push);
    }
#endif

    // RET 0, with the zeroed return register as the returned value.
    IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::RET, this->m_func);
    instr->SetSrc1(intSrc);
    instr->SetSrc2(retReg);
    finallyEndInstr->InsertBefore(instr);

    finallyEndInstr->Remove();
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMD::Init
///
///     Wire this machine-dependent lowerer to its owning Lowerer, initialize
///     the per-architecture helper, and (when built in) the SIMD opcode map.
///
///----------------------------------------------------------------------------
void
LowererMD::Init(Lowerer *lowerer)
{
    m_lowerer = lowerer;
    this->lowererMDArch.Init(this);
#ifdef ENABLE_SIMDJS
    Simd128InitOpcodeMap();
#endif
}
///----------------------------------------------------------------------------
///
/// LowererMD::LoadInputParamCount
///
///     Load the passed-in parameter count from the appropriate EBP slot.
///
///     The result is count + adjust, minus one when the "calling eval" extra
///     argument (the frame display) was passed. 'needFlags' is not consulted
///     in this shared x86/x64 implementation.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LoadInputParamCount(IR::Instr * instrInsert, int adjust, bool needFlags)
{
    IR::Instr *   instr;
    IR::RegOpnd * dstOpnd;
    IR::SymOpnd * srcOpnd;

    srcOpnd = Lowerer::LoadCallInfo(instrInsert);
    dstOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);

    instr = IR::Instr::New(Js::OpCode::MOV, dstOpnd, srcOpnd, this->m_func);
    instrInsert->InsertBefore(instr);

    // Copy the callinfo before masking off the param count
    Assert(Js::CallInfo::ksizeofCount == 24);

    // Mask off call flags from callinfo: keep the low 24 count bits plus the
    // ExtraArg flag bit (which is needed by the BTR below).
    instr = IR::Instr::New(Js::OpCode::AND, dstOpnd, dstOpnd,
        IR::IntConstOpnd::New((Js::CallFlags_ExtraArg << static_cast<unsigned>(Js::CallInfo::ksizeofCount)) | 0x00FFFFFF, TyMachReg, this->m_func, true), this->m_func);
    instrInsert->InsertBefore(instr);

    // Shift and mask the "calling eval" bit and subtract it from the incoming count.
    // ("Calling eval" means the last param is the frame display, which only the eval built-in should see.)
    // BTR copies the ExtraArg bit into CF and clears it in dst...
    instr = IR::Instr::New(Js::OpCode::BTR, dstOpnd, dstOpnd, IR::IntConstOpnd::New(Math::Log2(Js::CallFlags_ExtraArg) + Js::CallInfo::ksizeofCount, TyInt8, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);

    // ...then SBB dst, -adjust computes dst - (-adjust) - CF, i.e.
    // dst + adjust, minus one when the extra arg was present.
    instr = IR::Instr::New(Js::OpCode::SBB, dstOpnd, dstOpnd, IR::IntConstOpnd::New(-adjust, TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);

    return instr;
}
// LoadStackArgPtr: load a pointer to the first user argument (past "this").
// Loop bodies read it out of the interpreter frame instance they were passed;
// normal functions use the arch-specific stack layout.
IR::Instr *
LowererMD::LoadStackArgPtr(IR::Instr * instr)
{
    if (this->m_func->IsLoopBody())
    {
        // Get the first user param from the interpreter frame instance that was passed in.
        // These args don't include the func object and callinfo; we just need to advance past "this".

        // t1 = MOV [prm1 + m_inParams]
        // dst = LEA &[t1 + sizeof(var)]

        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInParams();
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Instr *instrLdParams = IR::Instr::New(Js::OpCode::MOV, tmpOpnd, indirOpnd, this->m_func);
        instr->InsertBefore(instrLdParams);

        indirOpnd = IR::IndirOpnd::New(tmpOpnd, sizeof(Js::Var), TyMachReg, this->m_func);
        instr->SetSrc1(indirOpnd);
        instr->m_opcode = Js::OpCode::LEA;
        return instr->m_prev;
    }
    else
    {
        return this->lowererMDArch.LoadStackArgPtr(instr);
    }
}
  509. IR::Instr *
  510. LowererMD::LoadArgumentsFromFrame(IR::Instr * instr)
  511. {
  512. if (this->m_func->IsLoopBody())
  513. {
  514. // Get the arguments ptr from the interpreter frame instance that was passed in.
  515. Assert(this->m_func->m_loopParamSym);
  516. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  517. int32 offset = (int32)Js::InterpreterStackFrame::GetOffsetOfArguments();
  518. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyMachReg, this->m_func));
  519. }
  520. else
  521. {
  522. instr->SetSrc1(this->CreateStackArgumentsSlotOpnd());
  523. }
  524. instr->m_opcode = Js::OpCode::MOV;
  525. return instr->m_prev;
  526. }
// load argument count as I4
// Loop bodies pull the count from the interpreter frame instance; normal
// functions read the callinfo slot relative to the frame.
IR::Instr *
LowererMD::LoadArgumentCount(IR::Instr * instr)
{
    if (this->m_func->IsLoopBody())
    {
        // Pull the arg count from the interpreter frame instance that was passed in.
        // (The callinfo in the loop body's frame just shows the single parameter, the interpreter frame.)
        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInSlotsCount();
        instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, (int32)offset, TyInt32, this->m_func));
    }
    else
    {
        // Address the callinfo slot by its fixed offset from the frame base.
        StackSym *sym = StackSym::New(TyVar, this->m_func);
        this->m_func->SetArgOffset(sym, (Js::JavascriptFunctionArgIndex_CallInfo - Js::JavascriptFunctionArgIndex_Frame) * sizeof(Js::Var));
        instr->SetSrc1(IR::SymOpnd::New(sym, TyMachReg, this->m_func));
    }
    instr->m_opcode = Js::OpCode::MOV;
    return instr->m_prev;
}
// LoadHeapArguments / LoadHeapArgsCached: creating the heap 'arguments'
// object is architecture-specific; forward to lowererMDArch.
IR::Instr *
LowererMD::LoadHeapArguments(IR::Instr * instrArgs)
{
    return this->lowererMDArch.LoadHeapArguments(instrArgs);
}

IR::Instr *
LowererMD::LoadHeapArgsCached(IR::Instr * instrArgs)
{
    return this->lowererMDArch.LoadHeapArgsCached(instrArgs);
}
///----------------------------------------------------------------------------
///
/// LowererMD::ChangeToHelperCall
///
///     Change the current instruction to a call to the given helper.
///
///     If the instruction carries bailout info, the bailout check is either
///     converted in place (BailOutOnNotPrimitive / BailOutOnPowIntIntOverflow,
///     using 'opndBailOutArg' as the tested value) or split out as a separate
///     instruction, and is lowered only after the helper call itself.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::ChangeToHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod, IR::LabelInstr *labelBailOut,
                              IR::Opnd *opndBailOutArg, IR::PropertySymOpnd *propSymOpnd, bool isHelperContinuation)
{
    IR::Instr * bailOutInstr = callInstr;
    if (callInstr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = callInstr->GetBailOutKind();
        if (bailOutKind == IR::BailOutOnNotPrimitive ||
            bailOutKind == IR::BailOutOnPowIntIntOverflow)
        {
            // Split a fresh call instruction off; the original becomes the
            // bailout check (keeping its bailout info) placed before the call.
            callInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
            bailOutInstr->TransferTo(callInstr);
            bailOutInstr->InsertBefore(callInstr);

            bailOutInstr->m_opcode = bailOutKind == IR::BailOutOnNotPrimitive
                                        ? Js::OpCode::BailOnNotPrimitive
                                        : Js::OpCode::BailOnPowIntIntOverflow;
            bailOutInstr->SetSrc1(opndBailOutArg);
        }
        else
        {
            bailOutInstr = this->m_lowerer->SplitBailOnImplicitCall(callInstr);
        }
    }

    callInstr->m_opcode = Js::OpCode::CALL;

    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->lowererMDArch.GetHelperArgsCount(), m_func);
    if (helperCallOpnd->IsDiagHelperCallOpnd())
    {
        // Load arguments for the wrapper.
        this->LoadHelperArgument(callInstr, IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKindDynamicMisc, m_func));
        this->m_lowerer->LoadScriptContext(callInstr);
    }
    callInstr->SetSrc1(helperCallOpnd);

    IR::Instr * instrRet = this->lowererMDArch.LowerCall(callInstr, 0);

    if (bailOutInstr != callInstr)
    {
        // The bailout needs to be lowered after we lower the helper call because the helper argument
        // has already been loaded. We need to drain them on AMD64 before starting another helper call
        if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotObject)
        {
            this->m_lowerer->LowerBailOnNotObject(bailOutInstr, nullptr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotPrimitive ||
                 bailOutInstr->m_opcode == Js::OpCode::BailOnPowIntIntOverflow)
        {
            this->m_lowerer->LowerBailOnTrue(bailOutInstr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOut)
        {
            this->m_lowerer->GenerateBailOut(bailOutInstr, nullptr, labelBailOut);
        }
        else
        {
            this->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr, nullptr, labelBailOut, propSymOpnd, isHelperContinuation);
        }
    }

#if DBG
    // Debug aid: optionally emit a break after the helper call.
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(instrRet->m_next);
    }
#endif

    return instrRet;
}
// Lower 'instr' to a helper call that also receives the script context:
// loads the script context as a helper argument, then defers the rest of
// the lowering to ChangeToHelperCall.
IR::Instr* LowererMD::ChangeToHelperCallMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    this->m_lowerer->LoadScriptContext(instr);
    return this->ChangeToHelperCall(instr, helperMethod);
}
  635. ///----------------------------------------------------------------------------
  636. ///
  637. /// LowererMD::ChangeToAssign
  638. ///
  639. /// Change to a MOV.
  640. ///
  641. ///----------------------------------------------------------------------------
// Change 'instr' into a plain machine move typed by the destination,
// bypassing the write-barrier-aware path.
IR::Instr *
LowererMD::ChangeToAssignNoBarrierCheck(IR::Instr * instr)
{
    return ChangeToAssign(instr, instr->GetDst()->GetType());
}
// Change 'instr' into an assign, routing through the write-barrier-aware
// path so stores that need barrier handling get it.
IR::Instr *
LowererMD::ChangeToAssign(IR::Instr * instr)
{
    return ChangeToWriteBarrierAssign(instr, instr->m_func);
}
// Change 'instr' into the machine move opcode for 'type' and legalize the
// operand forms so they are encodable.
IR::Instr *
LowererMD::ChangeToAssign(IR::Instr * instr, IRType type)
{
    // The only bailout expected to survive on a raw assign is BailOutExpectingString.
    Assert(!instr->HasBailOutInfo() || instr->GetBailOutKind() == IR::BailOutExpectingString);

    instr->m_opcode = LowererMDArch::GetAssignOp(type);
    Legalize(instr);

    return instr;
}
  660. ///----------------------------------------------------------------------------
  661. ///
  662. /// LowererMD::ChangeToLea
  663. ///
  664. /// Change to an LEA.
  665. ///
  666. ///----------------------------------------------------------------------------
// Change 'instr' (reg = indir/sym address) into an LEA.
IR::Instr *
LowererMD::ChangeToLea(IR::Instr * instr, bool postRegAlloc)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsRegOpnd());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd());
    Assert(!instr->GetSrc2());

    // NOTE(review): 'postRegAlloc' is unused on this x86/x64 path; presumably
    // it matters for other architectures' overloads — confirm before removing.
    instr->m_opcode = Js::OpCode::LEA;
    return instr;
}
  679. ///----------------------------------------------------------------------------
  680. ///
  681. /// LowererMD::CreateAssign
  682. ///
  683. /// Create a MOV.
  684. ///
  685. ///----------------------------------------------------------------------------
// Create a move of 'src' into 'dst' before 'instrInsertPt'; forwards to
// Lowerer::InsertMove, which honors 'generateWriteBarrier'.
IR::Instr *
LowererMD::CreateAssign(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsertPt, bool generateWriteBarrier)
{
    return Lowerer::InsertMove(dst, src, instrInsertPt, generateWriteBarrier);
}
  691. ///----------------------------------------------------------------------------
  692. ///
  693. /// LowererMD::LowerRet
  694. ///
  695. /// Lower Ret to "MOV EAX, src"
  696. /// The real RET is inserted at the exit of the function when emitting the
  697. /// epilog.
  698. ///
  699. ///----------------------------------------------------------------------------
// Lower a Ret: move the return value into the machine return register and
// make the Ret consume that register so it stays live into the epilog,
// where the real RET instruction is emitted.
IR::Instr *
LowererMD::LowerRet(IR::Instr * retInstr)
{
    IR::RegOpnd * retReg;

#ifdef ASMJS_PLAT
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody()) // for loop body ret is the bytecodeoffset
    {
        // asm.js: choose the return register type from the declared return type.
        Js::AsmJsRetType::Which asmType = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType();
        IRType regType = TyInt32;
        switch (asmType)
        {
        case Js::AsmJsRetType::Double:
            regType = TyFloat64;
            break;
        case Js::AsmJsRetType::Float:
            regType = TyFloat32;
            break;
        case Js::AsmJsRetType::Int64:
        {
            regType = TyInt64;
#if LOWER_SPLIT_INT64
            // When int64 is split (32-bit targets), return the value as a
            // register pair: low half in the return register, high half in EDX.
            regType = TyInt32;
            {
                IR::Opnd* lowOpnd = nullptr;
                IR::Opnd* highOpnd = nullptr;
                if (retInstr->GetSrc1()->IsRegOpnd())
                {
                    Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(retInstr->GetSrc1()->AsRegOpnd());
                    lowOpnd = srcPair.low;
                    highOpnd = srcPair.high;
                }
                else if (retInstr->GetSrc1()->IsImmediateOpnd())
                {
                    // Split a constant into its 32-bit halves.
                    int64 value = retInstr->GetSrc1()->GetImmediateValue(m_func);
                    lowOpnd = IR::IntConstOpnd::New(value & UINT_MAX, regType, m_func);
                    highOpnd = IR::IntConstOpnd::New(value >> 32, regType, m_func);
                }
                else
                {
                    Assert(UNREACHED);
                }
                retInstr->UnlinkSrc1();
                retInstr->SetSrc1(lowOpnd);

                // Mov high bits to edx
                IR::RegOpnd* regEdx = IR::RegOpnd::New(regType, this->m_func);
                regEdx->SetReg(RegEDX);
                Lowerer::InsertMove(regEdx, highOpnd, retInstr);
                retInstr->SetSrc2(regEdx);
            }
#endif
            break;
        }
        case Js::AsmJsRetType::Signed:
        case Js::AsmJsRetType::Void:
            regType = TyInt32;
            break;
        // SIMD return types map to the corresponding 128-bit IR types.
        case Js::AsmJsRetType::Float32x4:
            regType = TySimd128F4;
            break;
        case Js::AsmJsRetType::Int32x4:
            regType = TySimd128I4;
            break;
        case Js::AsmJsRetType::Float64x2:
            regType = TySimd128D2;
            break;
        case Js::AsmJsRetType::Int16x8:
            regType = TySimd128I8;
            break;
        case Js::AsmJsRetType::Int8x16:
            regType = TySimd128I16;
            break;
        case Js::AsmJsRetType::Uint32x4:
            regType = TySimd128U4;
            break;
        case Js::AsmJsRetType::Uint16x8:
            regType = TySimd128U8;
            break;
        case Js::AsmJsRetType::Uint8x16:
            regType = TySimd128U16;
            break;
        case Js::AsmJsRetType::Bool32x4:
            regType = TySimd128B4;
            break;
        case Js::AsmJsRetType::Bool16x8:
            regType = TySimd128B8;
            break;
        case Js::AsmJsRetType::Bool8x16:
            regType = TySimd128B16;
            break;
        default:
            Assert(UNREACHED);
        }
        retReg = IR::RegOpnd::New(regType, m_func);
        retReg->SetReg(lowererMDArch.GetRegReturnAsmJs(regType));
    }
    else
#endif
    {
        // Non-asm.js: a Var returned in the machine return register.
        retReg = IR::RegOpnd::New(TyMachReg, m_func);
        retReg->SetReg(lowererMDArch.GetRegReturn(TyMachReg));
    }

    // MOV retReg, src ; then the Ret reads retReg.
    Lowerer::InsertMove(retReg, retInstr->UnlinkSrc1(), retInstr);
    retInstr->SetSrc1(retReg);

    return retInstr;
}
  805. ///----------------------------------------------------------------------------
  806. ///
  807. /// LowererMD::LowerUncondBranch
  808. ///
  809. ///----------------------------------------------------------------------------
// Unconditional branch lowers directly to JMP.
IR::Instr *
LowererMD::LowerUncondBranch(IR::Instr * instr)
{
    instr->m_opcode = Js::OpCode::JMP;
    return instr;
}
  816. ///----------------------------------------------------------------------------
  817. ///
  818. /// LowererMD::LowerMultiBranch
  819. ///
  820. ///----------------------------------------------------------------------------
// Multi-branch lowers the same way as an unconditional branch (JMP).
IR::Instr *
LowererMD::LowerMultiBranch(IR::Instr * instr)
{
    return LowerUncondBranch(instr);
}
  826. ///----------------------------------------------------------------------------
  827. ///
  828. /// LowererMD::LowerCondBranch
  829. ///
  830. ///----------------------------------------------------------------------------
// Lower a conditional branch: emit the flag-setting compare (TEST/CMP or
// COMISD/COMISS for floats), then rewrite the branch itself into the
// matching Jcc opcode. Returns the inserted compare instruction.
IR::Instr *
LowererMD::LowerCondBranch(IR::Instr * instr)
{
    AssertMsg(instr->GetSrc1() != nullptr, "Expected src opnds on conditional branch");
    Assert(!instr->HasBailOutInfo());

    IR::Opnd * opndSrc1 = instr->UnlinkSrc1();
    IR::Instr * instrPrev = nullptr;

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrNotNull_A:
    case Js::OpCode::BrOnObject_A:
    case Js::OpCode::BrOnClassConstructor:
    case Js::OpCode::BrOnBaseConstructorKind:
        // Single-source truthiness-style branches: TEST src, src then branch
        // on the zero flag. Only BrFalse_A branches on "zero".
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(opndSrc1);
        instr->InsertBefore(instrPrev);

        if (instr->m_opcode != Js::OpCode::BrFalse_A)
        {
            instr->m_opcode = Js::OpCode::JNE;
        }
        else
        {
            instr->m_opcode = Js::OpCode::JEQ;
        }
        break;

    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        AssertMsg(0, "BrOnEmpty opcodes should not be passed to MD lowerer");
        break;

    default:
        // Two-source relational branch.
        IR::Opnd * opndSrc2 = instr->UnlinkSrc2();
        AssertMsg(opndSrc2 != nullptr, "Expected 2 src's on non-boolean branch");

        if (opndSrc1->IsFloat())
        {
            // Float compares set flags via (U)COMIS[S|D].
            Assert(opndSrc1->GetType() == opndSrc2->GetType());
            instrPrev = IR::Instr::New(opndSrc1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS, m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
        }
        else
        {
            // This check assumes src1 is a variable.
            if (opndSrc2->IsIntConstOpnd() && opndSrc2->AsIntConstOpnd()->GetValue() == 0)
            {
                // Compare against zero is encoded more compactly as TEST src1, src1.
                instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
                instrPrev->SetSrc1(opndSrc1);
                instrPrev->SetSrc2(opndSrc1);
                instr->InsertBefore(instrPrev);
                opndSrc2->Free(this->m_func);
            }
            else
            {
                instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);

                //
                // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
                // relevant only on AMD64.
                //

                opndSrc1 = instrPrev->SetSrc1(opndSrc1);
                opndSrc2 = instrPrev->SetSrc2(opndSrc2);
                instr->InsertBefore(instrPrev);
                LowererMD::Legalize(instrPrev);
            }
        }
        instr->m_opcode = LowererMD::MDBranchOpcode(instr->m_opcode);
        break;
    }

    return instrPrev;
}
  905. ///----------------------------------------------------------------------------
  906. ///
  907. /// LowererMD::MDBranchOpcode
  908. ///
  909. /// Map HIR branch opcode to machine-dependent equivalent.
  910. ///
  911. ///----------------------------------------------------------------------------
  912. Js::OpCode
  913. LowererMD::MDBranchOpcode(Js::OpCode opcode)
  914. {
  915. switch (opcode)
  916. {
  917. case Js::OpCode::BrSrEq_A:
  918. case Js::OpCode::BrEq_A:
  919. case Js::OpCode::BrSrNotNeq_A:
  920. case Js::OpCode::BrNotNeq_A:
  921. case Js::OpCode::BrAddr_A:
  922. return Js::OpCode::JEQ;
  923. case Js::OpCode::BrSrNeq_A:
  924. case Js::OpCode::BrNeq_A:
  925. case Js::OpCode::BrSrNotEq_A:
  926. case Js::OpCode::BrNotEq_A:
  927. case Js::OpCode::BrNotAddr_A:
  928. return Js::OpCode::JNE;
  929. case Js::OpCode::BrLt_A:
  930. case Js::OpCode::BrNotGe_A:
  931. return Js::OpCode::JLT;
  932. case Js::OpCode::BrLe_A:
  933. case Js::OpCode::BrNotGt_A:
  934. return Js::OpCode::JLE;
  935. case Js::OpCode::BrGt_A:
  936. case Js::OpCode::BrNotLe_A:
  937. return Js::OpCode::JGT;
  938. case Js::OpCode::BrGe_A:
  939. case Js::OpCode::BrNotLt_A:
  940. return Js::OpCode::JGE;
  941. default:
  942. AssertMsg(0, "Branch opcode has no MD mapping");
  943. return opcode;
  944. }
  945. }
  946. Js::OpCode
  947. LowererMD::MDConvertFloat64ToInt32Opcode(const RoundMode roundMode)
  948. {
  949. switch (roundMode)
  950. {
  951. case RoundModeTowardZero:
  952. return Js::OpCode::CVTTSD2SI;
  953. case RoundModeTowardInteger:
  954. return Js::OpCode::Nop;
  955. case RoundModeHalfToEven:
  956. return Js::OpCode::CVTSD2SI;
  957. default:
  958. AssertMsg(0, "RoundMode has no MD mapping.");
  959. return Js::OpCode::Nop;
  960. }
  961. }
  962. Js::OpCode
  963. LowererMD::MDUnsignedBranchOpcode(Js::OpCode opcode)
  964. {
  965. switch (opcode)
  966. {
  967. case Js::OpCode::BrEq_A:
  968. case Js::OpCode::BrSrEq_A:
  969. case Js::OpCode::BrSrNotNeq_A:
  970. case Js::OpCode::BrNotNeq_A:
  971. case Js::OpCode::BrAddr_A:
  972. return Js::OpCode::JEQ;
  973. case Js::OpCode::BrNeq_A:
  974. case Js::OpCode::BrSrNeq_A:
  975. case Js::OpCode::BrSrNotEq_A:
  976. case Js::OpCode::BrNotEq_A:
  977. case Js::OpCode::BrNotAddr_A:
  978. return Js::OpCode::JNE;
  979. case Js::OpCode::BrLt_A:
  980. case Js::OpCode::BrNotGe_A:
  981. return Js::OpCode::JB;
  982. case Js::OpCode::BrLe_A:
  983. case Js::OpCode::BrNotGt_A:
  984. return Js::OpCode::JBE;
  985. case Js::OpCode::BrGt_A:
  986. case Js::OpCode::BrNotLe_A:
  987. return Js::OpCode::JA;
  988. case Js::OpCode::BrGe_A:
  989. case Js::OpCode::BrNotLt_A:
  990. return Js::OpCode::JAE;
  991. default:
  992. AssertMsg(0, "Branch opcode has no MD mapping");
  993. return opcode;
  994. }
  995. }
  996. Js::OpCode LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode opcode)
  997. {
  998. Assert(opcode == Js::OpCode::BrLt_A || opcode == Js::OpCode::BrGe_A);
  999. return opcode == Js::OpCode::BrLt_A ? Js::OpCode::JSB : Js::OpCode::JNSB;
  1000. }
// Change 'instr' into a machine add: ADDSD/ADDSS for floats, otherwise ADD
// (or INC for add-by-one when the caller does not need the flags).
void LowererMD::ChangeToAdd(IR::Instr *const instr, const bool needFlags)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    if(instr->GetDst()->IsFloat64())
    {
        Assert(instr->GetSrc1()->IsFloat64());
        Assert(instr->GetSrc2()->IsFloat64());
        Assert(!needFlags);
        instr->m_opcode = Js::OpCode::ADDSD;
        return;
    }
    else if (instr->GetDst()->IsFloat32())
    {
        Assert(instr->GetSrc1()->IsFloat32());
        Assert(instr->GetSrc2()->IsFloat32());
        Assert(!needFlags);
        instr->m_opcode = Js::OpCode::ADDSS;
        return;
    }

    instr->m_opcode = Js::OpCode::ADD;
    MakeDstEquSrc1(instr);

    if (!needFlags)
    {
        // Prefer INC for add by one.
        // Add is commutative, so accept the constant 1 on either side.
        if ((instr->GetDst()->IsEqual(instr->GetSrc1()) &&
            instr->GetSrc2()->IsIntConstOpnd() &&
            instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1) ||
            (instr->GetDst()->IsEqual(instr->GetSrc2()) &&
            instr->GetSrc1()->IsIntConstOpnd() &&
            instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 1))
        {
            if (instr->GetSrc1()->IsIntConstOpnd())
            {
                // Swap the operands, such that we would create (dst = INC src2)
                instr->SwapOpnds();
            }
            instr->FreeSrc2();
            instr->m_opcode = Js::OpCode::INC;
        }
    }
}
  1045. void LowererMD::ChangeToSub(IR::Instr *const instr, const bool needFlags)
  1046. {
  1047. Assert(instr);
  1048. Assert(instr->GetDst());
  1049. Assert(instr->GetSrc1());
  1050. Assert(instr->GetSrc2());
  1051. if(instr->GetDst()->IsFloat64())
  1052. {
  1053. Assert(instr->GetSrc1()->IsFloat64());
  1054. Assert(instr->GetSrc2()->IsFloat64());
  1055. Assert(!needFlags);
  1056. instr->m_opcode = Js::OpCode::SUBSD;
  1057. return;
  1058. }
  1059. // Prefer DEC for sub by one
  1060. if(instr->GetDst()->IsEqual(instr->GetSrc1()) &&
  1061. instr->GetSrc2()->IsIntConstOpnd() &&
  1062. instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1)
  1063. {
  1064. instr->FreeSrc2();
  1065. instr->m_opcode = Js::OpCode::DEC;
  1066. return;
  1067. }
  1068. instr->m_opcode = Js::OpCode::SUB;
  1069. }
  1070. void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
  1071. {
  1072. Assert(instr);
  1073. Assert(instr->GetDst());
  1074. Assert(instr->GetSrc1());
  1075. Assert(instr->GetSrc2());
  1076. switch(instr->m_opcode)
  1077. {
  1078. case Js::OpCode::Shl_A:
  1079. case Js::OpCode::Shl_I4:
  1080. instr->m_opcode = Js::OpCode::SHL;
  1081. break;
  1082. case Js::OpCode::Shr_A:
  1083. case Js::OpCode::Shr_I4:
  1084. instr->m_opcode = Js::OpCode::SAR;
  1085. break;
  1086. case Js::OpCode::ShrU_A:
  1087. case Js::OpCode::ShrU_I4:
  1088. instr->m_opcode = Js::OpCode::SHR;
  1089. break;
  1090. case Js::OpCode::Rol_I4:
  1091. instr->m_opcode = Js::OpCode::ROL;
  1092. break;
  1093. case Js::OpCode::Ror_I4:
  1094. instr->m_opcode = Js::OpCode::ROR;
  1095. break;
  1096. default:
  1097. Assert(false);
  1098. __assume(false);
  1099. }
  1100. if(instr->GetSrc2()->IsIntConstOpnd())
  1101. {
  1102. // Only values between 0-31 mean anything
  1103. IntConstType value = instr->GetSrc2()->AsIntConstOpnd()->GetValue();
  1104. value &= 0x1f;
  1105. instr->GetSrc2()->AsIntConstOpnd()->SetValue(value);
  1106. }
  1107. }
// Lower a multiply. When a non-32-bit overflow check is required, emit the
// single-operand IMUL form pinned to EAX (which produces a 64-bit result in
// EDX:EAX); otherwise fall back to the two-operand IMUL2 via EmitInt4Instr.
void LowererMD::ChangeToIMul(IR::Instr *const instr, bool hasOverflowCheck)
{
    // If non-32 bit overflow check is needed, we have to use the IMUL form.
    if (hasOverflowCheck && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
    {
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, instr->m_func);
        IR::Opnd *temp2 = nullptr;

        // MOV eax, src1
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, regEAX, instr->GetSrc1(), instr->m_func));

        if (instr->GetSrc2()->IsImmediateOpnd())
        {
            // MOV reg, imm
            // Single-operand IMUL takes no immediate, so materialize src2 into
            // a register first, carrying over any m_dontEncode marking.
            temp2 = IR::RegOpnd::New(TyInt32, instr->m_func);
            IR::Opnd * src2 = instr->GetSrc2();
            bool dontEncode = false;
            if (src2->IsHelperCallOpnd())
            {
                dontEncode = true;
            }
            else if (src2->IsIntConstOpnd() || src2->IsAddrOpnd())
            {
                dontEncode = src2->IsIntConstOpnd() ? src2->AsIntConstOpnd()->m_dontEncode : src2->AsAddrOpnd()->m_dontEncode;
            }
            else if (src2->IsInt64ConstOpnd())
            {
                dontEncode = false;
            }
            else
            {
                AssertMsg(false, "Unexpected immediate opnd");
                throw Js::OperationAbortedException();
            }
            instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, temp2,
                IR::IntConstOpnd::New((IntConstType)instr->GetSrc2()->GetImmediateValue(instr->m_func), TyInt32, instr->m_func, dontEncode),
                instr->m_func));
        }

        // eax = IMUL eax, reg
        instr->m_opcode = Js::OpCode::IMUL;
        instr->ReplaceSrc1(regEAX);

        if (temp2 != nullptr)
            instr->ReplaceSrc2(temp2);

        // Preserve the original destination and route the result through EAX.
        auto *dst = instr->GetDst()->Copy(instr->m_func);
        instr->ReplaceDst(regEAX);

        // MOV dst, eax
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, dst, regEAX, instr->m_func));
    }
    else
        EmitInt4Instr(instr); // IMUL2
}
// Number of machine-word slots between EBP and the first formal parameter
// on x86/x64: saved EBP chain (1) + return address (1) + the 2 non-user
// parameters.
const uint16
LowererMD::GetFormalParamOffset()
{
    return 4;
}
  1164. IR::Instr *
  1165. LowererMD::LowerCatch(IR::Instr * instr)
  1166. {
  1167. // t1 = catch => t2(eax) = catch
  1168. // => t1 = t2(eax)
  1169. IR::Opnd *catchObj = instr->UnlinkDst();
  1170. IR::RegOpnd *catchParamReg = IR::RegOpnd::New(TyMachPtr, this->m_func);
  1171. catchParamReg->SetReg(this->lowererMDArch.GetRegReturn(TyMachReg));
  1172. instr->SetDst(catchParamReg);
  1173. instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, catchObj, catchParamReg, this->m_func));
  1174. return instr->m_prev;
  1175. }
  1176. ///----------------------------------------------------------------------------
  1177. ///
  1178. /// LowererMD::ForceDstToReg
  1179. ///
  1180. ///----------------------------------------------------------------------------
  1181. void
  1182. LowererMD::ForceDstToReg(IR::Instr *instr)
  1183. {
  1184. IR::Opnd * dst = instr->GetDst();
  1185. if (dst->IsRegOpnd())
  1186. {
  1187. return;
  1188. }
  1189. if(dst->IsFloat64())
  1190. {
  1191. instr->SinkDst(Js::OpCode::MOVSD);
  1192. return;
  1193. }
  1194. instr->SinkDst(Js::OpCode::MOV);
  1195. }
  1196. template <bool verify>
  1197. void
  1198. LowererMD::Legalize(IR::Instr *const instr, bool fPostRegAlloc)
  1199. {
  1200. Assert(instr);
  1201. Assert(!instr->isInlineeEntryInstr
  1202. || (instr->m_opcode == Js::OpCode::MOV && instr->GetSrc1()->IsIntConstOpnd()));
  1203. switch(instr->m_opcode)
  1204. {
  1205. case Js::OpCode::MOV:
  1206. {
  1207. Assert(instr->GetSrc2() == nullptr);
  1208. IR::Opnd *const dst = instr->GetDst();
  1209. const IRType dstType = dst->GetType();
  1210. IR::Opnd *const src = instr->GetSrc1();
  1211. const IRType srcType = src->GetType();
  1212. if(TySize[dstType] > TySize[srcType])
  1213. {
  1214. if (verify)
  1215. {
  1216. return;
  1217. }
  1218. #if DBG
  1219. switch(dstType)
  1220. {
  1221. case TyInt32:
  1222. case TyUint32:
  1223. #ifdef _M_X64
  1224. case TyInt64:
  1225. case TyUint64:
  1226. #endif
  1227. case TyVar:
  1228. break;
  1229. default:
  1230. Assert(false);
  1231. }
  1232. #endif
  1233. IR::IntConstOpnd *const intConstantSrc = src->IsIntConstOpnd() ? src->AsIntConstOpnd() : nullptr;
  1234. const auto UpdateIntConstantSrc = [&](const size_t extendedValue)
  1235. {
  1236. Assert(intConstantSrc);
  1237. #ifdef _M_X64
  1238. if(TySize[dstType] > sizeof(IntConstType))
  1239. {
  1240. instr->ReplaceSrc1(
  1241. IR::AddrOpnd::New(
  1242. reinterpret_cast<void *>(extendedValue),
  1243. IR::AddrOpndKindConstantVar,
  1244. instr->m_func,
  1245. intConstantSrc->m_dontEncode));
  1246. }
  1247. else
  1248. #endif
  1249. {
  1250. intConstantSrc->SetType(dstType);
  1251. intConstantSrc->SetValue(static_cast<IntConstType>(extendedValue));
  1252. }
  1253. };
  1254. switch(srcType)
  1255. {
  1256. case TyInt8:
  1257. if(intConstantSrc)
  1258. {
  1259. UpdateIntConstantSrc(static_cast<int8>(intConstantSrc->GetValue())); // sign-extend
  1260. break;
  1261. }
  1262. instr->m_opcode = Js::OpCode::MOVSX;
  1263. break;
  1264. case TyUint8:
  1265. if(intConstantSrc)
  1266. {
  1267. UpdateIntConstantSrc(static_cast<uint8>(intConstantSrc->GetValue())); // zero-extend
  1268. break;
  1269. }
  1270. instr->m_opcode = Js::OpCode::MOVZX;
  1271. break;
  1272. case TyInt16:
  1273. if(intConstantSrc)
  1274. {
  1275. UpdateIntConstantSrc(static_cast<int16>(intConstantSrc->GetValue())); // sign-extend
  1276. break;
  1277. }
  1278. instr->m_opcode = Js::OpCode::MOVSXW;
  1279. break;
  1280. case TyUint16:
  1281. if(intConstantSrc)
  1282. {
  1283. UpdateIntConstantSrc(static_cast<uint16>(intConstantSrc->GetValue())); // zero-extend
  1284. break;
  1285. }
  1286. instr->m_opcode = Js::OpCode::MOVZXW;
  1287. break;
  1288. #ifdef _M_X64
  1289. case TyInt32:
  1290. if(intConstantSrc)
  1291. {
  1292. UpdateIntConstantSrc(static_cast<int32>(intConstantSrc->GetValue())); // sign-extend
  1293. break;
  1294. }
  1295. instr->m_opcode = Js::OpCode::MOVSXD;
  1296. break;
  1297. case TyUint32:
  1298. if(intConstantSrc)
  1299. {
  1300. UpdateIntConstantSrc(static_cast<uint32>(intConstantSrc->GetValue())); // zero-extend
  1301. break;
  1302. }
  1303. switch(dst->GetKind())
  1304. {
  1305. case IR::OpndKindReg:
  1306. // (mov r0.u32, r1.u32) clears the upper 32 bits of r0
  1307. dst->SetType(TyUint32);
  1308. instr->m_opcode = Js::OpCode::MOV_TRUNC;
  1309. break;
  1310. case IR::OpndKindSym:
  1311. case IR::OpndKindIndir:
  1312. case IR::OpndKindMemRef:
  1313. // Even if the src is a reg, we don't know if the upper 32 bits are zero. Copy the value to a
  1314. // reg first to zero-extend it to 64 bits, and then copy the 64-bit value to the original dst.
  1315. instr->HoistSrc1(Js::OpCode::MOV_TRUNC);
  1316. instr->GetSrc1()->SetType(dstType);
  1317. break;
  1318. default:
  1319. Assert(false);
  1320. __assume(false);
  1321. }
  1322. break;
  1323. #endif
  1324. default:
  1325. Assert(false);
  1326. __assume(false);
  1327. }
  1328. }
  1329. else if (TySize[dstType] < TySize[srcType])
  1330. {
  1331. instr->GetSrc1()->SetType(dst->GetType());
  1332. }
  1333. if(instr->m_opcode == Js::OpCode::MOV)
  1334. {
  1335. uint src1Forms = L_Reg | L_Mem | L_Ptr; // Allow 64 bit values in x64 as well
  1336. if (dst->IsMemoryOpnd())
  1337. {
  1338. #if _M_X64
  1339. // Only allow <= 32 bit values
  1340. src1Forms = L_Reg | L_Imm32;
  1341. #else
  1342. src1Forms = L_Reg | L_Ptr;
  1343. #endif
  1344. }
  1345. LegalizeOpnds<verify>(
  1346. instr,
  1347. L_Reg | L_Mem,
  1348. src1Forms,
  1349. L_None);
  1350. }
  1351. else
  1352. {
  1353. LegalizeOpnds<verify>(
  1354. instr,
  1355. L_Reg,
  1356. L_Reg | L_Mem,
  1357. L_None);
  1358. }
  1359. break;
  1360. }
  1361. case Js::OpCode::CMOVA:
  1362. case Js::OpCode::CMOVAE:
  1363. case Js::OpCode::CMOVB:
  1364. case Js::OpCode::CMOVBE:
  1365. case Js::OpCode::CMOVE:
  1366. case Js::OpCode::CMOVG:
  1367. case Js::OpCode::CMOVGE:
  1368. case Js::OpCode::CMOVL:
  1369. case Js::OpCode::CMOVLE:
  1370. case Js::OpCode::CMOVNE:
  1371. case Js::OpCode::CMOVNO:
  1372. case Js::OpCode::CMOVNP:
  1373. case Js::OpCode::CMOVNS:
  1374. case Js::OpCode::CMOVO:
  1375. case Js::OpCode::CMOVP:
  1376. case Js::OpCode::CMOVS:
  1377. if (instr->GetSrc2())
  1378. {
  1379. Assert(instr->GetDst()->GetSize() == instr->GetSrc2()->GetSize());
  1380. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1381. // 0 shouldn't be the src2 of a CMOVcc.
  1382. // CMOVcc doesn't support moving a constant and the legalizer will hoist the load of the constant
  1383. // to a register. If the constant was 0, Peeps will turn it into a XOR which, in turn, may change
  1384. // the zero flags and hence the result of CMOVcc. If you do want to CMOVcc 0, you should load 0
  1385. // into a register before the instruction whose result the CMOVcc depends on.
  1386. Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() != 0);
  1387. // sometimes we have fake src1 to help reg alloc
  1388. LegalizeOpnds<verify>(
  1389. instr,
  1390. L_Reg,
  1391. L_Reg,
  1392. L_Reg | L_Mem);
  1393. }
  1394. else
  1395. {
  1396. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1397. LegalizeOpnds<verify>(
  1398. instr,
  1399. L_Reg,
  1400. L_Reg | L_Mem,
  1401. L_None);
  1402. }
  1403. break;
  1404. case Js::OpCode::MOVSD:
  1405. Assert(AutoSystemInfo::Data.SSE2Available());
  1406. case Js::OpCode::MOVSS:
  1407. {
  1408. Assert(instr->GetDst()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD? TyFloat64 : TyFloat32) || instr->GetDst()->IsSimd128());
  1409. Assert(instr->GetSrc1()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD ? TyFloat64 : TyFloat32) || instr->GetSrc1()->IsSimd128());
  1410. LegalizeOpnds<verify>(
  1411. instr,
  1412. L_Reg | L_Mem,
  1413. instr->GetDst()->IsMemoryOpnd()?
  1414. L_Reg : L_Reg | L_Mem, // LegalizeOpnds doesn't check if dst/src1 are both memopnd, check it here.
  1415. L_None);
  1416. break;
  1417. }
  1418. case Js::OpCode::MOVUPS:
  1419. case Js::OpCode::MOVAPS:
  1420. {
  1421. LegalizeOpnds<verify>(
  1422. instr,
  1423. L_Reg | L_Mem,
  1424. instr->GetDst()->IsMemoryOpnd()?
  1425. L_Reg : L_Reg | L_Mem, // LegalizeOpnds doesn't check if dst/src1 are both memopnd, check it here.
  1426. L_None);
  1427. break;
  1428. }
  1429. case Js::OpCode::CMP:
  1430. LegalizeOpnds<verify>(
  1431. instr,
  1432. L_None,
  1433. L_Reg | L_Mem,
  1434. L_Reg | L_Mem | L_Imm32);
  1435. break;
  1436. case Js::OpCode::TEST:
  1437. if((instr->GetSrc1()->IsImmediateOpnd() && !instr->GetSrc2()->IsImmediateOpnd()) ||
  1438. (instr->GetSrc2()->IsMemoryOpnd() && !instr->GetSrc1()->IsMemoryOpnd()))
  1439. {
  1440. if (verify)
  1441. {
  1442. AssertMsg(false, "Missing legalization");
  1443. return;
  1444. }
  1445. instr->SwapOpnds();
  1446. }
  1447. LegalizeOpnds<verify>(
  1448. instr,
  1449. L_None,
  1450. L_Reg | L_Mem,
  1451. L_Reg | L_Imm32);
  1452. break;
  1453. case Js::OpCode::COMISD:
  1454. case Js::OpCode::UCOMISD:
  1455. Assert(AutoSystemInfo::Data.SSE2Available());
  1456. case Js::OpCode::COMISS:
  1457. case Js::OpCode::UCOMISS:
  1458. LegalizeOpnds<verify>(
  1459. instr,
  1460. L_None,
  1461. L_Reg,
  1462. L_Reg | L_Mem);
  1463. break;
  1464. case Js::OpCode::INC:
  1465. case Js::OpCode::DEC:
  1466. case Js::OpCode::NEG:
  1467. MakeDstEquSrc1<verify>(instr);
  1468. LegalizeOpnds<verify>(
  1469. instr,
  1470. L_Reg | L_Mem,
  1471. L_Reg | L_Mem,
  1472. L_None);
  1473. break;
  1474. #ifdef _M_IX86
  1475. case Js::OpCode::ADC:
  1476. #endif
  1477. case Js::OpCode::ADD:
  1478. case Js::OpCode::SUB:
  1479. case Js::OpCode::SBB:
  1480. case Js::OpCode::AND:
  1481. case Js::OpCode::OR:
  1482. case Js::OpCode::XOR:
  1483. MakeDstEquSrc1<verify>(instr);
  1484. LegalizeOpnds<verify>(
  1485. instr,
  1486. L_Reg | L_Mem,
  1487. L_Reg | L_Mem,
  1488. L_Reg | L_Mem | L_Imm32);
  1489. break;
  1490. case Js::OpCode::ADDSD:
  1491. case Js::OpCode::ADDPD:
  1492. case Js::OpCode::SUBSD:
  1493. case Js::OpCode::ANDPD:
  1494. case Js::OpCode::ANDNPD:
  1495. case Js::OpCode::DIVPD:
  1496. case Js::OpCode::MAXPD:
  1497. case Js::OpCode::MINPD:
  1498. case Js::OpCode::MULPD:
  1499. case Js::OpCode::SUBPD:
  1500. Assert(AutoSystemInfo::Data.SSE2Available());
  1501. case Js::OpCode::ADDPS:
  1502. case Js::OpCode::ADDSS:
  1503. case Js::OpCode::SUBSS:
  1504. case Js::OpCode::ANDPS:
  1505. case Js::OpCode::ANDNPS:
  1506. case Js::OpCode::DIVPS:
  1507. case Js::OpCode::MAXPS:
  1508. case Js::OpCode::MINPS:
  1509. case Js::OpCode::MULPS:
  1510. case Js::OpCode::ORPS:
  1511. case Js::OpCode::PADDB:
  1512. case Js::OpCode::PADDSB:
  1513. case Js::OpCode::PADDD:
  1514. case Js::OpCode::PADDW:
  1515. case Js::OpCode::PADDSW:
  1516. case Js::OpCode::PADDUSB:
  1517. case Js::OpCode::PADDUSW:
  1518. case Js::OpCode::PAND:
  1519. case Js::OpCode::PANDN:
  1520. case Js::OpCode::PCMPEQB:
  1521. case Js::OpCode::PCMPEQD:
  1522. case Js::OpCode::PCMPEQW:
  1523. case Js::OpCode::PCMPGTB:
  1524. case Js::OpCode::PCMPGTW:
  1525. case Js::OpCode::PCMPGTD:
  1526. case Js::OpCode::PMAXSW:
  1527. case Js::OpCode::PMAXUB:
  1528. case Js::OpCode::PMINSW:
  1529. case Js::OpCode::PMINUB:
  1530. case Js::OpCode::PMULLW:
  1531. case Js::OpCode::PMULUDQ:
  1532. case Js::OpCode::POR:
  1533. case Js::OpCode::PSUBB:
  1534. case Js::OpCode::PSUBSB:
  1535. case Js::OpCode::PSUBD:
  1536. case Js::OpCode::PSUBW:
  1537. case Js::OpCode::PSUBSW:
  1538. case Js::OpCode::PSUBUSB:
  1539. case Js::OpCode::PSUBUSW:
  1540. case Js::OpCode::PXOR:
  1541. case Js::OpCode::SUBPS:
  1542. case Js::OpCode::XORPS:
  1543. case Js::OpCode::CMPLTPS:
  1544. case Js::OpCode::CMPLEPS:
  1545. case Js::OpCode::CMPEQPS:
  1546. case Js::OpCode::CMPNEQPS:
  1547. case Js::OpCode::CMPLTPD:
  1548. case Js::OpCode::CMPLEPD:
  1549. case Js::OpCode::CMPEQPD:
  1550. case Js::OpCode::CMPNEQPD:
  1551. case Js::OpCode::CMPUNORDPS:
  1552. case Js::OpCode::PUNPCKLBW:
  1553. case Js::OpCode::PUNPCKLDQ:
  1554. case Js::OpCode::PUNPCKLWD:
  1555. MakeDstEquSrc1<verify>(instr);
  1556. LegalizeOpnds<verify>(
  1557. instr,
  1558. L_Reg,
  1559. L_Reg,
  1560. L_Reg | L_Mem);
  1561. break;
  1562. case Js::OpCode::SHL:
  1563. case Js::OpCode::SHR:
  1564. case Js::OpCode::SAR:
  1565. case Js::OpCode::ROL:
  1566. case Js::OpCode::ROR:
  1567. if (verify)
  1568. {
  1569. Assert(instr->GetSrc2()->IsIntConstOpnd()
  1570. || instr->GetSrc2()->AsRegOpnd()->GetReg() == LowererMDArch::GetRegShiftCount());
  1571. }
  1572. else
  1573. {
  1574. if(!instr->GetSrc2()->IsIntConstOpnd())
  1575. {
  1576. IR::Instr *const newInstr = instr->HoistSrc2(Js::OpCode::MOV);
  1577. newInstr->GetDst()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1578. instr->GetSrc2()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1579. }
  1580. instr->GetSrc2()->SetType(TyUint8);
  1581. }
  1582. MakeDstEquSrc1<verify>(instr);
  1583. LegalizeOpnds<verify>(
  1584. instr,
  1585. L_Reg | L_Mem,
  1586. L_Reg | L_Mem,
  1587. L_Reg | L_Imm32);
  1588. break;
  1589. case Js::OpCode::IMUL2:
  1590. MakeDstEquSrc1<verify>(instr); // the encoder does not support IMUL3 r, r/m, imm
  1591. LegalizeOpnds<verify>(
  1592. instr,
  1593. L_Reg,
  1594. L_Reg,
  1595. L_Reg | L_Mem | L_Imm32); // for L_Imm32, the encoder converts it into an IMUL3
  1596. break;
  1597. case Js::OpCode::TZCNT:
  1598. case Js::OpCode::LZCNT:
  1599. Assert(
  1600. (instr->m_opcode == Js::OpCode::LZCNT && AutoSystemInfo::Data.LZCntAvailable()) ||
  1601. (instr->m_opcode == Js::OpCode::TZCNT && AutoSystemInfo::Data.TZCntAvailable())
  1602. );
  1603. case Js::OpCode::BSF:
  1604. case Js::OpCode::BSR:
  1605. LegalizeOpnds<verify>(
  1606. instr,
  1607. L_Reg,
  1608. L_Reg | L_Mem,
  1609. L_None);
  1610. break;
  1611. case Js::OpCode::LEA:
  1612. Assert(instr->GetDst()->IsRegOpnd());
  1613. Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd()
  1614. || instr->GetSrc1()->IsMemRefOpnd()); // We may convert IndirOpnd to MemRefOpnd
  1615. Assert(!instr->GetSrc2());
  1616. break;
  1617. case Js::OpCode::PSRLDQ:
  1618. case Js::OpCode::PSLLDQ:
  1619. case Js::OpCode::PSRLW:
  1620. case Js::OpCode::PSRLD:
  1621. case Js::OpCode::PSRAW:
  1622. case Js::OpCode::PSRAD:
  1623. case Js::OpCode::PSLLW:
  1624. case Js::OpCode::PSLLD:
  1625. Assert(AutoSystemInfo::Data.SSE2Available());
  1626. MakeDstEquSrc1<verify>(instr);
  1627. LegalizeOpnds<verify>(
  1628. instr,
  1629. L_Reg,
  1630. L_Reg,
  1631. L_Reg | L_Imm32);
  1632. break;
  1633. case Js::OpCode::ROUNDSD:
  1634. case Js::OpCode::ROUNDSS:
  1635. Assert(AutoSystemInfo::Data.SSE4_1Available());
  1636. break;
  1637. case Js::OpCode::CVTDQ2PD:
  1638. case Js::OpCode::CVTDQ2PS:
  1639. case Js::OpCode::CVTPD2PS:
  1640. case Js::OpCode::CVTPS2PD:
  1641. case Js::OpCode::CVTSD2SI:
  1642. case Js::OpCode::CVTSD2SS:
  1643. case Js::OpCode::CVTSI2SD:
  1644. case Js::OpCode::CVTSS2SD:
  1645. case Js::OpCode::CVTTPD2DQ:
  1646. case Js::OpCode::CVTTPS2DQ:
  1647. case Js::OpCode::CVTTSD2SI:
  1648. case Js::OpCode::DIVSD:
  1649. case Js::OpCode::SQRTPD:
  1650. case Js::OpCode::SQRTSD:
  1651. case Js::OpCode::SHUFPD:
  1652. Assert(AutoSystemInfo::Data.SSE2Available());
  1653. break;
  1654. }
  1655. #if DBG
  1656. // Asserting general rules
  1657. // There should be at most 1 memory opnd in an instruction
  1658. if (instr->GetDst() && instr->GetDst()->IsMemoryOpnd())
  1659. {
  1660. // All memref address need to fit in a dword
  1661. Assert(!instr->GetDst()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetDst()->AsMemRefOpnd()->GetMemLoc()));
  1662. if (instr->GetSrc1())
  1663. {
  1664. Assert(instr->GetSrc1()->IsEqual(instr->GetDst()) || !instr->GetSrc1()->IsMemoryOpnd());
  1665. if (instr->GetSrc2())
  1666. {
  1667. Assert(!instr->GetSrc2()->IsMemoryOpnd());
  1668. }
  1669. }
  1670. }
  1671. else if (instr->GetSrc1() && instr->GetSrc1()->IsMemoryOpnd())
  1672. {
  1673. // All memref address need to fit in a dword
  1674. Assert(!instr->GetSrc1()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetSrc1()->AsMemRefOpnd()->GetMemLoc()));
  1675. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsMemoryOpnd());
  1676. }
  1677. else if (instr->GetSrc2() && instr->GetSrc2()->IsMemRefOpnd())
  1678. {
  1679. // All memref address need to fit in a dword
  1680. Assert(Math::FitsInDWord((size_t)instr->GetSrc2()->AsMemRefOpnd()->GetMemLoc()));
  1681. }
  1682. // Non-MOV (second operand) immediate need to fit in DWORD for AMD64
  1683. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsImmediateOpnd()
  1684. || (TySize[instr->GetSrc2()->GetType()] != 8) || Math::FitsInDWord(instr->GetSrc2()->GetImmediateValue(instr->m_func)));
  1685. #endif
  1686. }
  1687. template <bool verify>
  1688. void LowererMD::LegalizeOpnds(IR::Instr *const instr, const uint dstForms, const uint src1Forms, uint src2Forms)
  1689. {
  1690. Assert(instr);
  1691. Assert(!instr->GetDst() == !dstForms);
  1692. Assert(!instr->GetSrc1() == !src1Forms);
  1693. Assert(!instr->GetSrc2() == !src2Forms);
  1694. Assert(src1Forms || !src2Forms);
  1695. const auto NormalizeForms = [](uint forms) -> uint
  1696. {
  1697. #ifdef _M_X64
  1698. if(forms & L_Ptr)
  1699. {
  1700. forms |= L_Imm32;
  1701. }
  1702. #else
  1703. if(forms & (L_Imm32 | L_Ptr))
  1704. {
  1705. forms |= L_Imm32 | L_Ptr;
  1706. }
  1707. #endif
  1708. return forms;
  1709. };
  1710. if(dstForms)
  1711. {
  1712. LegalizeDst<verify>(instr, NormalizeForms(dstForms));
  1713. }
  1714. if(!src1Forms)
  1715. {
  1716. return;
  1717. }
  1718. LegalizeSrc<verify>(instr, instr->GetSrc1(), NormalizeForms(src1Forms));
  1719. if(src2Forms & L_Mem && instr->GetSrc1()->IsMemoryOpnd())
  1720. {
  1721. src2Forms ^= L_Mem;
  1722. }
  1723. if(src2Forms)
  1724. {
  1725. LegalizeSrc<verify>(instr, instr->GetSrc2(), NormalizeForms(src2Forms));
  1726. }
  1727. }
// Legalizes the destination operand of 'instr' into one of the encodable
// forms given by 'forms' (mask of L_* flags). When the 'verify' template
// parameter is true, nothing is rewritten; any case that would have needed a
// rewrite asserts "Missing legalization" instead.
template <bool verify>
void LowererMD::LegalizeDst(IR::Instr *const instr, const uint forms)
{
    Assert(instr);
    Assert(forms);
    IR::Opnd *dst = instr->GetDst();
    Assert(dst);
#ifndef _M_X64
    AssertMsg(!dst->IsInt64(), "Int64 supported only on x64");
#endif
    switch(dst->GetKind())
    {
    case IR::OpndKindReg:
        // Register dsts are never rewritten; they just must be permitted.
        Assert(forms & L_Reg);
        return;
    case IR::OpndKindMemRef:
    {
        IR::MemRefOpnd *const memRefOpnd = dst->AsMemRefOpnd();
        if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
        {
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // Address cannot be encoded directly: MOV it into a register and
            // rewrite the dst as an indirection through that register.
            dst = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
        }
        // fall through
    }
    case IR::OpndKindSym:
    case IR::OpndKindIndir:
        if(forms & L_Mem)
        {
            return;
        }
        break;
    default:
        Assert(false);
        __assume(false);
    }
    if (verify)
    {
        AssertMsg(false, "Missing legalization");
        return;
    }
    // Use a reg dst, then store that reg into the original dst
    Assert(forms & L_Reg);
    const IRType irType = dst->GetType();
    IR::RegOpnd *const regOpnd = IR::RegOpnd::New(irType, instr->m_func);
    regOpnd->SetValueType(dst->GetValueType());
    instr->UnlinkDst();
    instr->SetDst(regOpnd);
    instr->InsertAfter(IR::Instr::New(GetStoreOp(irType), dst, regOpnd, instr->m_func));
    // If the original dst is the same as one of the srcs, hoist a src into the same reg and replace the same srcs with the reg
    const bool equalsSrc1 = instr->GetSrc1() && dst->IsEqual(instr->GetSrc1());
    const bool equalsSrc2 = instr->GetSrc2() && dst->IsEqual(instr->GetSrc2());
    if(!(equalsSrc1 || equalsSrc2))
    {
        return;
    }
    const Js::OpCode loadOpCode = GetLoadOp(irType);
    if(equalsSrc1)
    {
        instr->HoistSrc1(loadOpCode, RegNOREG, regOpnd->m_sym);
        if(equalsSrc2)
        {
            instr->ReplaceSrc2(regOpnd);
        }
    }
    else
    {
        instr->HoistSrc2(loadOpCode, RegNOREG, regOpnd->m_sym);
    }
}
  1802. bool LowererMD::HoistLargeConstant(IR::IndirOpnd *indirOpnd, IR::Opnd *src, IR::Instr *instr) {
  1803. if (indirOpnd != nullptr)
  1804. {
  1805. if (indirOpnd->GetOffset() == 0)
  1806. {
  1807. instr->ReplaceSrc(src, indirOpnd->GetBaseOpnd());
  1808. }
  1809. else
  1810. {
  1811. // Hoist the address load as LEA [reg + offset]
  1812. // with the reg = MOV <some address within 32-bit range at the start of the function
  1813. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
  1814. Lowerer::InsertLea(regOpnd, indirOpnd, instr);
  1815. instr->ReplaceSrc(src, regOpnd);
  1816. }
  1817. return true;
  1818. }
  1819. return false;
  1820. }
// Legalizes one source operand ('src', which must be src1 or src2 of 'instr')
// into one of the encodable forms given by 'forms' (mask of L_* flags).
// When 'verify' is true, asserts instead of rewriting. Constants that do not
// fit a 32-bit immediate on x64 are redirected through the function's
// constant-address table (see HoistLargeConstant) or loaded into a register.
template <bool verify>
void LowererMD::LegalizeSrc(IR::Instr *const instr, IR::Opnd *src, const uint forms)
{
    Assert(instr);
    Assert(src);
    Assert(src == instr->GetSrc1() || src == instr->GetSrc2());
    Assert(forms);
#ifndef _M_X64
    AssertMsg(!src->IsInt64(), "Int64 supported only on x64");
#endif
    switch(src->GetKind())
    {
    case IR::OpndKindReg:
        Assert(forms & L_Reg);
        return;
    case IR::OpndKindIntConst:
        if(forms & L_Ptr)
        {
            return;
        }
#ifdef _M_X64
        {
            IR::IntConstOpnd * intOpnd = src->AsIntConstOpnd();
            // 8-byte constants that fit in a DWORD are still legal as Imm32
            // — unless this is an inlinee entry instr, whose final value is
            // patched by the encoder and must stay full width.
            if ((TySize[intOpnd->GetType()] != 8) ||
                (!instr->isInlineeEntryInstr && Math::FitsInDWord(intOpnd->GetValue())))
            {
                if (forms & L_Imm32)
                {
                    // the constant fits in 32-bit, no need to hoist
                    return;
                }
                break;
            }
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // The actual value for inlinee entry instr isn't determined until encoder
            // So it need to be hoisted conventionally.
            if (!instr->isInlineeEntryInstr)
            {
                Assert(forms & L_Reg);
                IR::IntConstOpnd * newIntOpnd = intOpnd->Copy(instr->m_func)->AsIntConstOpnd();
                IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd(intOpnd->GetValue(), newIntOpnd, IR::AddrOpndKindConstantAddress, TyMachPtr, Js::OpCode::MOV);
                if (HoistLargeConstant(indirOpnd, src, instr))
                {
                    return;
                }
            }
        }
#endif
        break;
    case IR::OpndKindFloatConst:
        break; // assume for now that it always needs to be hoisted
    case IR::OpndKindInt64Const:
        if (forms & L_Ptr)
        {
            return;
        }
#ifdef _M_X64
        {
            IR::Int64ConstOpnd * int64Opnd = src->AsInt64ConstOpnd();
            if ((forms & L_Imm32) && ((src->GetSize() != 8) ||
                (!instr->isInlineeEntryInstr && Math::FitsInDWord(int64Opnd->GetValue()))))
            {
                // the immediate fits in 32-bit, no need to hoist
                return;
            }
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // Load the 64-bit immediate into a register with a plain MOV.
            IR::Opnd* regOpnd = IR::RegOpnd::New(src->GetType(), instr->m_func);
            IR::Instr* moveToReg = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, instr->m_func);
            instr->InsertBefore(moveToReg);
            instr->ReplaceSrc(src, regOpnd);
            return;
        }
#endif
        break;
    case IR::OpndKindAddr:
        if (forms & L_Ptr)
        {
            return;
        }
#ifdef _M_X64
        {
            IR::AddrOpnd * addrOpnd = src->AsAddrOpnd();
            if ((forms & L_Imm32) && ((TySize[addrOpnd->GetType()] != 8) ||
                (!instr->isInlineeEntryInstr && Math::FitsInDWord((size_t)addrOpnd->m_address))))
            {
                // the address fits in 32-bit, no need to hoist
                return;
            }
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            Assert(!instr->isInlineeEntryInstr);
            Assert(forms & L_Reg);
            // TODO: michhol, remove cast after making m_address intptr
            IR::AddrOpnd * newAddrOpnd = addrOpnd->Copy(instr->m_func)->AsAddrOpnd();
            IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd((intptr_t)addrOpnd->m_address, newAddrOpnd, addrOpnd->GetAddrOpndKind(), TyMachPtr, Js::OpCode::MOV);
            if (HoistLargeConstant(indirOpnd, src, instr))
            {
                return;
            }
        }
#endif
        break;
    case IR::OpndKindMemRef:
    {
        IR::MemRefOpnd *const memRefOpnd = src->AsMemRefOpnd();
        if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
        {
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // Unencodable address: hoist it into a register and rewrite the
            // src as an indirection, then treat it like any memory operand.
            src = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
        }
        // fall through
    }
    case IR::OpndKindSym:
    case IR::OpndKindIndir:
        if(forms & L_Mem)
        {
            return;
        }
        break;
    case IR::OpndKindHelperCall:
    case IR::OpndKindLabel:
        Assert(!instr->isInlineeEntryInstr);
        Assert(forms & L_Ptr);
        return;
    default:
        Assert(false);
        __assume(false);
    }
    if (verify)
    {
        AssertMsg(false, "Missing legalization");
        return;
    }
    // Hoist the src into a reg
    Assert(forms & L_Reg);
    Assert(!(instr->GetDst() && instr->GetDst()->IsEqual(src)));
    const Js::OpCode loadOpCode = GetLoadOp(src->GetType());
    if(src == instr->GetSrc2())
    {
        instr->HoistSrc2(loadOpCode);
        return;
    }
    // Hoisting src1: if src2 is the same operand, reuse the hoisted reg there too.
    const bool equalsSrc2 = instr->GetSrc2() && src->IsEqual(instr->GetSrc2());
    IR::Instr * hoistInstr = instr->HoistSrc1(loadOpCode);
    if(equalsSrc2)
    {
        instr->ReplaceSrc2(hoistInstr->GetDst());
    }
    // The inlinee-entry marker must travel with the instruction that now
    // carries the constant, so the encoder can patch it.
    hoistInstr->isInlineeEntryInstr = instr->isInlineeEntryInstr;
    instr->isInlineeEntryInstr = false;
}
// Explicit instantiations. The <false> variants perform the actual IR
// rewrites; the <true> ("verify") variants — debug builds only — assert
// instead of mutating, so missing legalizations are caught early.
template void LowererMD::Legalize<false>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<false>(IR::Instr *const instr, const uint dstForms, const uint src1Forms, uint src2Forms);
template void LowererMD::LegalizeDst<false>(IR::Instr *const instr, const uint forms);
template void LowererMD::LegalizeSrc<false>(IR::Instr *const instr, IR::Opnd *src, const uint forms);
template void LowererMD::MakeDstEquSrc1<false>(IR::Instr *const instr);
#if DBG
template void LowererMD::Legalize<true>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<true>(IR::Instr *const instr, const uint dstForms, const uint src1Forms, uint src2Forms);
template void LowererMD::LegalizeDst<true>(IR::Instr *const instr, const uint forms);
template void LowererMD::LegalizeSrc<true>(IR::Instr *const instr, IR::Opnd *src, const uint forms);
template void LowererMD::MakeDstEquSrc1<true>(IR::Instr *const instr);
#endif
  1999. IR::Instr *
  2000. LowererMD::LoadFunctionObjectOpnd(IR::Instr *instr, IR::Opnd *&functionObjOpnd)
  2001. {
  2002. IR::Opnd * src1 = instr->GetSrc1();
  2003. IR::Instr * instrPrev = instr->m_prev;
  2004. if (src1 == nullptr)
  2005. {
  2006. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  2007. StackSym *paramSym = StackSym::New(TyMachPtr, m_func);
  2008. IR::SymOpnd *paramOpnd = IR::SymOpnd::New(paramSym, TyMachPtr, m_func);
  2009. this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
  2010. IR::Instr * mov1 = IR::Instr::New(Js::OpCode::MOV, regOpnd, paramOpnd, m_func);
  2011. instr->InsertBefore(mov1);
  2012. functionObjOpnd = mov1->GetDst()->AsRegOpnd();
  2013. instrPrev = mov1;
  2014. instr->m_func->SetHasImplicitParamLoad();
  2015. }
  2016. else
  2017. {
  2018. // Inlinee, use the function object opnd on the instruction
  2019. functionObjOpnd = instr->UnlinkSrc1();
  2020. if (!functionObjOpnd->IsRegOpnd())
  2021. {
  2022. Assert(functionObjOpnd->IsAddrOpnd());
  2023. }
  2024. }
  2025. return instrPrev;
  2026. }
  2027. IR::Instr *
  2028. LowererMD::LowerLdSuper(IR::Instr *instr, IR::JnHelperMethod helperOpCode)
  2029. {
  2030. IR::Opnd * functionObjOpnd;
  2031. IR::Instr * instrPrev = LoadFunctionObjectOpnd(instr, functionObjOpnd);
  2032. m_lowerer->LoadScriptContext(instr);
  2033. LoadHelperArgument(instr, functionObjOpnd);
  2034. ChangeToHelperCall(instr, helperOpCode);
  2035. return instrPrev;
  2036. }
// Emits an inline fast path for Div_A when src2 is a tagged-int power of 2.
// Even quotients are computed by SAR and re-tagged directly; odd quotients
// (detected via the "divisible by src2/2" test) go through the
// Op_FinishOddDivByPow2 helper; everything else falls to the generic helper.
void
LowererMD::GenerateFastDivByPow2(IR::Instr *instr)
{
    //
    // Given:
    // dst = Div_A src1, src2
    // where src2 == power of 2
    //
    // Generate:
    // MOV s1, src1
    // AND s1, 0xFFFF000000000000 | (src2Value-1) ----- test for tagged int and divisibility by src2Value [int32]
    // AND s1, 0x00000001 | ((src2Value-1)<<1) [int31]
    // CMP s1, AtomTag_IntPtr
    // JNE $divbyhalf
    // MOV s1, src1
    // SAR s1, log2(src2Value) ------ perform the divide
    // OR s1, 1
    // MOV dst, s1
    // JMP $done
    // $divbyhalf:
    // AND s1, 0xFFFF000000000000 | (src2Value-1>>1) ----- test for tagged int and divisibility by src2Value /2 [int32]
    // AND s1, 0x00000001 | ((src2Value-1)) [int31]
    // CMP s1, AtomTag_IntPtr
    // JNE $helper
    // MOV s1, src1
    // SAR s1, log2(src2Value) [int32]
    // SAR s1, log2(src2Value) + 1 ------ removes the tag and divides [int31]
    // PUSH s1
    // PUSH 0xXXXXXXXX (ScriptContext)
    // CALL Op_FinishOddDivByPow2
    // MOV dst, eax
    // JMP $done
    // $helper:
    // ...
    // $done:
    //
    // Skip the fast path entirely if src1 is known not to be a tagged int.
    if (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->IsNotInt())
        return;
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
    IR::LabelInstr *divbyhalf = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyVar, m_func);
    AnalysisAssert(src2);
    Assert(src2->IsVar() && Js::TaggedInt::Is(src2->m_address) && (Math::IsPow2(Js::TaggedInt::ToInt32(src2->m_address))));
    int32 src2Value = Js::TaggedInt::ToInt32(src2->m_address);
    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));
#if INT32VAR
    // dontEncode as src2 is a power of 2.
    IR::Opnd *constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | (src2Value - 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    IR::Opnd *constant = IR::IntConstOpnd::New((0x00000001 | ((src2Value - 1) << 1)), TyInt32, m_func);
#endif
    // AND s1, constant -- combined tagged-int + divisibility test
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }
    // CMP s1, AtomTag_IntPtr
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }
    // JNE $divbyhalf
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, divbyhalf, m_func));
    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));
    s1 = s1->UseWithNewType(TyInt32, m_func)->AsRegOpnd();
    // SAR s1, log2(src2Value) -- the actual divide (even-quotient case)
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func), m_func));
    if(s1->GetSize() != MachPtr)
    {
        s1 = s1->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
    }
#if INT32VAR
    GenerateInt32ToVarConversion(s1, instr);
#else
    // OR s1, 1 -- restore the tagged-int bit
    instr->InsertBefore(IR::Instr::New(Js::OpCode::OR, s1, s1, IR::IntConstOpnd::New(1, TyInt32, m_func), m_func));
#endif
    // MOV dst, s1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, s1, m_func));
    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
    // $divbyhalf:
    instr->InsertBefore(divbyhalf);
#if INT32VAR
    constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | ((src2Value-1) >> 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    constant = IR::IntConstOpnd::New((0x00000001 | (src2Value-1)), TyInt32, m_func);
#endif
    // AND s1, constant -- tagged-int + divisibility-by-(src2/2) test
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }
    // CMP s1, AtomTag_IntPtr
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }
    // JNE $helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, helper, m_func));
    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));
    s1 = s1->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();
#if INT32VAR
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func);
#else
    // +1 also shifts out the tag bit in the tagged-pointer encoding.
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value) + 1, TyInt32, m_func);
#endif
    // SAR s1, shiftOpnd
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, shiftOpnd, m_func));
    // PUSH s1
    // PUSH ScriptContext
    // CALL Op_FinishOddDivByPow2
    {
        IR::JnHelperMethod helperMethod;
        if (instr->dstIsTempNumber)
        {
            // Result may be a non-tagged number: pass a stack temp for the
            // helper to box the result in place.
            IR::Opnd *tempOpnd;
            helperMethod = IR::HelperOp_FinishOddDivByPow2InPlace;
            Assert(dst->IsRegOpnd());
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
            IR::Instr *load = this->LoadStackAddress(tempNumberSym);
            instr->InsertBefore(load);
            tempOpnd = load->GetDst();
            this->lowererMDArch.LoadHelperArgument(instr, tempOpnd);
        }
        else
        {
            helperMethod = IR::HelperOp_FinishOddDivByPow2;
        }
        m_lowerer->LoadScriptContext(instr);
        lowererMDArch.LoadHelperArgument(instr, s1);
        IR::Instr *call = IR::Instr::New(Js::OpCode::Call, dst, IR::HelperCallOpnd::New(helperMethod, m_func), m_func);
        instr->InsertBefore(call);
        lowererMDArch.LowerCall(call, 0);
    }
    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
    // $helper: (the caller lowers 'instr' itself into the generic helper call)
    instr->InsertBefore(helper);
    // $done:
    instr->InsertAfter(done);
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastCmSrEqConst
///
///----------------------------------------------------------------------------
// Emits an inline strict-equality-against-a-constant sequence and consumes
// 'instr' (it is rewritten in place into the final MOV). Always returns true.
bool
LowererMD::GenerateFastCmSrEqConst(IR::Instr *instr)
{
    //
    // Given:
    // s1 = CmSrEq_A s2, s3
    // where either s2 or s3 is 'null', 'true' or 'false'
    //
    // Generate:
    //
    // CMP s2, s3
    // JEQ $mov_true
    // MOV s1, Library.GetFalse()
    // JMP $done
    // $mov_true:
    // MOV s1, Library.GetTrue()
    // $done:
    //
    Assert(m_lowerer->IsConstRegOpnd(instr->GetSrc2()->AsRegOpnd()));
    IR::Opnd *opnd = instr->GetSrc1();
    IR::RegOpnd *opndReg = instr->GetSrc2()->AsRegOpnd();
    IR::LabelInstr *labelMovTrue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // CMP needs a register lhs; load src1 into one if necessary.
    if (!opnd->IsRegOpnd())
    {
        IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
        IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, lhsReg, opnd, m_func);
        instr->InsertBefore(mov);
        opnd = lhsReg;
    }
    Assert(opnd->IsRegOpnd());
    // CMP s2, s3
    // JEQ $mov_true
    this->m_lowerer->InsertCompareBranch(opnd, opndReg->m_sym->GetConstOpnd(), Js::OpCode::BrEq_A, labelMovTrue, instr);
    // MOV s1, 'false'
    IR::Instr *instrMov = IR::Instr::New(Js::OpCode::MOV,
        instr->GetDst(),
        m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
        m_func);
    instr->InsertBefore(instrMov);
    // JMP $done
    IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    instr->InsertBefore(jmp);
    // $mov_true:
    instr->InsertBefore(labelMovTrue);
    // MOV s1, 'true' -- 'instr' itself becomes the true-path MOV.
    instr->m_opcode = Js::OpCode::MOV;
    instr->UnlinkSrc1();
    instr->UnlinkSrc2();
    instr->SetSrc1(m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue));
    instr->ClearBailOutInfo();
    Legalize(instr);
    // $done:
    instr->InsertAfter(labelDone);
    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastCmXxTaggedInt
///
///----------------------------------------------------------------------------
// Emits an inline tagged-int comparison fast path for Cm* opcodes. Returns
// true when both sources are proven tagged ints (instr removed, no helper
// needed); returns false when a helper label was emitted and the caller must
// still lower 'instr' as the slow path.
bool LowererMD::GenerateFastCmXxTaggedInt(IR::Instr *instr, bool isInHelper /* = false */)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints (i.e., are vars with the low bit set).
    //
    // Given:
    //
    // Cmxx_A dst, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    // MOV r1, src1
    // if (==, !=, !== or ===)
    // SUB r1, src2
    // NEG r1 // Sets CF if r1 != 0
    // SBB r1, r1 // CF == 1 ? r1 = -1 : r1 = 0
    // else
    // MOV r2, 0
    // CMP r1, src2
    // SETcc r2
    // DEC r2
    // set r1 to r2
    // AND r1, (notEqualResult - equalResult)
    // ADD r1, equalResult
    // MOV dst, r1
    // JMP $fallthru
    // $helper:
    // (caller will generate normal helper call sequence)
    // $fallthru:
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Opnd * dst = instr->GetDst();
    IR::RegOpnd * r1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func, isInHelper);
    Assert(src1 && src2 && dst);
    // Not tagged ints? Bail out of the fast path entirely.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    bool isNeqOp = instr->m_opcode == Js::OpCode::CmSrNeq_A || instr->m_opcode == Js::OpCode::CmNeq_A;
    intptr_t notEqualResult = isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();
    intptr_t equalResult = !isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();
    // Tagged ints?
    bool isTaggedInts = false;
    if (src1->IsTaggedInt())
    {
        if (src2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }
    if (!isTaggedInts)
    {
        // Not provable at JIT time: emit the runtime pair test, branching to
        // $helper when either source is not a tagged int.
        this->GenerateSmIntPairTest(instr, src1, src2, helper);
    }
    // MOV r1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, m_func));
    Js::OpCode setCC_Opcode = Js::OpCode::Nop;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmEq_A:
        break;
    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmNeq_A:
        break;
    case Js::OpCode::CmGe_A:
        setCC_Opcode = Js::OpCode::SETGE;
        break;
    case Js::OpCode::CmGt_A:
        setCC_Opcode = Js::OpCode::SETG;
        break;
    case Js::OpCode::CmLe_A:
        setCC_Opcode = Js::OpCode::SETLE;
        break;
    case Js::OpCode::CmLt_A:
        setCC_Opcode = Js::OpCode::SETL;
        break;
    default:
        Assume(UNREACHED);
    }
    if (setCC_Opcode == Js::OpCode::Nop)
    {
        // (In)equality: produce r1 = (src1 == src2) ? 0 : -1 without a branch.
        // SUB r1, src2
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, r1, r1, src2, m_func);
        instr->InsertBefore(subInstr);
        Legalize(subInstr); // src2 may need legalizing
        // NEG r1 -- sets CF iff r1 != 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, r1, r1, m_func));
        // SBB r1, r1 -- r1 = CF ? -1 : 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SBB, r1, r1, r1, m_func));
    }
    else
    {
        // Relational compare: r2 = SETcc result (0/1), then DEC to 0/-1.
        IR::Instr *instrNew;
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyMachPtr, this->m_func);
        // MOV r2, 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r2, IR::IntConstOpnd::New(0, TyMachReg, this->m_func), m_func));
        // CMP r1, src2 -- 32-bit compare of the tagged values
        IR::Opnd *r1_32 = r1->UseWithNewType(TyInt32, this->m_func);
        IR::Opnd *src2_32 = src2->UseWithNewType(TyInt32, this->m_func);
        instrNew = IR::Instr::New(Js::OpCode::CMP, m_func);
        instrNew->SetSrc1(r1_32);
        instrNew->SetSrc2(src2_32);
        instr->InsertBefore(instrNew);
        // SETcc r2
        IR::RegOpnd *r2_i8 = (IR::RegOpnd*) r2->UseWithNewType(TyInt8, this->m_func);
        instrNew = IR::Instr::New(setCC_Opcode, r2_i8, r2_i8, m_func);
        instr->InsertBefore(instrNew);
        // DEC r2
        instr->InsertBefore(IR::Instr::New(Js::OpCode::DEC, r2, r2, m_func));
        // r1 <- r2
        r1 = r2;
    }
    // r1 is now 0 (condition false... see note) or -1; select between the two
    // library booleans branch-free:
    // AND r1, (notEqualResult - equalResult)
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, r1, r1, m_func);
        andInstr->SetSrc2(IR::AddrOpnd::New((void*)((size_t)notEqualResult - (size_t)equalResult), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func));
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }
    // ADD r1, equalResult
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::ADD, r1, r1, m_func);
        add->SetSrc2(IR::AddrOpnd::New(equalResult, IR::AddrOpndKind::AddrOpndKindDynamicVar, this->m_func));
        instr->InsertBefore(add);
        Legalize(add);
    }
    // MOV dst, r1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, r1, m_func));
    if (isTaggedInts)
    {
        // Fully proven fast path: the original compare instr is dead.
        instr->Remove();
        return true;
    }
    // JMP $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, fallthru, m_func));
    instr->InsertBefore(helper);
    instr->InsertAfter(fallthru);
    return false;
}
// Float64 compare lowering: delegates to the shared GenerateFastCmXx path.
void LowererMD::GenerateFastCmXxR8(IR::Instr *instr)
{
    GenerateFastCmXx(instr);
}
// Int32 compare lowering: delegates to the shared GenerateFastCmXx path.
void LowererMD::GenerateFastCmXxI4(IR::Instr *instr)
{
    GenerateFastCmXx(instr);
}
// Lower a CmXx_A / CmXx_I4 compare to a branch-free flags-based sequence.
// The source instruction is consumed (removed) after the expansion.
//
// For float src:
//      dst = MOV 0/1
//      (U)COMISD src1, src2
//      JP $done                 -- unordered (NaN) keeps the preloaded 0/1
//      dst.i8 = SetCC dst.i8
//      $done:
//
// for int src:
//      CMP src1, src2
//      dst = MOV 0 / false
//      dst.i8 = SetCC dst.i8 / CMOVcc true
void LowererMD::GenerateFastCmXx(IR::Instr *instr)
{
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();
    // tmp is the register the SETcc/CMOVcc writes; it aliases dst unless
    // dst overlaps a source (see below).
    IR::Opnd * tmp = dst;
    bool isIntDst = dst->AsRegOpnd()->m_sym->IsInt32();
    bool isFloatSrc = src1->IsFloat();
    bool isInt64Src = src1->IsInt64();
    Assert(!isFloatSrc || src2->IsFloat());
    Assert(!isFloatSrc || isIntDst);
    Assert(!isInt64Src || src2->IsInt64());
    Assert(!isInt64Src || isIntDst);
    Assert(!isFloatSrc || AutoSystemInfo::Data.SSE2Available());
    IR::Opnd *opnd;
    IR::Instr *newInstr;

    Assert(src1->IsRegOpnd());
#if LOWER_SPLIT_INT64
    // On 32-bit targets an int64 lives in a register pair; compare the high
    // halves first, then (below, under !_M_X64) the low halves if equal.
    Int64RegPair src1Pair, src2Pair;
    if (isInt64Src)
    {
        src1Pair = this->m_func->FindOrCreateInt64Pair(src1);
        src2Pair = this->m_func->FindOrCreateInt64Pair(src2);
        src1 = src1Pair.high;
        src2 = src2Pair.high;
    }
#endif

    // Float compares need a $done label as the JP (unordered) target;
    // int compares just insert everything in front of the original instr.
    IR::Instr * done;
    if (isFloatSrc)
    {
        done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        instr->InsertBefore(done);
    }
    else
    {
        done = instr;
    }

    if (isIntDst)
    {
        // reg = MOV 0 will get peeped to XOR reg, reg which sets the flags.
        // Put the MOV before the CMP, but use a tmp if dst == src1/src2
        if (dst->IsEqual(src1) || dst->IsEqual(src2))
        {
            tmp = IR::RegOpnd::New(dst->GetType(), this->m_func);
        }

        // dst = MOV 0 (or 1 for float CmNeq: the JP skips the SETcc, so an
        // unordered compare must leave "not equal" == 1 in the result)
        if (isFloatSrc && instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            opnd = IR::IntConstOpnd::New(1, TyInt32, this->m_func);
        }
        else
        {
            opnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        }
        m_lowerer->InsertMove(tmp, opnd, done);
    }

    // Pick the compare instruction: UCOMIS* for float equality compares,
    // COMIS* for float relational compares, CMP for integers.
    Js::OpCode cmpOp;
    if (isFloatSrc)
    {
        if (instr->m_opcode == Js::OpCode::CmEq_A || instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        }
        else
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
        }
    }
    else
    {
        cmpOp = Js::OpCode::CMP;
    }

    // CMP src1, src2
    newInstr = IR::Instr::New(cmpOp, this->m_func);
    newInstr->SetSrc1(src1);
    newInstr->SetSrc2(src2);
    done->InsertBefore(newInstr);
    LowererMD::Legalize(newInstr);

    if (isFloatSrc)
    {
        // JP $done -- bail out of the SETcc on an unordered (NaN) compare
        newInstr = IR::BranchInstr::New(Js::OpCode::JP, done->AsLabelInstr(), this->m_func);
        done->InsertBefore(newInstr);
    }

    if (!isIntDst)
    {
        // Var-typed result: preload the library 'false' value; the CMOVcc
        // below conditionally replaces it with 'true'.
        opnd = this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
        LowererMD::CreateAssign(tmp, opnd, done);
    }

    // Map the compare opcode to the condition code. CmXx_A unsigned forms
    // (A/AE/B/BE) are shared with the CmUnXx_I4 opcodes.
    Js::OpCode useCC;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmEq_A:
        useCC = isIntDst ? Js::OpCode::SETE : Js::OpCode::CMOVE;
        break;

    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmNeq_A:
        useCC = isIntDst ? Js::OpCode::SETNE : Js::OpCode::CMOVNE;
        break;

    case Js::OpCode::CmGe_I4:
        useCC = isIntDst ? Js::OpCode::SETGE : Js::OpCode::CMOVGE;
        break;

    case Js::OpCode::CmGt_I4:
        useCC = isIntDst ? Js::OpCode::SETG : Js::OpCode::CMOVG;
        break;

    case Js::OpCode::CmLe_I4:
        useCC = isIntDst ? Js::OpCode::SETLE : Js::OpCode::CMOVLE;
        break;

    case Js::OpCode::CmLt_I4:
        useCC = isIntDst ? Js::OpCode::SETL : Js::OpCode::CMOVL;
        break;

    case Js::OpCode::CmUnGe_I4:
    case Js::OpCode::CmGe_A:
        useCC = isIntDst ? Js::OpCode::SETAE : Js::OpCode::CMOVAE;
        break;

    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmGt_A:
        useCC = isIntDst ? Js::OpCode::SETA : Js::OpCode::CMOVA;
        break;

    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmLe_A:
        useCC = isIntDst ? Js::OpCode::SETBE : Js::OpCode::CMOVBE;
        break;

    case Js::OpCode::CmUnLt_I4:
    case Js::OpCode::CmLt_A:
        useCC = isIntDst ? Js::OpCode::SETB : Js::OpCode::CMOVB;
        break;

    default:
        useCC = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    if (isIntDst)
    {
        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(useCC, tmp_i8, tmp_i8, this->m_func);
    }
    else
    {
        // regTrue = MOV true
        IR::Opnd *regTrue = IR::RegOpnd::New(TyMachPtr, this->m_func);
        Lowerer::InsertMove(regTrue, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), done);

        // tmp = CMOVcc tmp, regTrue -- tmp appears as src1 to create a
        // pseudo use so the register allocator keeps the preloaded value
        // live in the same register.
        newInstr = IR::Instr::New(useCC, tmp, tmp, regTrue, this->m_func);
    }
    done->InsertBefore(newInstr);

#ifndef _M_X64
    if (isInt64Src)
    {
        // High halves were compared above; if they were equal (ZF set), the
        // result must come from an unsigned compare of the low halves.
        IR::LabelInstr* skipLow = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        newInstr = IR::BranchInstr::New(Js::OpCode::JNE, skipLow, this->m_func);
        done->InsertBefore(newInstr);

        newInstr = IR::Instr::New(cmpOp, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src2Pair.low);
        done->InsertBefore(newInstr);

        Js::OpCode lowUseCC = useCC;
        // Need to do an unsigned compare for the lower part
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmGe_I4: lowUseCC = Js::OpCode::SETAE; break;
        case Js::OpCode::CmGt_I4: lowUseCC = Js::OpCode::SETA; break;
        case Js::OpCode::CmLe_I4: lowUseCC = Js::OpCode::SETBE; break;
        case Js::OpCode::CmLt_I4: lowUseCC = Js::OpCode::SETB; break;
        }

        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(lowUseCC, tmp_i8, tmp_i8, this->m_func);
        done->InsertBefore(newInstr);
        done->InsertBefore(skipLow);
    }
#endif

    // Copy the temporary into the real dst if we had to avoid an overlap.
    if (tmp != dst)
    {
        newInstr = IR::Instr::New(Js::OpCode::MOV, dst, tmp, this->m_func);
        instr->InsertBefore(newInstr);
    }

    instr->Remove();
}
// Lower Conv_Bool to a branch-free select: dst = (src1 != 0) ? true : false.
// Consumes (removes) the original instruction and returns the first
// instruction of the generated sequence.
//
//      TEST src1, src1
//      dst = MOV true
//      rf = MOV false
//      dst = CMOV dst, rf
IR::Instr * LowererMD::GenerateConvBool(IR::Instr *instr)
{
    IR::Instr *instrNew, *instrFirst;
    IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *regFalse;

    // TEST src1, src1 -- sets ZF when src1 is zero
    instrFirst = instrNew = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instrNew->SetSrc1(instr->GetSrc1());
    instrNew->SetSrc2(instr->GetSrc1());
    instr->InsertBefore(instrNew);

    // dst = MOV true
    Lowerer::InsertMove(dst, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);

    // rf = MOV false
    regFalse = IR::RegOpnd::New(TyMachPtr, this->m_func);
    Lowerer::InsertMove(regFalse, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);

    // Add dst as src1 of CMOV to create a pseudo use of dst. Otherwise, the register allocator
    // won't know the previous dst is needed, and needed in the same register as the dst of the CMOV.

    // dst = CMOVE dst, rf -- select 'false' when src1 was zero
    instrNew = IR::Instr::New(Js::OpCode::CMOVE, dst, dst, regFalse, this->m_func);
    instr->InsertBefore(instrNew);

    instr->Remove();
    return instrFirst;
}
  2632. ///----------------------------------------------------------------------------
  2633. ///
  2634. /// LowererMD::GenerateFastAdd
  2635. ///
  2636. /// NOTE: We assume that only the sum of two Int31's will have 0x2 set. This
  2637. /// is only true until we have a var type with tag == 0x2.
  2638. ///
  2639. ///----------------------------------------------------------------------------
// Emit the tagged-int fast path for Add. Returns true when the fast path was
// generated (the caller then emits the helper call after $helper); returns
// false when no fast path applies and the caller should go straight to the
// helper.
bool
LowererMD::GenerateFastAdd(IR::Instr * instrAdd)
{
    // Given:
    //
    // dst = Add src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1          -- Get rid of one of the tag [Int31 only]
    // s1 = ADD s1, src2    -- try an inline add
    //      JO $helper      -- bail if the add overflowed
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *      instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd *       opndReg;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndSrc2;

    opndSrc1 = instrAdd->GetSrc1();
    opndSrc2 = instrAdd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");

    // Generate fastpath for Incr_A anyway -
    // Incrementing strings representing integers can be inter-mixed with integers e.g. "1"++ -> converts 1 to an int and thereafter, integer increment is expected.
    if (opndSrc1->IsRegOpnd() && (opndSrc1->AsRegOpnd()->IsNotInt() || opndSrc1->GetValueType().IsString()
        || (instrAdd->m_opcode != Js::OpCode::Incr_A && opndSrc1->GetValueType().IsLikelyString())))
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && (opndSrc2->AsRegOpnd()->IsNotInt() ||
        opndSrc2->GetValueType().IsLikelyString()))
    {
        return false;
    }

    // Tagged ints? If both are statically known tagged, the runtime tag test
    // and the $helper path can be skipped entirely.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrAdd, opndSrc1, opndSrc2, labelHelper);
    }

    if (opndSrc1->IsAddrOpnd())
    {
        // If opnd1 is a constant, just swap them.
        IR::Opnd *opndTmp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = opndTmp;
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrAdd->InsertBefore(instr);

#if !INT32VAR
    // Int31 scheme: strip one tag so that (s1 - 1) + src2 yields a correctly
    // tagged sum. For constant src2, fold the DEC into the constant instead.
    if (opndSrc2->IsAddrOpnd())
    {
        Assert(opndSrc2->AsAddrOpnd()->GetAddrOpndKind() == IR::AddrOpndKindConstantVar);
        opndSrc2 = IR::IntConstOpnd::New(*((int *)&(opndSrc2->AsAddrOpnd()->m_address)) - 1, TyInt32, this->m_func, opndSrc2->AsAddrOpnd()->m_dontEncode);
        opndSrc2 = opndSrc2->Use(this->m_func);
    }
    else if (opndSrc2->IsIntConstOpnd())
    {
        Assert(opndSrc2->GetType() == TyInt32);
        opndSrc2 = opndSrc2->Use(this->m_func);
        opndSrc2->AsIntConstOpnd()->DecrValue(1);
    }
    else
    {
        // s1 = DEC s1
        opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::DEC, opndReg, opndReg, this->m_func);
        instrAdd->InsertBefore(instr);
    }
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
#else
    // Int32-in-var scheme: operate on the untagged low 32 bits directly.
    if (opndSrc2->IsAddrOpnd())
    {
        // truncate to untag
        int value = ::Math::PointerCastToIntegralTruncate<int>(opndSrc2->AsAddrOpnd()->m_address);
        if (value == 1)
        {
            instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
        }
        else
        {
            opndSrc2 = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
            instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
        }
    }
    else
    {
        instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2->UseWithNewType(TyInt32, this->m_func), this->m_func);
    }
#endif

    // s1 = ADD s1, src2
    instrAdd->InsertBefore(instr);
    Legalize(instr);

    // JO $helper -- signed overflow means the result doesn't fit an int32
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrAdd->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrAdd);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrAdd->GetDst(), opndReg, this->m_func);
    instrAdd->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrAdd->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrAdd->InsertBefore(labelHelper);
    instrAdd->InsertAfter(labelFallThru);

    return true;
}
  2785. ///----------------------------------------------------------------------------
  2786. ///
  2787. /// LowererMD::GenerateFastSub
  2788. ///
  2789. ///
  2790. ///----------------------------------------------------------------------------
// Emit the tagged-int fast path for Sub. Returns true when the fast path was
// generated (the caller then emits the helper call after $helper).
bool
LowererMD::GenerateFastSub(IR::Instr * instrSub)
{
    // Given:
    //
    // dst = Sub src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = SUB s1, src2    -- try an inline sub
    //      JO $helper      -- bail if the subtract overflowed
    // s1 = INC s1          -- restore the var tag on the result [Int31 only]
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *      instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd *       opndReg;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndSrc2;

    opndSrc1 = instrSub->GetSrc1();
    opndSrc2 = instrSub->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Sub instruction");

    // Not tagged ints? Known-non-int operands can't take the fast path.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }

    // Tagged ints? If both are statically known tagged, skip the runtime test.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrSub, opndSrc1, opndSrc2, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrSub->InsertBefore(instr);

    // s1 = SUB s1, src2 -- subtracting two tagged values cancels the tags,
    // so no pre-untagging is needed (unlike Add)
    instr = IR::Instr::New(Js::OpCode::SUB, opndReg, opndReg, opndSrc2, this->m_func);
    instrSub->InsertBefore(instr);

    // JO $helper
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrSub->InsertBefore(instr);

#if !INT32VAR
    // s1 = INC s1 -- put the cancelled Int31 tag back
    instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
    instrSub->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrSub);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrSub->GetDst(), opndReg, this->m_func);
    instrSub->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrSub->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrSub->InsertBefore(labelHelper);
    instrSub->InsertAfter(labelFallThru);

    return true;
}
  2891. ///----------------------------------------------------------------------------
  2892. ///
  2893. /// LowererMD::GenerateFastMul
  2894. ///
  2895. ///----------------------------------------------------------------------------
// Emit the tagged-int fast path for Mul. Returns true when lowering should
// continue with the helper call after $helper (also true when a known
// non-int operand means only the helper path is needed).
bool
LowererMD::GenerateFastMul(IR::Instr * instrMul)
{
    // Given:
    //
    // dst = Mul src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1          -- clear the var tag from the value to be multiplied [Int31 only]
    // s2 = MOV src2
    // s2 = SAR s2, Js::VarTag_Shift  -- extract the real src2 amount from the var [Int31 only]
    // s1 = IMUL s1, s2     -- do the signed mul
    //      JO $helper      -- bail if the result overflowed
    // s3 = MOV s1
    //      TEST s3, s3     -- Check result is 0. might be -0. Result is -0 when a negative number is multiplied with 0.
    //      JEQ $zero
    //      JMP $nonzero
    // $zero:               -- result of mul was 0. try to check for -0
    // s2 = ADD s2, src1    -- Add src1 to s2
    //      JGT $nonzero    -- positive 0. [Int31 only]
    //      JGE $nonzero    -- positive 0. [Int32 only]
    // dst = ToVar(-0.0)    -- load negative 0
    //      JMP $fallthru
    // $nonzero:
    // s3 = INC s3          -- restore the var tag on the result [Int31 only]
    // s3 = OR s3, AtomTag_IntPtr [Int32 only]
    // dst = MOV s3
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelNonZero;
    IR::Instr *      instr;
    IR::RegOpnd *    opndReg1;
    IR::RegOpnd *    opndReg2;
    IR::RegOpnd *    s3;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndSrc2;

    opndSrc1 = instrMul->GetSrc1();
    opndSrc2 = instrMul->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on mul instruction");

    // Known non-int operand: no fast path, but the caller still emits the
    // helper call (hence true, unlike GenerateFastAdd/Sub which return false).
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // (If not 2 Int31's, jump to $helper.)
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    labelNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    this->GenerateSmIntPairTest(instrMul, opndSrc1, opndSrc2, labelHelper);

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    // Keep any immediate operand in the src2 position.
    if (opndSrc1->IsImmediateOpnd())
    {
        IR::Opnd * temp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = temp;
    }

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);

#if !INT32VAR
    // s1 = DEC s1 -- clear the Int31 tag; only one factor may stay scaled
    instr = IR::Instr::New(Js::OpCode::DEC, opndReg1, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    if (opndSrc2->IsImmediateOpnd())
    {
        Assert(opndSrc2->IsAddrOpnd() && opndSrc2->AsAddrOpnd()->IsVar());

        // Constant var: untag at compile time.
        IR::Opnd *opnd2 = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address), TyInt32, this->m_func);

        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opnd2, this->m_func);
        instrMul->InsertBefore(instr);
    }
    else
    {
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, this->m_func);
        instrMul->InsertBefore(instr);

#if !INT32VAR
        // s2 = SAR s2, Js::VarTag_Shift -- fully untag the second factor
        instr = IR::Instr::New(
            Js::OpCode::SAR, opndReg2, opndReg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrMul->InsertBefore(instr);
#endif
    }

    // s1 = IMUL s1, s2
    instr = IR::Instr::New(Js::OpCode::IMUL2, opndReg1, opndReg1, opndReg2, this->m_func);
    instrMul->InsertBefore(instr);

    // JO $helper -- product doesn't fit in 32 bits
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrMul->InsertBefore(instr);

    // MOV s3, s1
    s3 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, s3, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);

    // TEST s3, s3 -- a zero product may actually be -0 (negative * 0)
    instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(s3);
    instr->SetSrc2(s3);
    instrMul->InsertBefore(instr);

    // JEQ $zero
    IR::LabelInstr *labelZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelZero, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $nonzero
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // $zero:
    instrMul->InsertBefore(labelZero);

    // s2 = ADD s2, src1 -- sum of the factors is negative iff the zero
    // product should be -0 (one factor negative, the other zero)
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg2, opndReg2, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);
    Legalize(instr);

    // JGT $nonzero [Int31] / JGE $nonzero [Int32] -- positive zero
#if INT32VAR
    Js::OpCode greaterOpCode = Js::OpCode::JGE;
#else
    Js::OpCode greaterOpCode = Js::OpCode::JGT;
#endif
    instr = IR::BranchInstr::New(greaterOpCode, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // dst = ToVar(-0.0) -- load negative 0
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), m_lowerer->LoadLibraryValueOpnd(instrMul, LibraryValue::ValueNegativeZero), this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $nonzero:
    instrMul->InsertBefore(labelNonZero);

#if !INT32VAR
    // s3 = INC s3 -- re-apply the Int31 tag
    instr = IR::Instr::New(Js::OpCode::INC, s3, s3, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    // Cast is fine. We know ChangeType returns IR::Opnd * but it
    // preserves the Type.
    //
    if(TyMachReg != s3->GetType())
    {
        s3 = static_cast<IR::RegOpnd *>(s3->UseWithNewType(TyMachPtr, this->m_func));
    }

#if INT32VAR
    // s3 = OR s3, AtomTag_IntPtr
    GenerateInt32ToVarConversion(s3, instrMul);
#endif

    // dst = MOV s3
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), s3, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrMul->InsertBefore(labelHelper);
    instrMul->InsertAfter(labelFallThru);

    return true;
}
// Emit the tagged-int fast path for Neg. Returns true when the fast path was
// generated (caller emits the helper call after $helper); returns false when
// the operand folded to a constant and no helper is needed at all.
bool
LowererMD::GenerateFastNeg(IR::Instr * instrNeg)
{
    // Given:
    //
    // dst = Neg src
    //
    // Generate:
    //
    //       if not int, jump $helper
    //       if src == 0     -- test for zero (must be handled by the runtime to preserve
    //       JEQ $helper     -- difference btw +0 and -0)
    // dst = MOV src
    // dst = NEG dst        -- do an inline NEG
    // dst = ADD dst, 2     -- restore the var tag on the result [int31 only]
    //       JO $helper
    // dst = OR dst, AtomTag_Ptr [int32 only]
    //       JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *      instr;
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru = nullptr;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndDst;
    bool usingNewDst = false;
    opndSrc1 = instrNeg->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Neg instruction");

    // NEG reads and writes the same register, so use a temp when dst aliases src.
    if(opndSrc1->IsEqual(instrNeg->GetDst()))
    {
        usingNewDst = true;
        opndDst = IR::RegOpnd::New(TyInt32, this->m_func);
    }
    else
    {
        opndDst = instrNeg->GetDst()->UseWithNewType(TyInt32, this->m_func);
    }

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        // Constant operand: fold the negation at JIT time and turn the whole
        // instruction into a plain assignment.
        IR::Opnd *newOpnd;
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();

        if (value == 0)
        {
            // If the negate operand is zero, the result is -0.0, which is a Number rather than an Int31.
            newOpnd = m_lowerer->LoadLibraryValueOpnd(instrNeg, LibraryValue::ValueNegativeZero);
        }
        else
        {
            // negation below can overflow because max negative int32 value > max positive value by 1.
            newOpnd = IR::AddrOpnd::NewFromNumber(-(int64)value, m_func);
        }

        instrNeg->ClearBailOutInfo();
        instrNeg->FreeSrc1();
        instrNeg->SetSrc1(newOpnd);
        instrNeg = this->ChangeToAssign(instrNeg);

        // Skip lowering call to helper
        return false;
    }

    bool isInt = (opndSrc1->IsTaggedInt());

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isInt)
    {
        GenerateSmIntTest(opndSrc1, instrNeg, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // Zero must go to the helper so -0 can be produced as a Number.
    GenerateTaggedZeroTest(opndSrc1, instrNeg, labelHelper);

    // dst = MOV src
    instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, this->m_func);
    instrNeg->InsertBefore(instr);

    // dst = NEG dst
    instr = IR::Instr::New(Js::OpCode::NEG, opndDst, opndDst, this->m_func);
    instrNeg->InsertBefore(instr);

#if !INT32VAR
    // dst = ADD dst, 2 -- NEG flipped the Int31 tag bit; +2 restores the tag
    instr = IR::Instr::New(Js::OpCode::ADD, opndDst, opndDst, IR::IntConstOpnd::New(2, TyInt32, this->m_func), this->m_func);
    instrNeg->InsertBefore(instr);
#endif

    // JO $helper
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrNeg->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndDst->GetType())
    {
        opndDst = opndDst->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    GenerateInt32ToVarConversion(opndDst, instrNeg);
#endif

    if(usingNewDst)
    {
        instr = IR::Instr::New(Js::OpCode::MOV, instrNeg->GetDst(), opndDst, this->m_func);
        instrNeg->InsertBefore(instr);
    }

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrNeg->InsertBefore(instr);

    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNeg->InsertBefore(labelHelper);
    instrNeg->InsertAfter(labelFallThru);

    return true;
}
  3190. void
  3191. LowererMD::GenerateFastBrS(IR::BranchInstr *brInstr)
  3192. {
  3193. IR::Opnd *src1 = brInstr->UnlinkSrc1();
  3194. Assert(src1->IsIntConstOpnd() || src1->IsAddrOpnd() || src1->IsRegOpnd());
  3195. IR::Instr *cmpInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  3196. cmpInstr->SetSrc1(m_lowerer->LoadOptimizationOverridesValueOpnd(brInstr, OptimizationOverridesValue::OptimizationOverridesSideEffects));
  3197. cmpInstr->SetSrc2(src1);
  3198. brInstr->InsertBefore(cmpInstr);
  3199. Legalize(cmpInstr);
  3200. Js::OpCode opcode = Js::OpCode::InvalidOpCode;
  3201. switch(brInstr->m_opcode)
  3202. {
  3203. case Js::OpCode::BrHasSideEffects:
  3204. opcode = Js::OpCode::JNE;
  3205. break;
  3206. case Js::OpCode::BrNotHasSideEffects:
  3207. opcode = Js::OpCode::JEQ;
  3208. break;
  3209. default:
  3210. Assert(UNREACHED);
  3211. __assume(false);
  3212. }
  3213. brInstr->m_opcode = opcode;
  3214. }
  3215. ///----------------------------------------------------------------------------
  3216. ///
  3217. /// LowererMD::GenerateSmIntPairTest
  3218. ///
  3219. /// Generate code to test whether the given operands are both Int31 vars
  3220. /// and branch to the given label if not.
  3221. ///
  3222. ///----------------------------------------------------------------------------
  3223. #if !INT32VAR
// (!INT32VAR / Int31-tagging variant)
// Test whether opndSrc1 and opndSrc2 are both Int31 vars; branch to
// labelFail if not. Returns the instruction that preceded the generated
// sequence.
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    IR::Opnd *  opndReg;
    IR::Instr * instrPrev = instrInsert->m_prev;
    IR::Instr * instr;

    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);

    // If one operand is statically known tagged, move it to src2 so only
    // src1 needs a runtime check.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }

    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically tagged: no runtime test needed.
            return instrPrev;
        }

        // TEST src1, AtomTag
        // JEQ $fail
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc1);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
        instrInsert->InsertBefore(instr);
    }
    else
    {
        // Neither operand is known tagged: mask src1 down to its tag bit,
        // then TEST against src2 -- the result is nonzero only when both
        // tag bits are set.
        //      s1 = MOV src1
        //      s1 = AND s1, 1
        //      TEST s1, src2
        //      JEQ $fail

        // s1 = MOV src1
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = AND s1, AtomTag
        instr = IR::Instr::New(
            Js::OpCode::AND, opndReg, opndReg, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // TEST s1, src2
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(opndSrc2);
        instrInsert->InsertBefore(instr);
    }

    // JEQ $fail -- at least one operand was missing its tag bit
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
  3280. #else
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    // INT32VAR variant: vars carry their tag in the upper 32 bits, so the
    // pair check combines both tags into one 32-bit compare against
    // AtomTag_Pair. Branches to labelFail if either operand is not a tagged
    // int; returns the instruction preceding the generated sequence.
    IR::Opnd * opndReg;
    IR::Instr * instrPrev = instrInsert->m_prev;
    IR::Instr * instr;

    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);

    // Canonicalize: move any statically-known tagged int into opndSrc2.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }

    if (opndSrc2->IsTaggedInt())
    {
        // Both statically tagged: no runtime check needed.
        if (opndSrc1->IsTaggedInt())
        {
            return instrPrev;
        }

        // Only src1 needs a runtime check; delegate to the single-operand test.
        GenerateSmIntTest(opndSrc1, instrInsert, labelFail);
        return instrPrev;
    }
    else
    {
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
#ifdef SHIFTLOAD
        // Double-shift both tags into one register in two instructions.
        instr = IR::Instr::New(Js::OpCode::SHLD, opndReg, opndSrc1, IR::IntConstOpnd::New(16, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        instr = IR::Instr::New(Js::OpCode::SHLD, opndReg, opndSrc2, IR::IntConstOpnd::New(16, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);
#else
        IR::Opnd * opndReg1;

        //      s1 = MOV src1
        //      s1 = SHR s1, VarTag_Shift
        //      s2 = MOV src2
        //      s2 = SHR s2, 32
        //      s1 = OR s1, s2        ------ move both tags to the lower 32 bits
        //      CMP s1, AtomTag_Pair  ------ compare the tags together to the expected tag pair
        //      JNE $fail

        // s1 = MOV src1
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = SHR s1, VarTag_Shift
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // s2 = MOV src2
        opndReg1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc2, this->m_func);
        instrInsert->InsertBefore(instr);

        // s2 = SHR s2, 32 -- bring src2's tag down to the low 32 bits
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg1, opndReg1, IR::IntConstOpnd::New(32, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = OR s1, s2 -- combine both tags for a single compare
        instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndReg1, this->m_func);
        instrInsert->InsertBefore(instr);
#endif
        // Compare only the low 32 bits, which now hold the combined tags.
        opndReg = opndReg->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();

        // CMP s1, AtomTag_Pair
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag_Pair, TyInt32, this->m_func, true));
        instrInsert->InsertBefore(instr);
    }

    // JNE $fail
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
  3354. #endif
  3355. IR::BranchInstr *
  3356. LowererMD::GenerateLocalInlineCacheCheck(
  3357. IR::Instr * instrLdSt,
  3358. IR::RegOpnd * opndType,
  3359. IR::RegOpnd * inlineCache,
  3360. IR::LabelInstr * labelNext,
  3361. bool checkTypeWithoutProperty)
  3362. {
  3363. // Generate:
  3364. //
  3365. // CMP s1, [&(inlineCache->u.local.type/typeWithoutProperty)]
  3366. // JNE $next
  3367. IR::Instr * instr;
  3368. IR::Opnd* typeOpnd;
  3369. if (checkTypeWithoutProperty)
  3370. {
  3371. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.typeWithoutProperty), TyMachReg, instrLdSt->m_func);
  3372. }
  3373. else
  3374. {
  3375. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrLdSt->m_func);
  3376. }
  3377. // CMP type, [&(inlineCache->u.local.type/typeWithoutProperty)]
  3378. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3379. instr->SetSrc1(opndType);
  3380. instr->SetSrc2(typeOpnd);
  3381. instrLdSt->InsertBefore(instr);
  3382. // JNE $next
  3383. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelNext, instrLdSt->m_func);
  3384. instrLdSt->InsertBefore(branchInstr);
  3385. return branchInstr;
  3386. }
  3387. IR::BranchInstr *
  3388. LowererMD::GenerateProtoInlineCacheCheck(
  3389. IR::Instr * instrLdSt,
  3390. IR::RegOpnd * opndType,
  3391. IR::RegOpnd * inlineCache,
  3392. IR::LabelInstr * labelNext)
  3393. {
  3394. // Generate:
  3395. //
  3396. // CMP s1, [&(inlineCache->u.proto.type)]
  3397. // JNE $next
  3398. IR::Instr * instr;
  3399. IR::Opnd* typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.type), TyMachReg, instrLdSt->m_func);
  3400. // CMP s1, [&(inlineCache->u.proto.type)]
  3401. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3402. instr->SetSrc1(opndType);
  3403. instr->SetSrc2(typeOpnd);
  3404. instrLdSt->InsertBefore(instr);
  3405. // JNE $next
  3406. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelNext, instrLdSt->m_func);
  3407. instrLdSt->InsertBefore(branchInstr);
  3408. return branchInstr;
  3409. }
  3410. IR::BranchInstr *
  3411. LowererMD::GenerateFlagInlineCacheCheck(
  3412. IR::Instr * instrLdSt,
  3413. IR::RegOpnd * opndType,
  3414. IR::RegOpnd * opndInlineCache,
  3415. IR::LabelInstr * labelNext)
  3416. {
  3417. // Generate:
  3418. //
  3419. // CMP s1, [&(inlineCache->u.accessor.type)]
  3420. // JNE $next
  3421. IR::Instr * instr;
  3422. IR::Opnd* typeOpnd;
  3423. typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.type), TyMachReg, instrLdSt->m_func);
  3424. // CMP s1, [&(inlineCache->u.flag.type)]
  3425. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3426. instr->SetSrc1(opndType);
  3427. instr->SetSrc2(typeOpnd);
  3428. instrLdSt->InsertBefore(instr);
  3429. // JNE $next
  3430. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelNext, instrLdSt->m_func);
  3431. instrLdSt->InsertBefore(branchInstr);
  3432. return branchInstr;
  3433. }
IR::BranchInstr *
LowererMD::GenerateFlagInlineCacheCheckForNoGetterSetter(
    IR::Instr * instrLdSt,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelNext)
{
    // Emit a check that the accessor cache has neither the getter nor the
    // setter flag set, branching to labelNext if either is present.
    // Returns the emitted branch.
    //
    // Generate:
    //
    //      TEST [&(inlineCache->u.accessor.flags)], (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag)
    //      JNE $next
    IR::Instr * instr;
    IR::Opnd* flagsOpnd;

    // NOTE(review): the flags byte is read at offset 0 of the cache, and the
    // flag mask is shifted left by 1 -- presumably matching the flags' bit
    // position inside u.accessor.rawUInt16. Confirm against the
    // Js::InlineCache layout before changing either constant.
    flagsOpnd = IR::IndirOpnd::New(opndInlineCache, 0, TyInt8, instrLdSt->m_func);

    // TEST [flags], (getter | setter) flags
    instr = IR::Instr::New(Js::OpCode::TEST,instrLdSt->m_func);
    instr->SetSrc1(flagsOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New((Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) << 1, TyInt8, instrLdSt->m_func));
    instrLdSt->InsertBefore(instr);

    // JNE $next -- an accessor flag is set; this fast path doesn't apply
    IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelNext, instrLdSt->m_func);
    instrLdSt->InsertBefore(branchInstr);
    return branchInstr;
}
void
LowererMD::GenerateFlagInlineCacheCheckForGetterSetter(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelNext)
{
    // Emit a check that the accessor cache has the relevant getter/setter
    // flag(s) set, branching to labelNext when none are set.
    // Which flags are interesting depends on which inlining phases are on.
    uint accessorFlagMask;
    if (PHASE_OFF(Js::InlineGettersPhase, insertBeforeInstr->m_func))
    {
        // Getters are not inlined: only a setter cache is useful here.
        accessorFlagMask = Js::InlineCache::GetSetterFlagMask();
    }
    else if (PHASE_OFF(Js::InlineSettersPhase, insertBeforeInstr->m_func))
    {
        // Setters are not inlined: only a getter cache is useful here.
        accessorFlagMask = Js::InlineCache::GetGetterFlagMask();
    }
    else
    {
        accessorFlagMask = Js::InlineCache::GetGetterSetterFlagMask();
    }

    // Generate:
    //
    //      TEST [&(inlineCache->u.accessor.flags)], accessorFlagMask
    //      JEQ $next
    IR::Instr * instr;
    IR::Opnd* flagsOpnd;

    // The flags live inside u.accessor.rawUInt16; read the low byte.
    flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);

    // TEST [flags], mask
    instr = IR::Instr::New(Js::OpCode::TEST,this->m_func);
    instr->SetSrc1(flagsOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New(accessorFlagMask, TyInt8, this->m_func));
    insertBeforeInstr->InsertBefore(instr);

    // JEQ $next -- no relevant flag set
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelNext, this->m_func);
    insertBeforeInstr->InsertBefore(instr);
}
void
LowererMD::GenerateLdFldFromLocalInlineCache(
    IR::Instr * instrLdFld,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * inlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Emit the fast-path field load for a local inline cache hit: read the
    // cached slot index and load the value from the object's inline slots
    // (isInlineSlot) or its aux slot array, then jump to labelFallThru.
    //
    // Generate:
    //
    //      s1 = MOV base->slots                               -- load the slot array (aux-slot case only)
    //      s2 = MOVZXw [&(inlineCache->u.local.slotIndex)]    -- load the cached slot index
    //      dst = MOV [s1 + s2 * Scale]                        -- load the value directly from the slot
    //      JMP $fallthru
    IR::Instr * instr;
    IR::Opnd* slotIndexOpnd;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndSlotArray = nullptr;

    if (!isInlineSlot)
    {
        // s1 = MOV base->slots -- load the aux slot array pointer
        opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
        opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- zero-extend the 16-bit cached slot index
    IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
    slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVZXW, opndReg2, slotIndexOpnd, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (isInlineSlot)
    {
        // dst = MOV [base + s2 * Scale] -- inline slots live directly in the object
        opndIndir = IR::IndirOpnd::New(opndBase, opndReg2, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }
    else
    {
        // dst = MOV [s1 + s2 * Scale] -- load from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndSlotArray, opndReg2, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // JMP $fallthru -- skip the slow path
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);
}
void
LowererMD::GenerateLdLocalFldFromFlagInlineCache(
    IR::Instr * instrLdFld,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Emit the fast-path load for a flag (accessor) inline cache hit where
    // the value lives on the base object itself: read the cached slot index
    // and load from the object's inline slots or aux slot array, then jump
    // to labelFallThru.
    //
    // Generate:
    //
    //      s1 = MOV [&base->slots]                            -- load the slot array (aux-slot case only)
    //      s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    //      dst = MOV [s1 + s2 * Scale]
    //      JMP $fallthru
    IR::Instr * instr;
    IR::Opnd* slotIndexOpnd;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndSlotArray = nullptr;

    if (!isInlineSlot)
    {
        // s1 = MOV [&base->slots] -- load the aux slot array pointer
        opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
        opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- zero-extend the 16-bit cached slot index
    IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
    slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (isInlineSlot)
    {
        // dst = MOV [base + s2 * Scale] -- inline slots live directly in the object
        opndIndir = IR::IndirOpnd::New(opndBase, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }
    else
    {
        // dst = MOV [s1 + s2 * Scale] -- load from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // JMP $fallthru -- skip the slow path
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);
}
void
LowererMD::GenerateLdFldFromFlagInlineCache(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Emit the fast-path load for a flag (accessor) inline cache hit where
    // the value lives on the cached object stored in the cache (not on the
    // base): load the cached object, then the cached slot's value, then jump
    // to labelFallThru. Note: opndBase is unused here; the load goes through
    // u.accessor.object.
    //
    // Generate:
    //
    //      s1 = MOV [&(inlineCache->u.accessor.object)]       -- load the cached object
    //      s1 = MOV [&s1->slots]                              -- load the slot array (aux-slot case only)
    //      s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    //      dst = MOV [s1 + s2 * Scale]
    //      JMP $fallthru
    IR::Instr * instr;
    IR::Opnd* inlineCacheObjOpnd;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndObjSlots = nullptr;

    inlineCacheObjOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);

    // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached object
    IR::RegOpnd *opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndObject, inlineCacheObjOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(instr);

    if (!isInlineSlot)
    {
        // s1 = MOV [&s1->slots] -- load the cached object's aux slot array
        opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
        opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndObjSlots, opndIndir, this->m_func);
        insertBeforeInstr->InsertBefore(instr);
    }

    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- zero-extend the 16-bit cached slot index
    IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(instr);

    if (isInlineSlot)
    {
        // dst = MOV [object + s2 * Scale] -- inline slots live directly in the object
        opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, this->lowererMDArch.GetDefaultIndirScale(), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, this->m_func);
        insertBeforeInstr->InsertBefore(instr);
    }
    else
    {
        // dst = MOV [s1 + s2 * Scale] -- load from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, this->lowererMDArch.GetDefaultIndirScale(), TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, this->m_func);
        insertBeforeInstr->InsertBefore(instr);
    }

    // JMP $fallthru -- skip the slow path
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    insertBeforeInstr->InsertBefore(instr);
}
void
LowererMD::GenerateLdFldFromProtoInlineCache(
    IR::Instr * instrLdFld,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * inlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Emit the fast-path load for a proto inline cache hit: the property
    // lives on the cached prototype object, so load that object, then the
    // cached slot's value, then jump to labelFallThru. Note: opndBase is
    // unused here; the load goes through u.proto.prototypeObject.
    //
    // Generate:
    //
    //      s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
    //      s1 = MOV [&s1->slots]                              -- load the slot array (aux-slot case only)
    //      s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)]    -- load the cached slot index
    //      dst = MOV [s1 + s2 * Scale]
    //      JMP $fallthru
    IR::Instr * instr;
    IR::Opnd* inlineCacheProtoOpnd;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndProtoSlots = nullptr;

    inlineCacheProtoOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.prototypeObject), TyMachReg, instrLdFld->m_func);

    // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
    IR::RegOpnd *opndProto = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndProto, inlineCacheProtoOpnd, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (!isInlineSlot)
    {
        // s1 = MOV [&s1->slots] -- load the prototype's aux slot array
        opndProtoSlots = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
        opndIndir = IR::IndirOpnd::New(opndProto, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndProtoSlots, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- zero-extend the 16-bit cached slot index
    IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
    IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.slotIndex), TyUint16, instrLdFld->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);

    if (isInlineSlot)
    {
        // dst = MOV [proto + s2 * Scale] -- inline slots live directly in the prototype
        opndIndir = IR::IndirOpnd::New(opndProto, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }
    else
    {
        // dst = MOV [s1 + s2 * Scale] -- load from the aux slot array
        opndIndir = IR::IndirOpnd::New(opndProtoSlots, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
        instrLdFld->InsertBefore(instr);
    }

    // JMP $fallthru -- skip the slow path
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrLdFld->m_func);
    instrLdFld->InsertBefore(instr);
}
  3702. void
  3703. LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndTaggedType)
  3704. {
  3705. // Generate
  3706. //
  3707. // MOV taggedType, type
  3708. // OR taggedType, InlineCacheAuxSlotTypeTag
  3709. // MOV taggedType, type
  3710. {
  3711. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, opndTaggedType, opndType, instrLdSt->m_func);
  3712. instrLdSt->InsertBefore(instrMov);
  3713. }
  3714. // OR taggedType, InlineCacheAuxSlotTypeTag
  3715. {
  3716. IR::IntConstOpnd * opndAuxSlotTag = IR::IntConstOpnd::New(InlineCacheAuxSlotTypeTag, TyMachPtr, instrLdSt->m_func);
  3717. IR::Instr * instrAnd = IR::Instr::New(Js::OpCode::OR, opndTaggedType, opndTaggedType, opndAuxSlotTag, instrLdSt->m_func);
  3718. instrLdSt->InsertBefore(instrAnd);
  3719. }
  3720. }
  3721. ///----------------------------------------------------------------------------
  3722. ///
  3723. /// LowererMD::GenerateFastLdMethodFromFlags
  3724. ///
  3725. /// Make use of the helper to cache the type and slot index used to do a LdFld
  3726. /// and do an inline load from the appropriate slot if the type hasn't changed
  3727. /// since the last time this LdFld was executed.
  3728. ///
  3729. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
{
    // Lower a method-load (LdFld-with-flags) into an inline fast path that
    // probes the flag (accessor) inline cache twice -- once with the raw type
    // (inline slots) and once with the aux-slot-tagged type -- and bails out
    // on any miss or getter/setter hit. Returns true (fast path generated).
    IR::LabelInstr *   labelFallThru;
    IR::LabelInstr *   bailOutLabel;
    IR::Opnd *         opndSrc;
    IR::Opnd *         opndDst;
    IR::RegOpnd *      opndBase;
    IR::RegOpnd *      opndType;
    IR::RegOpnd *      opndInlineCache;

    opndSrc = instrLdFld->GetSrc1();

    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
              "Expected property sym operand as src of LdFldFlags");

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();

    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));

    // Reuse the object-type sym when this operand participates in a type
    // check sequence, so downstream checks can share the loaded type.
    if (propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
        StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
        opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
    }
    else
    {
        opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    }

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    opndDst = instrLdFld->GetDst();

    opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Label to jump to (or fall through to) when bailing out
    bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);

    // Load the runtime inline cache pointer into a register.
    instrLdFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), this->m_func));
    IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Check the flag cache with the untagged type
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);

    // Blindly do the check for getter flag first and then do the type check
    // We avoid repeated check for getter flag when the function object may be in either
    // inline slots or auxiliary slots
    GenerateFlagInlineCacheCheckForGetterSetter(instrLdFld, opndInlineCache, bailOutLabel);
    GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
    GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);

    // Check the flag cache with the tagged type (aux-slot layout)
    instrLdFld->InsertBefore(labelFlagAux);
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
    GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
    GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);

    instrLdFld->InsertBefore(bailOutLabel);
    instrLdFld->InsertAfter(labelFallThru);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    instrLdFld->UnlinkSrc1();
    this->m_lowerer->GenerateBailOut(instrLdFld);

    return true;
}
void
LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
{
    // Index into a polymorphic inline cache: hash the type pointer into a
    // slot index and advance opndInlineCache (in place) to point at that
    // Js::InlineCache entry. polymorphicInlineCacheSize must be a power of
    // two for the AND-mask below to work.
    //
    // Generate
    //
    //      MOV r1, type
    //      SHR r1, PolymorphicInlineCacheShift
    //      AND r1, (size - 1)
    //      SHL r1, log2(sizeof(Js::InlineCache))
    //      LEA inlineCache, [inlineCache + r1]

    // MOV r1, type
    IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, instrLdSt->m_func);
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndOffset, opndType, instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);

    IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
    IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
    // instead of generating
    // SHR r1, PolymorphicInlineCacheShift
    // AND r1, (size - 1)
    // SHL r1, log2(sizeof(Js::InlineCache))
    //
    // we can generate:
    // SHR r1, (PolymorphicInlineCacheShift - log2(sizeof(Js::InlineCache))
    // AND r1, (size - 1) << log2(sizeof(Js::InlineCache))
    Assert(rightShiftAmount > leftShiftAmount);
    instr = IR::Instr::New(Js::OpCode::SHR, opndOffset, opndOffset, IR::IntConstOpnd::New(rightShiftAmount - leftShiftAmount, TyUint8, instrLdSt->m_func, true), instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);
    instr = IR::Instr::New(Js::OpCode::AND, opndOffset, opndOffset, IR::IntConstOpnd::New(((__int64)(polymorphicInlineCacheSize - 1) << leftShiftAmount), TyMachReg, instrLdSt->m_func, true), instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);

    // LEA inlineCache, [inlineCache + r1] -- byte offset already scaled by sizeof(InlineCache)
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(opndInlineCache, opndOffset, TyMachPtr, instrLdSt->m_func);
    instr = IR::Instr::New(Js::OpCode::LEA, opndInlineCache, indirOpnd, instrLdSt->m_func);
    instrLdSt->InsertBefore(instr);
}
IR::Instr *
LowererMD::ChangeToWriteBarrierAssign(IR::Instr * assignInstr, const Func* func)
{
    // Convert assignInstr into a machine assignment and, when the JIT
    // software write barrier is enabled and the destination may be a
    // recycler-heap pointer slot, emit the write barrier after the store.
    // Returns the first instruction of the resulting sequence.
#ifdef RECYCLER_WRITE_BARRIER_JIT
    IR::Opnd* dest = assignInstr->GetDst();
    auto threadContextInfo = func->GetTopFunc()->GetThreadContextInfo();
    void* destAddr = nullptr;
    bool isPossibleBarrieredDest = false;

    // Only pointer-sized stores can install a GC-visible reference.
    if (TySize[dest->GetType()] == sizeof(void*))
    {
        if (dest->IsIndirOpnd())
        {
            Assert(!dest->AsIndirOpnd()->HasAddrKind());
            isPossibleBarrieredDest = true;
        }
        else if (dest->IsMemRefOpnd())
        {
            // looks all thread context field access are from MemRefOpnd
            destAddr = (void*)dest->AsMemRefOpnd()->GetMemLoc();

            // Exclude unaligned addresses and known thread-context fields,
            // which are not recycler-heap slots.
            isPossibleBarrieredDest = destAddr != nullptr
                && ((intptr_t)destAddr % sizeof(void*)) == 0
                && destAddr != (void*)threadContextInfo->GetImplicitCallFlagsAddr()
                && destAddr != (void*)threadContextInfo->GetDisableImplicitFlagsAddr()
                && destAddr != (void*)threadContextInfo->GetBailOutRegisterSaveSpaceAddr();

            if (isPossibleBarrieredDest)
            {
                Assert(Recycler::WBCheckIsRecyclerAddress((char*)destAddr));
            }
        }
    }
#endif

    IR::Instr * instr = ChangeToAssignNoBarrierCheck(assignInstr);

    // Now insert write barrier if necessary
#ifdef RECYCLER_WRITE_BARRIER_JIT
    if (isPossibleBarrieredDest
        && assignInstr->m_opcode == Js::OpCode::MOV // ignore SSE instructions like MOVSD
        && assignInstr->GetSrc1()->IsWriteBarrierTriggerableValue())
    {
        instr = LowererMD::GenerateWriteBarrier(assignInstr);
    }
#endif

    return instr;
}
  3862. void
  3863. LowererMD::GenerateWriteBarrierAssign(IR::MemRefOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3864. {
  3865. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3866. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3867. if (opndSrc->IsWriteBarrierTriggerableValue())
  3868. {
  3869. void * address = (void *)opndDst->AsMemRefOpnd()->GetMemLoc();
  3870. #ifdef RECYCLER_WRITE_BARRIER_BYTE
  3871. // WriteBarrier-TODO: need to pass card table address through RPC
  3872. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3873. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyInt8, insertBeforeInstr->m_func);
  3874. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntry, IR::IntConstOpnd::New(1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr->m_func);
  3875. insertBeforeInstr->InsertBefore(movInstr);
  3876. #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
  3877. if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(RecyclerVerifyMark))
  3878. {
  3879. this->LoadHelperArgument(insertBeforeInstr, opndDst);
  3880. IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
  3881. insertBeforeInstr->InsertBefore(instrCall);
  3882. this->ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
  3883. }
  3884. #endif
  3885. #else
  3886. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3887. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyMachPtr, assignInstr->m_func);
  3888. IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntry,
  3889. IR::IntConstOpnd::New(1 << ((uint)address >> 7), TyInt32, assignInstr->m_func), assignInstr->m_func);
  3890. assignInstr->InsertBefore(orInstr);
  3891. #endif
  3892. }
  3893. #endif
  3894. }
  3895. void
  3896. LowererMD::GenerateWriteBarrierAssign(IR::IndirOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3897. {
  3898. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3899. if (opndSrc->IsWriteBarrierTriggerableValue())
  3900. {
  3901. IR::RegOpnd * writeBarrierAddrRegOpnd = IR::RegOpnd::New(TyMachPtr, insertBeforeInstr->m_func);
  3902. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::LEA, writeBarrierAddrRegOpnd, opndDst, insertBeforeInstr->m_func));
  3903. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV,
  3904. IR::IndirOpnd::New(writeBarrierAddrRegOpnd, 0, TyMachReg, insertBeforeInstr->m_func), opndSrc, insertBeforeInstr->m_func);
  3905. insertBeforeInstr->InsertBefore(movInstr);
  3906. GenerateWriteBarrier(movInstr);
  3907. // The mov happens above, and it's slightly faster doing it that way since we've already calculated the address we're writing to
  3908. return;
  3909. }
  3910. #endif
  3911. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3912. return;
  3913. }
  3914. #ifdef RECYCLER_WRITE_BARRIER_JIT
IR::Instr*
LowererMD::GenerateWriteBarrier(IR::Instr * assignInstr)
{
#if defined(RECYCLER_WRITE_BARRIER_BYTE)
    // Byte-granularity card table: dirty the one-byte card covering the 4KB
    // page that assignInstr's destination writes into. Returns the first
    // instruction emitted (the LEA) so the caller can resume from it.
    PHASE_PRINT_TRACE(Js::JitWriteBarrierPhase, assignInstr->m_func, _u("Generating write barrier\n"));
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachPtr, assignInstr->m_func);
    // LEA index, dst -- materialize the address being written (placed before the store)
    IR::Instr * loadIndexInstr = IR::Instr::New(Js::OpCode::LEA, indexOpnd, assignInstr->GetDst(), assignInstr->m_func);
    assignInstr->InsertBefore(loadIndexInstr);
    // SHR index, 12 -- card index = address >> 12 (one card per 4KB page).
    // Note: this and everything after it is inserted AFTER the store itself.
    IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
        IR::IntConstOpnd::New(12 /* 1 << 12 = 4096 */, TyInt8, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertAfter(shiftBitInstr);

    // The cardtable address is likely 64 bits already so we have to load it to a register
    // That is, we have to do the following:
    // LEA reg1, targetOfWrite
    // SHR reg1, 12
    // MOV reg2, cardTableAddress
    // MOV [reg1 + reg2], 1
    //
    // Instead of doing this:
    // LEA reg1, targetOfWrite
    // SHR reg1, 12
    // MOV [cardTableAddress + reg2], 1
    //
    //TODO: (leish)(swb) hoist RecyclerWriteBarrierManager::GetAddressOfCardTable()

    IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
    IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
        IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindWriteBarrierCardTable, assignInstr->m_func),
        assignInstr->m_func);
    shiftBitInstr->InsertAfter(cardTableAddrInstr);
    // MOV byte ptr [cardTable + index], 1 -- dirty the card
    IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd,
        TyInt8, assignInstr->m_func);
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntryOpnd, IR::IntConstOpnd::New(1, TyInt8, assignInstr->m_func), assignInstr->m_func);
    cardTableAddrInstr->InsertAfter(movInstr);
    return loadIndexInstr;
#else
    // Bit-granularity card table: set bit ((address >> 7) & 31) in the 32-bit
    // card word at index (address >> 7) >> 5.
    // NOTE(review): writeBarrierAddrRegOpnd is not declared anywhere in this
    // function, and this branch never returns a value despite the IR::Instr*
    // return type -- this configuration looks stale and would not compile.
    // Verify before building without RECYCLER_WRITE_BARRIER_BYTE.
    Assert(writeBarrierAddrRegOpnd->IsRegOpnd());
    IR::RegOpnd * shiftBitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
    // Pin to the architecture's shift-count register: the variable SHL below needs it there.
    shiftBitOpnd->SetReg(LowererMDArch::GetRegShiftCount());
    IR::Instr * moveShiftBitOpnd = IR::Instr::New(Js::OpCode::MOV, shiftBitOpnd, writeBarrierAddrRegOpnd, assignInstr->m_func);
    assignInstr->InsertBefore(moveShiftBitOpnd);

    // SHR shiftBit, 7 -- one barrier bit per 128 bytes
    IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, shiftBitOpnd, shiftBitOpnd,
        IR::IntConstOpnd::New(7 /* 1 << 7 = 128 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertBefore(shiftBitInstr);

    // bit = 1 << (low bits of shiftBit)
    IR::RegOpnd * bitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
    IR::Instr * mov1Instr = IR::Instr::New(Js::OpCode::MOV, bitOpnd,
        IR::IntConstOpnd::New(1, TyInt32, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertBefore(mov1Instr);

    IR::Instr * bitInstr = IR::Instr::New(Js::OpCode::SHL, bitOpnd, bitOpnd, shiftBitOpnd, assignInstr->m_func);
    assignInstr->InsertBefore(bitInstr);

    // index = shiftBit >> 5 -- 32 bits per card word (reuses the same register)
    IR::RegOpnd * indexOpnd = shiftBitOpnd;
    IR::Instr * indexInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
        IR::IntConstOpnd::New(5 /* 1 << 5 = 32 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertBefore(indexInstr);

    IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
    IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
        IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindDynamicMisc, assignInstr->m_func),
        assignInstr->m_func);
    assignInstr->InsertBefore(cardTableAddrInstr);

    // cardTable[index] |= bit
    IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd, LowererMDArch::GetDefaultIndirScale(),
        TyInt32, assignInstr->m_func);
    IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntryOpnd, cardTableEntryOpnd,
        bitOpnd, assignInstr->m_func);
    assignInstr->InsertBefore(orInstr);
#endif
}
  3980. #endif
  3981. void
  3982. LowererMD::GenerateStFldFromLocalInlineCache(
  3983. IR::Instr * instrStFld,
  3984. IR::RegOpnd * opndBase,
  3985. IR::Opnd * opndSrc,
  3986. IR::RegOpnd * inlineCache,
  3987. IR::LabelInstr * labelFallThru,
  3988. bool isInlineSlot)
  3989. {
  3990. IR::Instr * instr;
  3991. IR::Opnd* slotIndexOpnd;
  3992. IR::RegOpnd * opndIndirBase = opndBase;
  3993. if (!isInlineSlot)
  3994. {
  3995. // slotArray = MOV base->slots -- load the slot array
  3996. IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3997. IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrStFld->m_func);
  3998. instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrStFld->m_func);
  3999. instrStFld->InsertBefore(instr);
  4000. opndIndirBase = opndSlotArray;
  4001. }
  4002. // slotIndex = MOV [&inlineCache->u.local.inlineSlotOffsetOrAuxSlotIndex] -- load the cached slot offset or index
  4003. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  4004. slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrStFld->m_func);
  4005. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrStFld->m_func);
  4006. instrStFld->InsertBefore(instr);
  4007. // [base + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  4008. // [slotArray + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  4009. IR::IndirOpnd * storeLocIndirOpnd = IR::IndirOpnd::New(opndIndirBase, opndSlotIndex,
  4010. LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  4011. GenerateWriteBarrierAssign(storeLocIndirOpnd, opndSrc, instrStFld);
  4012. // JMP $fallthru
  4013. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrStFld->m_func);
  4014. instrStFld->InsertBefore(instr);
  4015. }
  4016. //----------------------------------------------------------------------------
  4017. //
  4018. // LowererMD::GenerateFastScopedLdFld
  4019. //
  4020. // Make use of the helper to cache the type and slot index used to do a ScopedLdFld
  4021. // when the scope is an array of length 1.
  4022. // Extract the only element from array and do an inline load from the appropriate slot
  4023. // if the type hasn't changed since the last time this ScopedLdFld was executed.
  4024. //
  4025. //----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateFastScopedLdFld(IR::Instr * instrLdScopedFld)
{
    // Emits the inline fast path for a scoped load when the frame display
    // holds exactly one scope object:
    //
    // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
    // JNE $helper
    // MOV r1, [base + offset(scopes)] -- load the first scope
    // MOV r2, r1->type
    // CMP r2, [&(inlineCache->u.local.type)] -- check type
    // JNE $helper
    // MOV r1, r1->slots -- load the slots array
    // MOV r2 , [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
    // MOV dst, [r1+r2] -- load the value from the slot
    // JMP $fallthru
    // $helper:
    // dst = CALL PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
    // $fallthru:
    //
    // Returns the instruction just before instrLdScopedFld (the $helper
    // label), so the caller resumes lowering at the helper path.
    IR::RegOpnd * opndBase;
    IR::Instr * instr;
    IR::IndirOpnd * indirOpnd;
    IR::LabelInstr * labelHelper;
    IR::Opnd * opndDst;
    IR::RegOpnd * inlineCache;
    IR::RegOpnd *r1;
    IR::LabelInstr * labelFallThru;

    IR::Opnd *propertySrc = instrLdScopedFld->GetSrc1();
    AssertMsg(propertySrc->IsSymOpnd() && propertySrc->AsSymOpnd()->IsPropertySymOpnd() && propertySrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected property sym operand as src of LdScoped");

    IR::PropertySymOpnd * propertySymOpnd = propertySrc->AsPropertySymOpnd();

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);

    // src2 (the default instance) is only consumed by the helper-call path;
    // it is validated here but not otherwise used in the fast path.
    IR::Opnd *srcBase = instrLdScopedFld->GetSrc2();
    AssertMsg(srcBase->IsRegOpnd(), "Expected reg opnd as src2");
    //opndBase = srcBase;
    //IR::IndirOpnd * indirOpnd = src->AsIndirOpnd();
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");

    // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
    // (The frame display length is read as a 16-bit field.)
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(indirOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
    instrLdScopedFld->InsertBefore(instr);

    // JNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
    instrLdScopedFld->InsertBefore(instr);

    // MOV r1, [base + offset(scopes)] -- load the first scope
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
    r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
    instrLdScopedFld->InsertBefore(instr);

    //first load the inlineCache type
    inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    Assert(inlineCache != nullptr);

    IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    opndDst = instrLdScopedFld->GetDst();
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // The scope object is a heap object, never a tagged int.
    r1->m_sym->m_isNotInt = true;

    // Load the type
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdScopedFld, r1, opndType, labelHelper);

    // Check the local cache with the tagged type
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrLdScopedFld, opndType, opndTaggedType);
    instrLdScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdScopedFld, propertySymOpnd), this->m_func));
    GenerateLocalInlineCacheCheck(instrLdScopedFld, opndTaggedType, inlineCache, labelHelper);

    // Cache hit: load dst straight from the cached (aux) slot.
    GenerateLdFldFromLocalInlineCache(instrLdScopedFld, r1, opndDst, inlineCache, labelFallThru, false);

    // $helper:
    // dst = CALL PatchGetPropertyScoped(inlineCache, opndBase, propertyId, srcBase, scriptContext)
    // $fallthru:
    instrLdScopedFld->InsertBefore(labelHelper);
    instrLdScopedFld->InsertAfter(labelFallThru);

    return instrLdScopedFld->m_prev;
}
  4097. //----------------------------------------------------------------------------
  4098. //
  4099. // LowererMD::GenerateFastScopedStFld
  4100. //
  4101. // Make use of the helper to cache the type and slot index used to do a ScopedStFld
  4102. // when the scope is an array of length 1.
  4103. // Extract the only element from array and do an inline load from the appropriate slot
  4104. // if the type hasn't changed since the last time this ScopedStFld was executed.
  4105. //
  4106. //----------------------------------------------------------------------------
  4107. IR::Instr *
  4108. LowererMD::GenerateFastScopedStFld(IR::Instr * instrStScopedFld)
  4109. {
  4110. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  4111. // JNE $helper
  4112. // MOV r1, [base + offset(scopes)] -- load the first scope
  4113. // MOV r2, r1->type
  4114. // CMP r2, [&(inlineCache->u.local.type)] -- check type
  4115. // JNE $helper
  4116. // MOV r1, r1->slots -- load the slots array
  4117. // MOV r2, [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  4118. // [r1 + r2*4] = MOV value -- store the value directly to the slot
  4119. // JMP $fallthru
  4120. // $helper:
  4121. // CALL PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
  4122. // $fallthru:
  4123. IR::RegOpnd * opndBase;
  4124. IR::Instr * instr;
  4125. IR::IndirOpnd * indirOpnd;
  4126. IR::LabelInstr * labelHelper;
  4127. IR::Opnd * opndDst;
  4128. IR::RegOpnd * inlineCache;
  4129. IR::RegOpnd *r1;
  4130. IR::LabelInstr * labelFallThru;
  4131. IR::Opnd *newValue = instrStScopedFld->GetSrc1();
  4132. // IR::Opnd *defaultInstance = instrStScopedFld->UnlinkSrc2();
  4133. opndDst = instrStScopedFld->GetDst();
  4134. AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(),
  4135. "Expected property sym operand as dst of StScoped");
  4136. IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
  4137. opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  4138. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4139. AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");
  4140. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  4141. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
  4142. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  4143. instr->SetSrc1(indirOpnd);
  4144. instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
  4145. instrStScopedFld->InsertBefore(instr);
  4146. // JNE $helper
  4147. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  4148. instrStScopedFld->InsertBefore(instr);
  4149. // MOV r1, [base + offset(scopes)] -- load the first scope
  4150. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
  4151. r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  4152. instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
  4153. instrStScopedFld->InsertBefore(instr);
  4154. //first load the inlineCache type
  4155. inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  4156. Assert(inlineCache != nullptr);
  4157. IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
  4158. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4159. r1->m_sym->m_isNotInt = true;
  4160. // Load the type
  4161. this->m_lowerer->GenerateObjectTestAndTypeLoad(instrStScopedFld, r1, opndType, labelHelper);
  4162. // Check the local cache with the tagged type
  4163. IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
  4164. GenerateLoadTaggedType(instrStScopedFld, opndType, opndTaggedType);
  4165. instrStScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrStScopedFld, propertySymOpnd), this->m_func));
  4166. GenerateLocalInlineCacheCheck(instrStScopedFld, opndTaggedType, inlineCache, labelHelper);
  4167. GenerateStFldFromLocalInlineCache(instrStScopedFld, r1, newValue, inlineCache, labelFallThru, false);
  4168. // $helper:
  4169. // CALL PatchSetPropertyScoped(inlineCache, opndBase, propertyId, newValue, defaultInstance, scriptContext)
  4170. // $fallthru:
  4171. instrStScopedFld->InsertBefore(labelHelper);
  4172. instrStScopedFld->InsertAfter(labelFallThru);
  4173. return instrStScopedFld->m_prev;
  4174. }
  4175. IR::Opnd *
  4176. LowererMD::CreateStackArgumentsSlotOpnd()
  4177. {
  4178. StackSym *sym = StackSym::New(TyMachReg, this->m_func);
  4179. sym->m_offset = -MachArgsSlotOffset;
  4180. sym->m_allocated = true;
  4181. return IR::SymOpnd::New(sym, TyMachReg, this->m_func);
  4182. }
  4183. IR::RegOpnd *
  4184. LowererMD::GenerateUntagVar(IR::RegOpnd * src, IR::LabelInstr * labelFail, IR::Instr * assignInstr, bool generateTagCheck)
  4185. {
  4186. Assert(src->IsVar());
  4187. // MOV valueOpnd, index
  4188. IR::RegOpnd *valueOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  4189. //
  4190. // Convert Index to 32 bits.
  4191. //
  4192. IR::Opnd * opnd = src->UseWithNewType(TyMachReg, this->m_func);
  4193. #if INT32VAR
  4194. if (generateTagCheck)
  4195. {
  4196. Assert(!opnd->IsTaggedInt());
  4197. this->GenerateSmIntTest(opnd, assignInstr, labelFail);
  4198. }
  4199. // Moving into r2 clears the tag bits on AMD64.
  4200. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, valueOpnd, opnd, this->m_func);
  4201. assignInstr->InsertBefore(instr);
  4202. #else
  4203. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, valueOpnd, opnd, this->m_func);
  4204. assignInstr->InsertBefore(instr);
  4205. // SAR valueOpnd, Js::VarTag_Shift
  4206. instr = IR::Instr::New(Js::OpCode::SAR, valueOpnd, valueOpnd,
  4207. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  4208. assignInstr->InsertBefore(instr);
  4209. if (generateTagCheck)
  4210. {
  4211. Assert(!opnd->IsTaggedInt());
  4212. // SAR set the carry flag (CF) to 1 if the lower bit is 1
  4213. // JAE will jmp if CF = 0
  4214. instr = IR::BranchInstr::New(Js::OpCode::JAE, labelFail, this->m_func);
  4215. assignInstr->InsertBefore(instr);
  4216. }
  4217. #endif
  4218. return valueOpnd;
  4219. }
  4220. IR::RegOpnd *LowererMD::LoadNonnegativeIndex(
  4221. IR::RegOpnd *indexOpnd,
  4222. const bool skipNegativeCheck,
  4223. IR::LabelInstr *const notTaggedIntLabel,
  4224. IR::LabelInstr *const negativeLabel,
  4225. IR::Instr *const insertBeforeInstr)
  4226. {
  4227. Assert(indexOpnd);
  4228. Assert(indexOpnd->IsVar() || indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
  4229. Assert(indexOpnd->GetType() != TyUint32 || skipNegativeCheck);
  4230. Assert(!indexOpnd->IsVar() || notTaggedIntLabel);
  4231. Assert(skipNegativeCheck || negativeLabel);
  4232. Assert(insertBeforeInstr);
  4233. if(indexOpnd->IsVar())
  4234. {
  4235. if (indexOpnd->GetValueType().IsLikelyFloat()
  4236. #ifdef _M_IX86
  4237. && AutoSystemInfo::Data.SSE2Available()
  4238. #endif
  4239. )
  4240. {
  4241. return m_lowerer->LoadIndexFromLikelyFloat(indexOpnd, skipNegativeCheck, notTaggedIntLabel, negativeLabel, insertBeforeInstr);
  4242. }
  4243. // mov intIndex, index
  4244. // sar intIndex, 1
  4245. // jae $notTaggedIntOrNegative
  4246. indexOpnd = GenerateUntagVar(indexOpnd, notTaggedIntLabel, insertBeforeInstr, !indexOpnd->IsTaggedInt());
  4247. }
  4248. if(!skipNegativeCheck)
  4249. {
  4250. // test index, index
  4251. // js $notTaggedIntOrNegative
  4252. Lowerer::InsertTestBranch(indexOpnd, indexOpnd, Js::OpCode::JSB, negativeLabel, insertBeforeInstr);
  4253. }
  4254. return indexOpnd;
  4255. }
// Inlines fast-path for int Mul/Add or int Mul/Sub. If not int, call MulAdd/MulSub helper
// Pattern matched: "t = a * b; dst = t +/- c" where t is single-def and dies
// at the add. Returns false (emitting nothing) when the pattern doesn't
// apply; returns true after replacing both instructions with an inlined
// tagged-int fast path plus a fused MulAdd/MulSub helper call.
bool LowererMD::TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev)
{
    IR::Instr *instrMul = instrAdd->GetPrevRealInstrOrLabel();
    IR::Opnd *addSrc;
    IR::RegOpnd *addCommonSrcOpnd;

    Assert(instrAdd->m_opcode == Js::OpCode::Add_A || instrAdd->m_opcode == Js::OpCode::Sub_A);

    bool isSub = (instrAdd->m_opcode == Js::OpCode::Sub_A) ? true : false;

    // Mul needs to be a single def reg
    if (instrMul->m_opcode != Js::OpCode::Mul_A || instrMul->GetDst()->IsRegOpnd() == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    if (instrMul->HasBailOutInfo())
    {
        // Bailout will be generated for the Add, but not the Mul.
        // We could handle this, but this path isn't used that much anymore.
        return false;
    }

    IR::RegOpnd *regMulDst = instrMul->GetDst()->AsRegOpnd();

    if (regMulDst->m_sym->m_isSingleDef == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    // Only handle a * b + c, so dst of Mul needs to match left source of Add
    if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc1()))
    {
        addCommonSrcOpnd = instrAdd->GetSrc1()->AsRegOpnd();
        addSrc = instrAdd->GetSrc2();
    }
    else if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc2()))
    {
        addSrc = instrAdd->GetSrc1();
        addCommonSrcOpnd = instrAdd->GetSrc2()->AsRegOpnd();
    }
    else
    {
        return false;
    }

    // Only handle a * b + c where c != a * b
    if (instrAdd->GetSrc1()->IsEqual(instrAdd->GetSrc2()))
    {
        return false;
    }

    // The mul result must be a temp at its last use; otherwise it is still
    // live after the add and cannot be fused away.
    if (addCommonSrcOpnd->m_isTempLastUse == false)
    {
        return false;
    }

    IR::Opnd *mulSrc1 = instrMul->GetSrc1();
    IR::Opnd *mulSrc2 = instrMul->GetSrc2();

    // Both mul operands statically known to be tagged ints: bail out of the
    // fused form (presumably the regular Mul/Add lowering is preferred then).
    if (mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsTaggedInt()
        && mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsTaggedInt())
    {
        return false;
    }

    // Save prevInstr for the main lower loop
    *pInstrPrev = instrMul->m_prev;

    // Generate int31 fast-path for Mul, go to MulAdd helper if it fails, or one of the source is marked notInt
    if (!(addSrc->IsRegOpnd() && addSrc->AsRegOpnd()->IsNotInt())
        && !(mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsNotInt())
        && !(mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsNotInt()))
    {
        this->GenerateFastMul(instrMul);

        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::Instr *instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        instrMul->InsertBefore(instr);

        // Generate int31 fast-path for Add
        bool success;
        if (isSub)
        {
            success = this->GenerateFastSub(instrAdd);
        }
        else
        {
            success = this->GenerateFastAdd(instrAdd);
        }

        if (!success)
        {
            // No add fast path was emitted, so the "helper" label is actually
            // on the main path; don't mark it as helper code.
            labelHelper->isOpHelper = false;
        }
        // Generate MulAdd helper call
        instrAdd->InsertBefore(labelHelper);
    }

    // Set up the helper call: temp-number slot (or 0), scriptContext, the
    // add operand, then the two mul operands.
    if (instrAdd->dstIsTempNumber)
    {
        m_lowerer->LoadHelperTemp(instrAdd, instrAdd);
    }
    else
    {
        IR::Opnd *tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        this->LoadHelperArgument(instrAdd, tempOpnd);
    }
    this->m_lowerer->LoadScriptContext(instrAdd);

    IR::JnHelperMethod helper;
    if (addSrc == instrAdd->GetSrc2())
    {
        // dst = a*b (+/-) c : the add operand is on the right
        instrAdd->FreeSrc1();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc2();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubRight : IR::HelperOp_MulAddRight;
    }
    else
    {
        // dst = c (+/-) a*b : the add operand is on the left
        instrAdd->FreeSrc2();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc1();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubLeft : IR::HelperOp_MulAddLeft;
    }

    // Transfer the mul operands to the helper call, convert the add into the
    // call, and delete the now-fused mul.
    IR::Opnd *src2 = instrMul->UnlinkSrc2();
    this->LoadHelperArgument(instrAdd, src2);
    IR::Opnd *src1 = instrMul->UnlinkSrc1();
    this->LoadHelperArgument(instrAdd, src1);
    this->ChangeToHelperCall(instrAdd, helper);
    instrMul->Remove();

    return true;
}
// Emits the inline fast path for Math.abs(x):
//   - tagged-int constant: folded to a single MOV at JIT time (when |x| fits)
//   - tagged int: branchless integer abs via CDQ/XOR/SUB
//   - JavascriptNumber / boxed double: clear the IEEE sign bit
// Anything else (or overflow, e.g. abs(INT_MIN), or no SSE2 on x86) jumps to
// labelHelper.
void
LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // TEST src1, AtomTag
    // JEQ $float
    // MOV EAX, src
    // SAR EAX, AtomTag_Int32
    // CDQ
    // XOR EAX, EDX
    // SUB EAX, EDX
    // SHL EAX, AtomTag_Int32
    // JO $labelHelper
    // INC EAX
    // MOV dst, EAX
    // JMP $done
    // $float
    // CMP [src], JavascriptNumber.vtable
    // JNE $helper
    // MOVSD r1, [src + offsetof(value)]
    // ANDPD r1, absDoubleCst
    // dst = DoubleToVar(r1)

    IR::Instr *instr = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    bool isInt = false;
    bool isNotInt = false;

    if (src->IsRegOpnd())
    {
        if (src->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
        else if (src->AsRegOpnd()->IsNotInt())
        {
            isNotInt = true;
        }
    }
    else if (src->IsAddrOpnd())
    {
        // Tagged-int constant source: compute |x| at JIT time and emit one MOV
        // when the result still fits in a tagged int.
        IR::AddrOpnd *varOpnd = src->AsAddrOpnd();
        Assert(varOpnd->IsVar() && Js::TaggedInt::Is(varOpnd->m_address));
#ifdef _M_X64
        __int64 absValue = ::_abs64(Js::TaggedInt::ToInt32(varOpnd->m_address));
#else
        __int32 absValue = ::abs(Js::TaggedInt::ToInt32(varOpnd->m_address));
#endif
        if (!Js::TaggedInt::IsOverflow(absValue))
        {
            varOpnd->SetAddress(Js::TaggedInt::ToVarUnchecked((__int32)absValue), IR::AddrOpndKindConstantVar);

            instr = IR::Instr::New(Js::OpCode::MOV, dst, varOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
            return;
        }
    }

    if (src->IsRegOpnd() == false)
    {
        // Put a non-reg source (e.g. an overflowing constant) into a register
        // so the code below can handle it uniformly.
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, this->m_func);
        insertInstr->InsertBefore(instr);
        src = regOpnd;
    }

#ifdef _M_IX86
    // The x86 float path uses SSE2 instructions (MOVSD/ANDPS).
    bool emitFloatAbs = !isInt && AutoSystemInfo::Data.SSE2Available();
#else
    bool emitFloatAbs = !isInt;
#endif

    if (!isNotInt)
    {
        // Integer fast path (skipped entirely when src is known not-int).
        if (!isInt)
        {
            // Unknown type: tag-check first; failures fall to the float path
            // when we can emit one, otherwise straight to the helper.
            IR::LabelInstr *label = labelHelper;
            if (emitFloatAbs)
            {
                label = labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            }

            GenerateSmIntTest(src, insertInstr, label);
        }

        // MOV EAX, src
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_IX86
        // SAR EAX, AtomTag_Int32 -- shift the tag out to get the raw int32
        instr = IR::Instr::New(Js::OpCode::SAR, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
#endif

        IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());

        // CDQ -- sign-extend EAX into EDX (EDX becomes 0 or -1)
        // Note: put EDX on dst to give a def to the EDX lifetime
        instr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

        // XOR EAX, EDX
        // SUB EAX, EDX -- branchless abs: (x ^ mask) - mask, mask = x >> 31
        instr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

        instr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_X64
        // abs(INT_MIN) overflows a 32 bit integer.
        // JO $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
#endif

#ifdef _M_IX86
        // SHL EAX, AtomTag_Int32 -- shift back up for retagging; overflow here
        // also catches abs(INT_MIN)
        instr = IR::Instr::New(Js::OpCode::SHL, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        // JO $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // INC EAX -- set the atom tag bit to form the tagged-int result
        instr = IR::Instr::New(Js::OpCode::INC, regEAX, regEAX, this->m_func);
        insertInstr->InsertBefore(instr);
#endif

        // MOV dst, EAX
        instr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_X64
        // Retag the raw int32 result as a var.
        GenerateInt32ToVarConversion(dst, insertInstr);
#endif
    }

    if (labelFloat)
    {
        // JMP $done
        instr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
        insertInstr->InsertBefore(instr);

        // $float
        insertInstr->InsertBefore(labelFloat);
    }

    if (emitFloatAbs)
    {
#if defined(_M_IX86)
        // CMP [src], JavascriptNumber.vtable
        IR::Opnd *opnd = IR::IndirOpnd::New(src->AsRegOpnd(), (int32)0, TyMachPtr, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opnd);
        instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
        insertInstr->InsertBefore(instr);

        // JNE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // MOVSD r1, [src + offsetof(value)]
        opnd = IR::IndirOpnd::New(src->AsRegOpnd(), Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyMachDouble, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regOpnd, opnd, this->m_func);
        insertInstr->InsertBefore(instr);

        // Clear the sign bit, then box the result.
        this->GenerateFloatAbs(regOpnd, insertInstr);

        // dst = DoubleToVar(r1)
        SaveDoubleToVar(callInstr->GetDst()->AsRegOpnd(), regOpnd, callInstr, insertInstr);
#elif defined(_M_X64)
        // if (typeof(src) == double)
        IR::RegOpnd *src64 = src->AsRegOpnd();
        GenerateFloatTest(src64, insertInstr, labelHelper);

        // dst64 = MOV src64
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, src64, this->m_func));

        // Unconditionally set the sign bit. This will get XORd away when we remove the tag.
        // dst64 = OR 0x8000000000000000
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::OR, dst, dst, IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func), this->m_func));
#endif
    }
    else if(!isInt)
    {
        // The source is not known to be a tagged int, so either it's definitely not an int (isNotInt), or the int version of
        // abs failed the tag check and jumped here. We can't emit the float version of abs (!emitFloatAbs) due to SSE2 not
        // being available, so jump straight to the helper.
        // JMP $helper
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
  4545. IR::Instr * LowererMD::GenerateFloatAbs(IR::RegOpnd * regOpnd, IR::Instr * insertInstr)
  4546. {
  4547. // ANDPS reg, absDoubleCst
  4548. IR::Opnd * opnd;
  4549. if (regOpnd->IsFloat64())
  4550. {
  4551. opnd = m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueAbsDoubleCst);
  4552. }
  4553. else
  4554. {
  4555. Assert(regOpnd->IsFloat32());
  4556. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsFloatCstAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  4557. }
  4558. // ANDPS has smaller encoding then ANDPD
  4559. IR::Instr * instr = IR::Instr::New(Js::OpCode::ANDPS, regOpnd, regOpnd, opnd, this->m_func);
  4560. insertInstr->InsertBefore(instr);
  4561. Legalize(instr);
  4562. return instr;
  4563. }
// Emit the inline fast path for String.prototype.charAt / charCodeAt / codePointAt
// ('index' selects which builtin). Slow cases branch to labelHelper; the fast path
// rejoins at doneLabel. Returns true when a fast path was emitted, false when the
// index is a known negative constant (then only an unconditional jump to the
// label is emitted and the label is unmarked as a helper).
bool LowererMD::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // Emitted sequence:
    //   if regSrcStr is not object, JMP $helper
    //   CMP [regSrcStr + offset(type)] , static string type   -- check base string type
    //   JNE $helper
    //   MOV r1, [regSrcStr + offset(m_pszValue)]
    //   TEST r1, r1
    //   JEQ $helper
    //   MOV r2, srcIndex
    //   If r2 is not int, JMP $helper
    //   Convert r2 to int
    //   CMP [regSrcStr + offsetof(length)], r2
    //   JBE $helper
    //   MOVZX r2, [r1 + r2 * 2]
    //   if (charAt)
    //       PUSH r1
    //       PUSH scriptContext
    //       CALL GetStringFromChar
    //       MOV dst, EAX
    //   else (charCodeAt)
    //       if (codePointAt)
    //           Lowerer.GenerateFastCodePointAt -- Common inline functions
    //       Convert r2 to Var
    //       MOV dst, r2
    bool isInt = false;
    bool isNotTaggedValue = false;
    IR::Instr *instr;
    IR::RegOpnd *regSrcStr;

    // Use what is statically known about the string operand to skip redundant tests.
    if (srcStr->IsRegOpnd())
    {
        if (srcStr->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
        else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
        {
            isNotTaggedValue = true;
        }
    }

    // Make sure the string value is in a register so we can load through it.
    if (srcStr->IsRegOpnd() == false)
    {
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, srcStr, this->m_func);
        insertInstr->InsertBefore(instr);
        regSrcStr = regOpnd;
    }
    else
    {
        regSrcStr = srcStr->AsRegOpnd();
    }

    if (!isNotTaggedValue)
    {
        if (!isInt)
        {
            GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
        }
        else
        {
            // The "string" is known to be a tagged int: the fast path can never apply,
            // so jump straight to the helper.
            // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
            IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
            insertInstr->InsertBefore(fakeBr);

            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }

    // Bail out if index a constant and is less than zero.
    if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
    {
        labelHelper->isOpHelper = false;
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
        return false;
    }

    this->m_lowerer->GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);

    // r1 contains the value of the char16* pointer inside JavascriptString.
    // MOV r1, [regSrcStr + offset(m_pszValue)]
    IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
    insertInstr->InsertBefore(instr);

    // TEST r1, r1 -- Null pointer test
    instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(r1);
    instr->SetSrc2(r1);
    insertInstr->InsertBefore(instr);

    // JEQ $helper
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);

    IR::IndirOpnd *strLength = IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
    if (srcIndex->IsAddrOpnd())
    {
        // Constant (tagged int) index: bounds-check against the constant directly.
        // CMP [regSrcStr + offsetof(length)], index
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(strLength);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address), TyUint32, this->m_func));
        insertInstr->InsertBefore(instr);

        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // JBE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JBE, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        indirOpnd = IR::IndirOpnd::New(r1, Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) * sizeof(char16), TyInt16, this->m_func);
    }
    else
    {
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
        // MOV r2, srcIndex
        instr = IR::Instr::New(Js::OpCode::MOV, r2, srcIndex, this->m_func);
        insertInstr->InsertBefore(instr);

        if (!srcIndex->IsRegOpnd() || !srcIndex->AsRegOpnd()->IsTaggedInt())
        {
            GenerateSmIntTest(r2, insertInstr, labelHelper);
        }
#if INT32VAR
        // Remove the tag
        // MOV r2, [32-bit] r2
        IR::Opnd * r2_32 = r2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSXD, r2, r2_32, this->m_func);
        insertInstr->InsertBefore(instr);
        r2 = r2_32->AsRegOpnd();
#else
        // r2 = SAR r2, VarTag_Shift
        instr = IR::Instr::New(Js::OpCode::SAR, r2, r2, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
#endif
        // CMP [regSrcStr + offsetof(length)], r2
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(strLength);
        instr->SetSrc2(r2);
        insertInstr->InsertBefore(instr);

        if (r2->GetSize() != MachPtr)
        {
            // Widen the index so it can serve as the index register of the indir below.
            r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
        }

        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // JBE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JBE, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // Scale shift of 1 (i.e. * 2) matches sizeof(char16) elements.
        indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyInt16, this->m_func);
    }

    // MOVZX charReg, [r1 + r2 * 2] -- this is the value of the char
    IR::RegOpnd *charReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVZXW, charReg, indirOpnd, this->m_func);
    insertInstr->InsertBefore(instr);

    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
    {
        IR::Opnd *resultOpnd;
        if (dst->IsEqual(srcStr))
        {
            // Don't clobber the source string operand; build the result in a temp.
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = dst;
        }
        this->m_lowerer->GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
    }
    else
    {
        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);

        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
        {
            this->m_lowerer->GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
        }

        GenerateInt32ToVarConversion(charReg, insertInstr);

        // MOV dst, charReg
        instr = IR::Instr::New(Js::OpCode::MOV, dst, charReg, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    return true;
}
  4735. IR::RegOpnd* LowererMD::MaterializeDoubleConstFromInt(intptr_t constAddr, IR::Instr* instr)
  4736. {
  4737. IR::Opnd* constVal = IR::MemRefOpnd::New(constAddr, IRType::TyFloat64, this->m_func);
  4738. IR::RegOpnd * xmmReg = IR::RegOpnd::New(TyFloat64, m_func);
  4739. this->m_lowerer->InsertMove(xmmReg, constVal, instr);
  4740. return xmmReg;
  4741. }
  4742. IR::RegOpnd* LowererMD::MaterializeConstFromBits(int bits, IRType type, IR::Instr* instr)
  4743. {
  4744. IR::Opnd * regBits = IR::RegOpnd::New(TyInt32, m_func);
  4745. this->m_lowerer->InsertMove(regBits, IR::IntConstOpnd::New(bits, TyInt32, m_func), instr);
  4746. IR::RegOpnd * regConst = IR::RegOpnd::New(type, m_func);
  4747. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, regConst, regBits, m_func));
  4748. return regConst;
  4749. }
  4750. IR::Opnd* LowererMD::Subtract2To31(IR::Opnd* src1, IR::Opnd* intMinFP, IRType type, IR::Instr* instr)
  4751. {
  4752. Js::OpCode op = (type == TyFloat32) ? Js::OpCode::SUBSS : Js::OpCode::SUBSD;
  4753. IR::Opnd* adjSrc = IR::RegOpnd::New(type, m_func);
  4754. IR::Instr* sub = IR::Instr::New(op, adjSrc, src1, intMinFP, m_func);
  4755. instr->InsertBefore(sub);
  4756. Legalize(sub);
  4757. return adjSrc;
  4758. }
// Emit the range checks for a checked float->int truncation (TruncWithCheck).
// Widens a float32 source to float64 if needed, then verifies
//   lowerBound < src < upperBound
// with bounds chosen by the destination's signedness:
//   uint32 dst: (-1, UINT_MAX + 1)    int32 dst: (INT_MIN - 1, INT_MAX + 1)
// In-range values fall through to the inserted 'conversion' label; out-of-range
// values reach the throw label, which raises VBSERR_Overflow and does not return.
// Returns the (possibly widened) float64 source operand for the caller to convert.
IR::Opnd* LowererMD::GenerateTruncChecks(IR::Instr* instr)
{
    IR::LabelInstr * conversion = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * throwLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd* src1 = instr->GetSrc1();

    IR::Opnd * src64 = nullptr;
    if (src1->IsFloat32())
    {
        // Widen so one set of double constants covers both source types.
        src64 = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(src64, src1, instr);
    }
    else
    {
        src64 = src1;
    }

    // src <= lowerBound  =>  throw
    IR::RegOpnd* limitReg = MaterializeDoubleConstFromInt(instr->GetDst()->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleNegOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMinMinusOneAddr(), instr);

    m_lowerer->InsertCompareBranch(src64, limitReg, Js::OpCode::BrLe_A, throwLabel, instr);

    // upperBound > src  =>  in range, go convert; otherwise fall into the throw label.
    limitReg = MaterializeDoubleConstFromInt(instr->GetDst()->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleUintMaxPlusOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMaxPlusOneAddr(), instr);

    m_lowerer->InsertCompareBranch(limitReg, src64, Js::OpCode::BrGt_A, conversion, instr, true /*no NaN check*/);

    instr->InsertBefore(throwLabel);
    this->m_lowerer->GenerateThrow(IR::IntConstOpnd::New(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), instr);
    //no jump here we aren't coming back
    instr->InsertBefore(conversion);
    return src64;
}
// Lower TruncWithCheck: float->int truncation that throws on out-of-range input.
// GenerateTruncChecks widens the source to double and emits the range checks.
// Signed destinations need only a CVTTSD2SI. Unsigned destinations cannot be
// produced by a signed truncation when src >= 2^31, so that range is handled by
// subtracting 2^31 before the truncation and adding 0x80000000 back afterwards.
void
LowererMD::GenerateTruncWithCheck(IR::Instr * instr)
{
    Assert(AutoSystemInfo::Data.SSE2Available());

    IR::Opnd* src64 = GenerateTruncChecks(instr); //converts src to double and checks if MIN <= src <= MAX
    IR::Opnd* dst = instr->GetDst();

    if (dst->IsUnsigned())
    {
        // dst = 0 (bias for the src < 2^31 case)
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, m_func), instr);
        IR::LabelInstr * skipUnsignedPart = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::Opnd* twoTo31 = MaterializeDoubleConstFromInt(m_func->GetThreadContextInfo()->GetDoubleTwoTo31Addr(), instr);

        // if (src < 2^31) no adjustment is needed
        m_lowerer->InsertCompareBranch(src64, twoTo31, Js::OpCode::BrLt_A, skipUnsignedPart, instr);

        // src -= 2^31 ; dst = 0x80000000
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SUBPD, src64, src64, twoTo31, m_func));
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0x80000000 /*2^31*/, TyUint32, m_func), instr);
        instr->InsertBefore(skipUnsignedPart);

        // dst += (int32)trunc(src)
        IR::Opnd* tmp = IR::RegOpnd::New(TyInt32, m_func);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, tmp, src64, m_func));
        instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, tmp, m_func));
    }
    else
    {
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, dst, src64, m_func));
    }

    // The original instruction has been fully expanded; remove it.
    instr->UnlinkSrc1();
    instr->UnlinkDst();
    instr->Remove();
}
// Lower Ctz (count trailing zeros). Uses TZCNT when the CPU supports it;
// otherwise BSF followed by a CMOVE that supplies the operand's bit width
// (32 or 64) for a zero input, since BSF leaves its destination undefined
// and sets ZF when the source is zero.
void
LowererMD::GenerateCtz(IR::Instr * instr)
{
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
#ifdef _M_IX86
    // 64-bit operands on x86 are expanded by the arch-specific int64 path.
    if (instr->GetSrc1()->IsInt64())
    {
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.TZCntAvailable())
    {
        instr->m_opcode = Js::OpCode::TZCNT;
        Legalize(instr);
    }
    else
    {
        // dst = BSF src
        // dst = CMOVE dst, 32 // dst is src1 to help reg alloc
        int instrSize = instr->GetSrc1()->GetSize();
        IRType type = instrSize == 8 ? TyInt64 : TyInt32;
        instr->m_opcode = Js::OpCode::BSF;
        Legalize(instr);

        IR::IntConstOpnd * const32 = IR::IntConstOpnd::New(instrSize * 8, type, m_func);
        IR::Instr* cmove = IR::Instr::New(Js::OpCode::CMOVE, instr->GetDst(), instr->GetDst(), const32, this->m_func);
        instr->InsertAfter(cmove);
        Legalize(cmove);
    }
}
// Lower PopCnt (population count). Uses the POPCNT instruction when available;
// otherwise replaces the instruction with a call to the PopCnt32/PopCnt64 helper.
void
LowererMD::GeneratePopCnt(IR::Instr * instr)
{
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
#ifdef _M_IX86
    // 64-bit operands on x86 are expanded by the arch-specific int64 path.
    if (instr->GetSrc1()->IsInt64())
    {
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.PopCntAvailable())
    {
        instr->m_opcode = Js::OpCode::POPCNT;
        Legalize(instr);
    }
    else
    {
        // No hardware support: pass the source to the helper and turn the
        // instruction into a helper call, sized by the operand width.
        int instrSize = instr->GetSrc1()->GetSize();
        LoadHelperArgument(instr, instr->GetSrc1());
        instr->UnlinkSrc1();
        this->ChangeToHelperCall(instr, instrSize == 8 ? IR::HelperPopCnt64 : IR::HelperPopCnt32);
    }
}
// Lower Clz (count leading zeros). Uses LZCNT when the CPU supports it;
// otherwise BSR (index of the highest set bit) and computes (width-1) - index,
// with a branch supplying the full bit width for a zero input (BSR sets ZF
// and leaves its destination undefined in that case).
void
LowererMD::GenerateClz(IR::Instr * instr)
{
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
#ifdef _M_IX86
    // 64-bit operands on x86 are expanded by the arch-specific int64 path.
    if (instr->GetSrc1()->IsInt64())
    {
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.LZCntAvailable())
    {
        instr->m_opcode = Js::OpCode::LZCNT;
        Legalize(instr);
    }
    else
    {
        // tmp = BSR src
        // JE $label32
        // dst = SUB 31, tmp
        // dst = SUB 63, tmp; for int64
        // JMP $done
        // label32:
        // dst = mov 32;
        // dst = mov 64; for int64
        // $done
        int instrSize = instr->GetSrc1()->GetSize();
        IRType type = instrSize == 8 ? TyInt64 : TyInt32;
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr->m_next);
        IR::Opnd * dst = instr->UnlinkDst();
        IR::Opnd * tmpOpnd = IR::RegOpnd::New(type, m_func);
        instr->SetDst(tmpOpnd);
        instr->m_opcode = Js::OpCode::BSR;
        Legalize(instr);
        IR::LabelInstr * label32 = Lowerer::InsertLabel(false, doneLabel);
        // JEQ consumes ZF set by BSR on a zero source.
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, label32, m_func);
        label32->InsertBefore(instr);
        Lowerer::InsertSub(false, dst, IR::IntConstOpnd::New(instrSize == 8 ? 63 : 31, type, m_func), tmpOpnd, label32);
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, label32);
        Lowerer::InsertMove(dst, IR::IntConstOpnd::New(instrSize == 8 ? 64 : 32, type, m_func), doneLabel);
    }
}
#if !FLOATVAR
// Emit the inline bump-allocation fast path for a boxed JavascriptNumber.
// Fast path: take allocator->freeObjectList, advance it by the aligned alloc
// size, and commit if the new cursor does not pass allocator->endAddress.
// Slow path: call the AllocUninitializedNumber helper.
// opndDst receives the (still uninitialized) number object's address.
// isHelper is propagated to the $done label's helper flag.
void
LowererMD::GenerateNumberAllocation(IR::RegOpnd * opndDst, IR::Instr * instrInsert, bool isHelper)
{
    size_t alignedAllocSize = Js::RecyclerJavascriptNumberAllocator::GetAlignedAllocSize(
        m_func->GetScriptContextInfo()->IsRecyclerVerifyEnabled(),
        m_func->GetScriptContextInfo()->GetRecyclerVerifyPad());

    IR::Opnd * endAddressOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorEndAddress);
    IR::Opnd * freeObjectListOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorFreeObjectList);

    // MOV dst, allocator->freeObjectList
    IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::MOV, opndDst, freeObjectListOpnd, this->m_func);
    instrInsert->InsertBefore(loadMemBlockInstr);

    // LEA nextMemBlock, [dst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd,
        IR::IndirOpnd::New(opndDst, alignedAllocSize, TyMachPtr, this->m_func), this->m_func);
    instrInsert->InsertBefore(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    instrInsert->InsertBefore(checkInstr);

    // JA $helper -- cursor would pass the end of the block, take the slow path
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JA, helperLabel, this->m_func);
    instrInsert->InsertBefore(branchInstr);

    // MOV allocator->freeObjectList, nextMemBlock -- commit the bump allocation
    IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::MOV, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
    instrInsert->InsertBefore(setFreeObjectListInstr);

    // JMP $done
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    IR::BranchInstr * branchToDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
    instrInsert->InsertBefore(branchToDoneInstr);

    // $helper:
    instrInsert->InsertBefore(helperLabel);

    // PUSH allocator
    this->LoadHelperArgument(instrInsert, m_lowerer->LoadScriptContextValueOpnd(instrInsert, ScriptContextValue::ScriptContextNumberAllocator));

    // dst = Call AllocUninitializedNumber
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::CALL, opndDst,
        IR::HelperCallOpnd::New(IR::HelperAllocUninitializedNumber, this->m_func), this->m_func);
    instrInsert->InsertBefore(instrCall);
    this->lowererMDArch.LowerCall(instrCall, 0);

    // $done:
    instrInsert->InsertBefore(doneLabel);
}
#endif
#ifdef _CONTROL_FLOW_GUARD
// Emit a Control Flow Guard check for the indirect-call target in entryPointOpnd.
// The target is validated by calling the guard-check helper (HelperGuardCheckCall,
// i.e. __guard_check_icall) with the target passed in ECX/RCX.
// When JIT trampolines are enabled and the thunk segment exists, a fast path is
// emitted first: if the target lies inside the thunk segment, the pointer is
// aligned down with the thunk alignment mask and the CFG call is skipped
// (unless ForceJITCFGCheck is set, which routes the fast path through the CFG
// check as well).
void
LowererMD::GenerateCFGCheck(IR::Opnd * entryPointOpnd, IR::Instr * insertBeforeInstr)
{
    bool useJITTrampoline = CONFIG_FLAG(UseJITTrampoline);
    IR::LabelInstr * callLabelInstr = nullptr;
    uintptr_t jitThunkStartAddress = NULL;
    if (useJITTrampoline)
    {
#if ENABLE_OOP_NATIVE_CODEGEN
        if (m_func->IsOOPJIT())
        {
            OOPJITThunkEmitter * jitThunkEmitter = m_func->GetOOPThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        else
#endif
        {
            InProcJITThunkEmitter * jitThunkEmitter = m_func->GetInProcThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }

        if (jitThunkStartAddress)
        {
            uintptr_t endAddressOfSegment = jitThunkStartAddress + InProcJITThunkEmitter::TotalThunkSize;
            Assert(endAddressOfSegment > jitThunkStartAddress);
            // Generate instructions for local Pre-Reserved Segment Range check
            IR::AddrOpnd * endAddressOfSegmentConstOpnd = IR::AddrOpnd::New(endAddressOfSegment, IR::AddrOpndKindDynamicMisc, m_func);
            IR::RegOpnd *resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            callLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            IR::LabelInstr * cfgLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

            // resultOpnd = SUB endAddressOfSegmentConstOpnd, entryPointOpnd
            // CMP resultOpnd, TotalThunkSize
            // JAE $cfgLabel      -- target is outside the thunk segment
            // AND entryPointOpnd, ~(ThunkSize-1)
            // JMP $callLabel     -- inside the segment: aligned target is trusted
            m_lowerer->InsertSub(false, resultOpnd, endAddressOfSegmentConstOpnd, entryPointOpnd, insertBeforeInstr);
            m_lowerer->InsertCompareBranch(resultOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::TotalThunkSize, TyMachReg, m_func, true), Js::OpCode::BrGe_A, true, cfgLabelInstr, insertBeforeInstr);
            m_lowerer->InsertAnd(entryPointOpnd, entryPointOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::ThunkAlignmentMask, TyMachReg, m_func, true), insertBeforeInstr);
            m_lowerer->InsertBranch(Js::OpCode::Br, callLabelInstr, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(cfgLabelInstr);
        }
    }
    //MOV ecx, entryPoint -- the guard-check helper takes its argument in ECX/RCX
    IR::RegOpnd * entryPointRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
#if _M_IX86
    entryPointRegOpnd->SetReg(RegECX);
#elif _M_X64
    entryPointRegOpnd->SetReg(RegRCX);
#endif
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr* movInstrEntryPointToRegister = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstrEntryPointToRegister);

    //Generate CheckCFG CALL here
    IR::HelperCallOpnd *cfgCallOpnd = IR::HelperCallOpnd::New(IR::HelperGuardCheckCall, this->m_func);
    IR::Instr* cfgCallInstr = IR::Instr::New(Js::OpCode::CALL, this->m_func);

#if _M_IX86
    //call[__guard_check_icall_fptr]
    cfgCallInstr->SetSrc1(cfgCallOpnd);
#elif _M_X64
    //mov rax, __guard_check_icall_fptr
    IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, m_func), RegRAX, TyMachPtr, this->m_func);
    IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, cfgCallOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstr);

    //call rax
    cfgCallInstr->SetSrc1(targetOpnd);
#endif

    //CALL cfg(rax)
    insertBeforeInstr->InsertBefore(cfgCallInstr);

    if (jitThunkStartAddress)
    {
        Assert(callLabelInstr);
        if (CONFIG_FLAG(ForceJITCFGCheck))
        {
            // Always generate CFG check to make sure that the address is still valid
            movInstrEntryPointToRegister->InsertBefore(callLabelInstr);
        }
        else
        {
            insertBeforeInstr->InsertBefore(callLabelInstr);
        }
    }
}
#endif
// Emit an inline recycler bump-allocation fast path for allocSize bytes.
// On success newObjDst holds the new block's address and control branches to
// allocDoneLabel; on failure control branches to allocHelperLabel, where the
// caller is expected to emit the slow-path allocation.
void
LowererMD::GenerateFastRecyclerAlloc(size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, IR::LabelInstr* allocHelperLabel, IR::LabelInstr* allocDoneLabel)
{
    IR::Opnd * endAddressOpnd;
    IR::Opnd * freeListOpnd;

    ScriptContextInfo* scriptContext = this->m_func->GetScriptContextInfo();
    void* allocatorAddress;
    uint32 endAddressOffset;
    uint32 freeListOffset;
    size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);

    bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
    // Resolve the heap-block allocator for this size class, along with the
    // offsets of its end-address and free-list fields.
    Recycler::GetNormalHeapBlockAllocatorInfoForNativeAllocation((void*)scriptContext->GetRecyclerAddr(), alignedSize,
        allocatorAddress, endAddressOffset, freeListOffset,
        allowNativeCodeBumpAllocation, this->m_func->IsOOPJIT());

    endAddressOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + endAddressOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorEndAddressRef);
    freeListOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + freeListOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorFreeListRef);
    const IR::AutoReuseOpnd autoReuseTempOpnd(freeListOpnd, m_func);

    // MOV newObjDst, allocator->freeObjectList
    Lowerer::InsertMove(newObjDst, freeListOpnd, insertionPointInstr);

    // LEA nextMemBlock, [newObjDst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd* nextMemBlockSrc = IR::IndirOpnd::New(newObjDst, (int32)alignedSize, TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd, nextMemBlockSrc, this->m_func);
    insertionPointInstr->InsertBefore(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    insertionPointInstr->InsertBefore(checkInstr);
    Legalize(checkInstr);

    // JA $allocHelper -- cursor would pass the end of the block, take the slow path
    IR::BranchInstr * branchToAllocHelperInstr = IR::BranchInstr::New(Js::OpCode::JA, allocHelperLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocHelperInstr);

    // MOV allocator->freeObjectList, nextMemBlock -- commit the bump allocation
    Lowerer::InsertMove(freeListOpnd, nextMemBlockOpnd, insertionPointInstr, false);

    // JMP $allocDone
    IR::BranchInstr * branchToAllocDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, allocDoneLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocDoneInstr);
}
  5083. #ifdef ENABLE_WASM
  5084. void
  5085. LowererMD::GenerateCopysign(IR::Instr * instr)
  5086. {
  5087. #if defined(_M_IX86)
  5088. // We should only generate this if sse2 is available
  5089. Assert(AutoSystemInfo::Data.SSE2Available());
  5090. #endif
  5091. // ANDPS reg0, absDoubleCst
  5092. // ANDPS reg1, sgnBitDoubleCst
  5093. // ORPS reg0, reg1
  5094. // Copy sign from src2 to src1
  5095. IR::Opnd* src1 = instr->GetSrc1();
  5096. IR::Opnd* src2 = instr->GetSrc2();
  5097. Assert(src1->IsFloat32() || src1->IsFloat64());
  5098. GenerateFloatAbs(src1->AsRegOpnd(), instr);
  5099. IR::MemRefOpnd *memRef = IR::MemRefOpnd::New(src2->IsFloat32() ? this->m_func->GetThreadContextInfo()->GetSgnFloatBitCst() : this->m_func->GetThreadContextInfo()->GetSgnDoubleBitCst(),
  5100. src2->GetType(), this->m_func, src2->IsFloat32() ? IR::AddrOpndKindDynamicFloatRef : IR::AddrOpndKindDynamicDoubleRef);
  5101. IR::Instr* t2 = IR::Instr::New(Js::OpCode::ANDPS, instr->GetSrc2(), instr->GetSrc2(), memRef, m_func);
  5102. instr->InsertBefore(t2);
  5103. Legalize(t2);
  5104. instr->m_opcode = Js::OpCode::ORPS;
  5105. Legalize(instr);
  5106. };
  5107. #endif //ENABLE_WASM
// Box the float64 value in opndFloat into dstOpnd as a Var, inserting code
// before instrInsert.
// !FLOATVAR build: materialize a JavascriptNumber object -- either reuse a
//   stack temp number sym (when instrOrig's dst is marked as a temp number)
//   or allocate from the number allocator -- then write vtable/type/value.
// FLOATVAR build: produce a tagged double directly by XOR-ing the raw bits
//   with FloatTag_Value (with NaN canonicalization first in asm.js mode).
// isHelper indicates whether this code is emitted on a helper path.
void
LowererMD::SaveDoubleToVar(IR::RegOpnd * dstOpnd, IR::RegOpnd *opndFloat, IR::Instr *instrOrig, IR::Instr *instrInsert, bool isHelper)
{
    Assert(opndFloat->GetType() == TyFloat64);

    // Call JSNumber::ToVar to save the float operand to the result of the original (var) instruction
#if !FLOATVAR
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());

    IR::Opnd * symVTableDst;
    IR::Opnd * symDblDst;
    IR::Opnd * symTypeDst;
    IR::Instr * newInstr;
    IR::Instr * numberInitInsertInstr = nullptr;
    if (instrOrig->dstIsTempNumber)
    {
        // Use the original dst to get the temp number sym
        StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrOrig->GetDst(), instrOrig->dstIsTempNumberTransferred);

        // LEA dst, &tempSym
        IR::SymOpnd * symTempSrc = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        IR::Instr * loadTempNumberInstr = IR::Instr::New(Js::OpCode::LEA, dstOpnd, symTempSrc, this->m_func);
        instrInsert->InsertBefore(loadTempNumberInstr);

        symVTableDst = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        symDblDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        if (this->m_lowerer->outerMostLoopLabel == nullptr)
        {
            // If we are not in loop, just insert in place
            numberInitInsertInstr = instrInsert;
        }
        else
        {
            // Otherwise, initialize in the outer most loop top if we haven't initialized it yet.
            // (nullptr means the vtable/type stores are skipped because they were already hoisted.)
            numberInitInsertInstr = this->m_lowerer->initializedTempSym->TestAndSet(tempNumberSym->m_id) ?
                nullptr : this->m_lowerer->outerMostLoopLabel;
        }
    }
    else
    {
        // Not a temp: allocate a fresh JavascriptNumber and initialize it in place.
        this->GenerateNumberAllocation(dstOpnd, instrInsert, isHelper);
        symVTableDst = IR::IndirOpnd::New(dstOpnd, 0, TyMachPtr, this->m_func);
        symDblDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        numberInitInsertInstr = instrInsert;
    }

    if (numberInitInsertInstr)
    {
        // Inline the case where the dst is marked as temp.
        IR::Opnd *jsNumberVTable = m_lowerer->LoadVTableValueOpnd(numberInitInsertInstr, VTableValue::VtableJavascriptNumber);

        // MOV dst->vtable, JavascriptNumber::vtable
        newInstr = IR::Instr::New(Js::OpCode::MOV, symVTableDst, jsNumberVTable, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);

        // MOV dst->type, JavascriptNumber_type
        IR::Opnd *typeOpnd = m_lowerer->LoadLibraryValueOpnd(numberInitInsertInstr, LibraryValue::ValueNumberTypeStatic);
        newInstr = IR::Instr::New(Js::OpCode::MOV, symTypeDst, typeOpnd, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
    }

    // MOVSD dst->value, opndFloat ; copy the float result to the temp JavascriptNumber
    newInstr = IR::Instr::New(Js::OpCode::MOVSD, symDblDst, opndFloat, this->m_func);
    instrInsert->InsertBefore(newInstr);
#else
    // s1 = MOVD opndFloat  -- raw double bits into a GPR
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::Instr *movd = IR::Instr::New(Js::OpCode::MOVD, s1, opndFloat, m_func);
    instrInsert->InsertBefore(movd);

    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        // In asm.js mode, canonicalize NaNs before tagging: if all exponent
        // bits are set and the mantissa is nonzero, replace with k_Nan.
        // s1 = MOVD src
        // tmp = NOT s1
        // tmp = AND tmp, 0x7FF0000000000000ull
        // test tmp, tmp
        // je helper
        // jmp done
        // helper:
        // tmp2 = AND s1, 0x000FFFFFFFFFFFFFull
        // test tmp2, tmp2
        // je done
        // s1 = JavascriptNumber::k_Nan
        // done:
        IR::RegOpnd *tmp = IR::RegOpnd::New(TyMachReg, m_func);
        IR::Instr * newInstr = IR::Instr::New(Js::OpCode::NOT, tmp, s1, m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::MakeDstEquSrc1(newInstr);

        newInstr = IR::Instr::New(Js::OpCode::AND, tmp, tmp, IR::AddrOpnd::New((Js::Var)0x7FF0000000000000, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);

        IR::LabelInstr* helper = Lowerer::InsertLabel(true, instrInsert);
        Lowerer::InsertTestBranch(tmp, tmp, Js::OpCode::BrEq_A, helper, helper);
        IR::LabelInstr* done = Lowerer::InsertLabel(isHelper, instrInsert);
        Lowerer::InsertBranch(Js::OpCode::Br, done, helper);

        IR::RegOpnd *tmp2 = IR::RegOpnd::New(TyMachReg, m_func);
        newInstr = IR::Instr::New(Js::OpCode::AND, tmp2, s1, IR::AddrOpnd::New((Js::Var)0x000FFFFFFFFFFFFFull, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        done->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);

        Lowerer::InsertTestBranch(tmp2, tmp2, Js::OpCode::BrEq_A, done, done);
        IR::Opnd * opndNaN = IR::AddrOpnd::New((Js::Var)Js::JavascriptNumber::k_Nan, IR::AddrOpndKindConstantVar, m_func, true);
        Lowerer::InsertMove(s1, opndNaN, done);
    }

    // s1 = XOR s1, FloatTag_Value  -- apply the float tag
    // dst = s1
    IR::Instr *setTag = IR::Instr::New(Js::OpCode::XOR,
        s1,
        s1,
        IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value,
            IR::AddrOpndKindConstantVar,
            this->m_func,
            /* dontEncode = */ true),
        this->m_func);
    IR::Instr *movDst = IR::Instr::New(Js::OpCode::MOV, dstOpnd, s1, this->m_func);
    instrInsert->InsertBefore(setTag);
    instrInsert->InsertBefore(movDst);
    LowererMD::Legalize(setTag);
#endif
}
  5221. void
  5222. LowererMD::EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr)
  5223. {
  5224. IR::LabelInstr *labelDone;
  5225. IR::Instr *instr;
  5226. labelDone = EmitLoadFloatCommon(dst, src, insertInstr, insertInstr->HasBailOutInfo());
  5227. if (labelDone == nullptr)
  5228. {
  5229. // We're done
  5230. insertInstr->Remove();
  5231. return;
  5232. }
  5233. // $Done note: insertAfter
  5234. insertInstr->InsertAfter(labelDone);
  5235. if (!insertInstr->HasBailOutInfo())
  5236. {
  5237. // $Done
  5238. insertInstr->Remove();
  5239. return;
  5240. }
  5241. Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
  5242. IR::LabelInstr *labelNoBailOut = nullptr;
  5243. IR::SymOpnd *tempSymOpnd = nullptr;
  5244. if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
  5245. {
  5246. if (!this->m_func->tempSymDouble)
  5247. {
  5248. this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
  5249. this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
  5250. }
  5251. // LEA r3, tempSymDouble
  5252. IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  5253. tempSymOpnd = IR::SymOpnd::New(this->m_func->tempSymDouble, TyFloat64, this->m_func);
  5254. instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, tempSymOpnd, this->m_func);
  5255. insertInstr->InsertBefore(instr);
  5256. // regBoolResult = to_number_fromPrimitive(value, &dst, allowUndef, scriptContext);
  5257. this->m_lowerer->LoadScriptContext(insertInstr);
  5258. IR::IntConstOpnd *allowUndefOpnd;
  5259. if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
  5260. {
  5261. allowUndefOpnd = IR::IntConstOpnd::New(true, TyInt32, this->m_func);
  5262. }
  5263. else
  5264. {
  5265. Assert(insertInstr->GetBailOutKind() == IR::BailOutNumberOnly);
  5266. allowUndefOpnd = IR::IntConstOpnd::New(false, TyInt32, this->m_func);
  5267. }
  5268. this->LoadHelperArgument(insertInstr, allowUndefOpnd);
  5269. this->LoadHelperArgument(insertInstr, reg3Opnd);
  5270. this->LoadHelperArgument(insertInstr, src);
  5271. IR::RegOpnd *regBoolResult = IR::RegOpnd::New(TyInt32, this->m_func);
  5272. instr = IR::Instr::New(Js::OpCode::CALL, regBoolResult, IR::HelperCallOpnd::New(IR::HelperOp_ConvNumber_FromPrimitive, this->m_func), this->m_func);
  5273. insertInstr->InsertBefore(instr);
  5274. this->lowererMDArch.LowerCall(instr, 0);
  5275. // TEST regBoolResult, regBoolResult
  5276. instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5277. instr->SetSrc1(regBoolResult);
  5278. instr->SetSrc2(regBoolResult);
  5279. insertInstr->InsertBefore(instr);
  5280. // JNE $noBailOut
  5281. labelNoBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5282. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelNoBailOut, this->m_func);
  5283. insertInstr->InsertBefore(instr);
  5284. }
  5285. // Bailout code
  5286. Assert(insertInstr->m_opcode == Js::OpCode::FromVar);
  5287. insertInstr->UnlinkDst();
  5288. insertInstr->FreeSrc1();
  5289. IR::Instr *bailoutInstr = insertInstr;
  5290. insertInstr = bailoutInstr->m_next;
  5291. this->m_lowerer->GenerateBailOut(bailoutInstr);
  5292. // $noBailOut
  5293. if (labelNoBailOut)
  5294. {
  5295. insertInstr->InsertBefore(labelNoBailOut);
  5296. Assert(dst->IsRegOpnd());
  5297. // MOVSD dst, [pResult].f64
  5298. instr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempSymOpnd, this->m_func);
  5299. insertInstr->InsertBefore(instr);
  5300. }
  5301. }
IR::LabelInstr*
LowererMD::EmitLoadFloatCommon(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, bool needHelperLabel)
{
    // Emits the common fast-path code to load a float out of a var-typed src:
    //   - float-const syms and immediate sources are loaded entirely inline,
    //     and nullptr is returned (the caller needs no slow path);
    //   - otherwise a checked inline load is emitted and the $Done label is
    //     returned so the caller can append its slow path after the $Helper
    //     label (emitted here iff needHelperLabel).
    IR::Instr *instr;
    Assert(src->GetType() == TyVar);
    Assert(dst->IsFloat());
    bool isFloatConst = false;
    IR::RegOpnd *regFloatOpnd = nullptr;

    if (src->IsRegOpnd() && src->AsRegOpnd()->m_sym->m_isFltConst)
    {
        IR::RegOpnd *regOpnd = src->AsRegOpnd();
        Assert(regOpnd->m_sym->m_isSingleDef);
        Js::Var value = regOpnd->m_sym->GetFloatConstValueAsVar_PostGlobOpt();
#if FLOATVAR
        // FLOATVAR build: copy the double payload into native code data and
        // reference it directly (in-proc) or via the code-data sym (OOP JIT).
        void *pDouble = (double*)NativeCodeDataNewNoFixup(this->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_EmitLoadFloatCommon_Double>, Js::JavascriptNumber::GetValue(value));
        IR::Opnd * doubleRef;
        if (!m_func->IsOOPJIT())
        {
            doubleRef = IR::MemRefOpnd::New(pDouble, TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        else
        {
            // OOP JIT: address the double as an offset from the native-code-data
            // base register; keep that sym live across back edges.
            int offset = NativeCodeData::GetDataTotalOffset(pDouble);
            doubleRef = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), offset, TyMachDouble,
#if DBG
                NativeCodeData::GetDataDescription(pDouble, m_func->m_alloc),
#endif
                m_func);
            GetLowerer()->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
#else
        // Boxed-number build: load the double straight out of the
        // JavascriptNumber object at its value offset.
        IR::MemRefOpnd *doubleRef = IR::MemRefOpnd::New((BYTE*)value + Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func,
            IR::AddrOpndKindDynamicDoubleRef);
#endif
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regFloatOpnd, doubleRef, this->m_func);
        insertInstr->InsertBefore(instr);
        Legalize(instr);
        isFloatConst = true;
    }
    // Src is constant?
    if (src->IsImmediateOpnd() || src->IsFloatConstOpnd())
    {
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        m_lowerer->LoadFloatFromNonReg(src, regFloatOpnd, insertInstr);
        isFloatConst = true;
    }
    if (isFloatConst)
    {
        if (dst->GetType() == TyFloat32)
        {
            // CVTSD2SS regOpnd32.f32, regOpnd.f64 -- Convert regOpnd from f64 to f32
            IR::RegOpnd *regOpnd32 = regFloatOpnd->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
            instr = IR::Instr::New(Js::OpCode::CVTSD2SS, regOpnd32, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);

            // MOVSS dst, regOpnd32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, regOpnd32, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, regOpnd
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        // Constant was handled fully inline; no $Done label is needed.
        return nullptr;
    }
    Assert(src->IsRegOpnd());

    IR::LabelInstr *labelStore = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    if (needHelperLabel)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    else
    {
        // No helper path: failing the inline checks just jumps to $Done.
        labelHelper = labelDone;
    }
    bool const isFloat32 = dst->GetType() == TyFloat32;
    // For f32 (or non-register) dsts, load into a fresh f64 temp first.
    IR::RegOpnd *reg2 = ((isFloat32 || !dst->IsRegOpnd()) ? IR::RegOpnd::New(TyMachDouble, this->m_func) : dst->AsRegOpnd());

    // Load the float value in reg2
    this->lowererMDArch.LoadCheckedFloat(src->AsRegOpnd(), reg2, labelStore, labelHelper, insertInstr, needHelperLabel);

    // $Store
    insertInstr->InsertBefore(labelStore);
    if (isFloat32)
    {
        IR::RegOpnd *reg2_32 = reg2->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();

        // CVTSD2SS r2_32.f32, r2.f64 -- Convert regOpnd from f64 to f32
        instr = IR::Instr::New(Js::OpCode::CVTSD2SS, reg2_32, reg2, this->m_func);
        insertInstr->InsertBefore(instr);

        // MOVSS dst, r2_32
        instr = IR::Instr::New(Js::OpCode::MOVSS, dst, reg2_32, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    else if (reg2 != dst)
    {
        // MOVSD dst, r2
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // JMP $Done
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    insertInstr->InsertBefore(instr);

    if (needHelperLabel)
    {
        // $Helper -- the caller emits its slow path here, before $Done.
        insertInstr->InsertBefore(labelHelper);
    }
    return labelDone;
}
void
LowererMD::EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
{
    // Loads a float from a var-typed src, using a full conversion helper
    // (Op_ConvFloat/Op_ConvNumber) as the slow path. If instrBailOut carries
    // BailOutOnArrayAccessHelperCall, the slow path instead branches straight
    // to labelBailOut.
    IR::LabelInstr *labelDone;
    IR::Instr *instr;

    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, true);
    if (labelDone == nullptr)
    {
        // We're done
        return;
    }

    IR::BailOutKind bailOutKind = instrBailOut && instrBailOut->HasBailOutInfo() ? instrBailOut->GetBailOutKind() : IR::BailOutInvalid;

    if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
    {
        // Bail out instead of making the helper call.
        Assert(labelBailOut);
        m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, insertInstr);
        insertInstr->InsertBefore(labelDone);
        return;
    }

    // The helper writes its result through a pointer, so a register dst needs
    // a stack slot to receive the value.
    IR::Opnd *memAddress = dst;
    if (dst->IsRegOpnd())
    {
        // Create an f64 stack location to store the result of the helper.
        IR::SymOpnd *symOpnd = IR::SymOpnd::New(StackSym::New(dst->GetType(), this->m_func), dst->GetType(), this->m_func);
        this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(double));
        memAddress = symOpnd;
    }

    // LEA r3, dst
    IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, memAddress, this->m_func);
    insertInstr->InsertBefore(instr);

    // to_number_full(value, &dst, scriptContext);
    // Create dummy binary op to convert into helper
    instr = IR::Instr::New(Js::OpCode::Add_A, this->m_func);
    instr->SetSrc1(src);
    instr->SetSrc2(reg3Opnd);
    insertInstr->InsertBefore(instr);

    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        // Propagate the implicit-call bailout onto the helper call; if the
        // original instr owns its bailout info, split out a shared bail target.
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // Pick the helper matching the destination precision.
    IR::JnHelperMethod helper;
    if (dst->GetType() == TyFloat32)
    {
        helper = IR::HelperOp_ConvFloat_Helper;
    }
    else
    {
        helper = IR::HelperOp_ConvNumber_Helper;
    }
    this->m_lowerer->LowerBinaryHelperMem(instr, helper);

    if (dst->IsRegOpnd())
    {
        // Copy the helper's result from the stack slot into the register dst.
        if (dst->GetType() == TyFloat32)
        {
            // MOVSS dst, r32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, [pResult].f64
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }

    // $Done
    insertInstr->InsertBefore(labelDone);
}
void
LowererMD::LowerInt4NegWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Neg_I4 to NEG and emits the conditional branches into the bailout
    // block that immediately follows (instr->m_next == bailOutLabel):
    //   JO  $bailOut  - signed overflow (e.g. negating INT32_MIN)
    //   JEQ $bailOut  - zero result, when BailOutOnNegativeZero is requested
    //   JMP $skipBailOut otherwise
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Neg_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));

    // Lower the instruction
    instr->m_opcode = Js::OpCode::NEG;
    Legalize(instr);

    if(bailOutKind & IR::BailOutOnOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
    {
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
    }
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // A zero result means the source was 0, so the true result is -0,
        // which an int32 cannot represent; bail out on the zero flag.
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, bailOutLabel, instr->m_func));
    }
    // Skip bailout
    bailOutLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
void
LowererMD::LowerInt4AddWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Add_I4 to ADD with an overflow check. Because the ADD may have
    // clobbered dst before the bailout is taken, instructions are inserted at
    // the head of the bailout block to restore dst to its pre-add value so the
    // bailout sees the original operands.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Add_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    // Bitwise &/| on bools here is deliberate (no short-circuiting needed).
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 & dstEquSrc2)
    {
        // We have:
        //     s1 += s1
        // Which is equivalent to:
        //     s1 <<= 1
        //
        // These overflow a signed 32-bit integer when for the initial s1:
        //     s1 > 0 && (s1 & 0x40000000)  - result is negative after overflow
        //     s1 < 0 && !(s1 & 0x40000000) - result is nonnegative after overflow
        //
        // To restore s1 to its value before the operation, we first do an arithmetic right-shift by one bit to undo the
        // left-shift and preserve the sign of the result after overflow. Since the result after overflow always has the
        // opposite sign from the operands (hence the overflow), we just need to invert the sign of the result. The following
        // restores s1 to its value before the instruction:
        //     s1 = (s1 >> 1) ^ 0x80000000
        //
        // Generate:
        //     sar s1, 1
        //     xor s1, 0x80000000
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::SAR,
                dst,
                dst,
                IR::IntConstOpnd::New(1, TyInt8, instr->m_func),
                instr->m_func)
            );
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::XOR,
                dst,
                dst,
                IR::IntConstOpnd::New(INT32_MIN, TyInt32, instr->m_func, true /* dontEncode */),
                instr->m_func)
            );
    }
    else if(dstEquSrc1 | dstEquSrc2)
    {
        // We have:
        //     s1 += s2
        // Or:
        //     s1 = s2 + s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 -= s2
        //
        // Generate:
        //     sub s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        bailOutLabel->InsertAfter(IR::Instr::New(Js::OpCode::SUB, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }

    // Lower the instruction
    ChangeToAdd(instr, true /* needFlags */);
    Legalize(instr);

    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));

    // Fall through to bailOutLabel
}
void
LowererMD::LowerInt4SubWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Sub_I4 to SUB with an overflow check, restoring dst's pre-sub
    // value at the head of the bailout block when dst aliases exactly one
    // source. (dst == src1 == src2 means s1 - s1 == 0, which cannot overflow,
    // so no restore code is needed in that case.)
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Sub_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    // XOR: restore only when dst aliases exactly one of the two sources.
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 ^ dstEquSrc2)
    {
        // We have:
        //     s1 -= s2
        // Or:
        //     s1 = s2 - s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 += s2
        // Or:
        //     s1 = s2 - s1
        //
        // Generate:
        //     neg s1 - only for second case
        //     add s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        if(dstEquSrc2)
        {
            startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func));
        }
        startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }

    // Lower the instruction
    ChangeToSub(instr, true /* needFlags */);
    Legalize(instr);

    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));

    // Fall through to bailOutLabel
}
  5671. bool
  5672. LowererMD::GenerateSimplifiedInt4Mul(
  5673. IR::Instr *const mulInstr,
  5674. const IR::BailOutKind bailOutKind,
  5675. IR::LabelInstr *const bailOutLabel)
  5676. {
  5677. if (AutoSystemInfo::Data.IsAtomPlatform())
  5678. {
  5679. // On Atom, always optimize unless phase is off
  5680. if (PHASE_OFF(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) ||
  5681. PHASE_OFF(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5682. return false;
  5683. }
  5684. else
  5685. {
  5686. // On other platforms, don't optimize unless phase is forced
  5687. if (!PHASE_FORCE(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) &&
  5688. !PHASE_FORCE(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5689. return false;
  5690. }
  5691. Assert(mulInstr);
  5692. Assert(mulInstr->m_opcode == Js::OpCode::Mul_I4);
  5693. IR::Instr *instr = mulInstr, *nextInstr;
  5694. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5695. if (!src1->IsIntConstOpnd() && !src2->IsIntConstOpnd())
  5696. return false;
  5697. // if two const operands, GlobOpt would have folded the computation
  5698. Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd()));
  5699. Assert(dst->IsRegOpnd());
  5700. const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
  5701. const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
  5702. const auto constSrcValue = constSrc->AsIntConstOpnd()->AsInt32();
  5703. auto nonConstSrcCopy = nonConstSrc;
  5704. Assert(nonConstSrc->IsRegOpnd());
  5705. bool doOVF = bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
  5706. // don't simplify mul by large numbers with OF check
  5707. if (doOVF && (constSrcValue > 3 || constSrcValue < -3))
  5708. return false;
  5709. switch(constSrcValue)
  5710. {
  5711. case -3:
  5712. case 3:
  5713. // if dst = src, we need to have a copy of the src for the ADD/SUB
  5714. if (dst->IsEqual(nonConstSrc))
  5715. {
  5716. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5717. // MOV
  5718. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5719. }
  5720. instr->UnlinkSrc1();
  5721. instr->UnlinkSrc2();
  5722. // SHL
  5723. instr->m_opcode = Js::OpCode::SHL;
  5724. instr->SetSrc1(nonConstSrc);
  5725. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5726. constSrc->Free(instr->m_func);
  5727. Legalize(instr);
  5728. // JO
  5729. if (doOVF)
  5730. {
  5731. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5732. instr->InsertAfter(nextInstr);
  5733. instr = nextInstr;
  5734. }
  5735. // ADD
  5736. nextInstr = IR::Instr::New(Js::OpCode::ADD, dst, dst, nonConstSrcCopy, instr->m_func);
  5737. instr->InsertAfter(nextInstr);
  5738. instr = nextInstr;
  5739. Legalize(instr);
  5740. if (constSrcValue == -3)
  5741. {
  5742. // JO
  5743. if (doOVF)
  5744. {
  5745. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5746. instr->InsertAfter(nextInstr);
  5747. instr = nextInstr;
  5748. }
  5749. // NEG
  5750. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5751. instr->InsertAfter(nextInstr);
  5752. instr = nextInstr;
  5753. Legalize(instr);
  5754. }
  5755. // last JO inserted by caller
  5756. return true;
  5757. case -2:
  5758. case 2:
  5759. instr->UnlinkSrc1();
  5760. instr->UnlinkSrc2();
  5761. // SHL
  5762. instr->m_opcode = Js::OpCode::SHL;
  5763. instr->SetSrc1(nonConstSrc);
  5764. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5765. constSrc->Free(instr->m_func);
  5766. Legalize(instr);
  5767. if (constSrcValue == -2)
  5768. {
  5769. // JO
  5770. if (doOVF)
  5771. {
  5772. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5773. instr->InsertAfter(nextInstr);
  5774. instr = nextInstr;
  5775. }
  5776. // NEG
  5777. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5778. instr->InsertAfter(nextInstr);
  5779. instr = nextInstr;
  5780. Legalize(instr);
  5781. }
  5782. // last JO inserted by caller
  5783. return true;
  5784. case -1:
  5785. instr->UnlinkSrc1();
  5786. instr->UnlinkSrc2();
  5787. // NEG
  5788. instr->m_opcode = Js::OpCode::NEG;
  5789. instr->SetSrc1(nonConstSrc);
  5790. constSrc->Free(instr->m_func);
  5791. Legalize(instr);
  5792. // JO inserted by caller
  5793. return true;
  5794. case 0:
  5795. instr->FreeSrc1();
  5796. instr->FreeSrc2();
  5797. // MOV
  5798. instr->m_opcode = Js::OpCode::MOV;
  5799. instr->SetSrc1(IR::IntConstOpnd::New((IntConstType) 0, TyInt32, instr->m_func));
  5800. Legalize(instr);
  5801. // JO inserted by caller are removed in later phases
  5802. return true;
  5803. case 1:
  5804. instr->UnlinkSrc1();
  5805. instr->UnlinkSrc2();
  5806. // MOV
  5807. instr->m_opcode = Js::OpCode::MOV;
  5808. instr->SetSrc1(nonConstSrc);
  5809. constSrc->Free(instr->m_func);
  5810. Legalize(instr);
  5811. // JO inserted by caller are removed in later phases
  5812. return true;
  5813. default:
  5814. // large numbers with no OF check
  5815. Assert(!doOVF);
  5816. // 2^i
  5817. // -2^i
  5818. if (Math::IsPow2(constSrcValue) || Math::IsPow2(-constSrcValue))
  5819. {
  5820. uint32 shamt = constSrcValue > 0 ? Math::Log2(constSrcValue) : Math::Log2(-constSrcValue);
  5821. instr->UnlinkSrc1();
  5822. instr->UnlinkSrc2();
  5823. // SHL
  5824. instr->m_opcode = Js::OpCode::SHL;
  5825. instr->SetSrc1(nonConstSrc);
  5826. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5827. constSrc->Free(instr->m_func);
  5828. Legalize(instr);
  5829. if (constSrcValue < 0)
  5830. {
  5831. // NEG
  5832. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5833. instr->InsertAfter(nextInstr);
  5834. Legalize(instr);
  5835. }
  5836. return true;
  5837. }
  5838. // 2^i + 1
  5839. // 2^i - 1
  5840. if (Math::IsPow2(constSrcValue - 1) || Math::IsPow2(constSrcValue + 1))
  5841. {
  5842. bool plusOne = Math::IsPow2(constSrcValue - 1);
  5843. uint32 shamt = plusOne ? Math::Log2(constSrcValue - 1) : Math::Log2(constSrcValue + 1);
  5844. if (dst->IsEqual(nonConstSrc))
  5845. {
  5846. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5847. // MOV
  5848. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5849. }
  5850. instr->UnlinkSrc1();
  5851. instr->UnlinkSrc2();
  5852. // SHL
  5853. instr->m_opcode = Js::OpCode::SHL;
  5854. instr->SetSrc1(nonConstSrc);
  5855. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5856. constSrc->Free(instr->m_func);
  5857. Legalize(instr);
  5858. // ADD/SUB
  5859. nextInstr = IR::Instr::New(plusOne ? Js::OpCode::ADD : Js::OpCode::SUB, dst, dst, nonConstSrcCopy, instr->m_func);
  5860. instr->InsertAfter(nextInstr);
  5861. instr = nextInstr;
  5862. Legalize(instr);
  5863. return true;
  5864. }
  5865. return false;
  5866. }
  5867. }
void
LowererMD::LowerInt4MulWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Mul_I4 (attempting strength reduction first, else IMUL), emitting
    // overflow checks and/or a negative-zero check routed into the bailout
    // block that immediately follows the instruction.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Mul_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    IR::LabelInstr *checkForNegativeZeroLabel = nullptr;
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // We have:
        //     s3 = s1 * s2
        //
        // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
        //     bailOut = (s1 < 0 || s2 < 0) (either s1 or s2 has to be zero for the result to be zero, so we don't emit zero checks)
        //
        // Note, however, that if in future we decide to ignore mul overflow in some cases, and overflow occurs with one of the operands as negative,
        // this can lead to bailout. Will handle that case if ever we decide to ignore mul overflow.
        //
        // Generate:
        //   $checkForNegativeZeroLabel:
        //     test s1, s1
        //     js $bailOutLabel
        //     test s2, s2
        //     jns $skipBailOutLabel
        //     (fall through to bail out)
        const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
        Assert(dst->IsRegOpnd());
        Assert(!src1->IsEqual(src2)); // cannot result in -0 if both operands are the same; GlobOpt should have figured that out

        checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        bailOutLabel->InsertBefore(checkForNegativeZeroLabel);

        if(src1->IsIntConstOpnd() || src2->IsIntConstOpnd())
        {
            Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd())); // if this results in -0, GlobOpt should have avoided type specialization

            // Only the non-const operand's sign needs to be examined.
            const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
            const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
            Assert(nonConstSrc->IsRegOpnd());

            const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(nonConstSrc);
            newInstr->SetSrc2(nonConstSrc);
            bailOutLabel->InsertBefore(newInstr);

            const auto constSrcValue = constSrc->AsIntConstOpnd()->GetValue();
            if(constSrcValue == 0)
            {
                // const is 0: bail only if the other operand is negative.
                bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
            }
            else
            {
                Assert(constSrcValue < 0); // cannot result in -0 if one operand is positive; GlobOpt should have figured that out
                // negative const: result is -0 only when the other operand is 0.
                bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));
            }
        }
        else
        {
            auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(src1);
            newInstr->SetSrc2(src1);
            bailOutLabel->InsertBefore(newInstr);
            bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JSB, bailOutLabel, instr->m_func));
            newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(src2);
            newInstr->SetSrc2(src2);
            bailOutLabel->InsertBefore(newInstr);
            bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
        }

        // Fall through to bailOutLabel
    }

    const bool needsOverflowCheck =
        bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
    AssertMsg(!instr->ShouldCheckForNon32BitOverflow() || (needsOverflowCheck && instr->ShouldCheckForNon32BitOverflow()), "Non 32-bit overflow check required without bailout info");

    bool simplifiedMul = LowererMD::GenerateSimplifiedInt4Mul(instr, bailOutKind, bailOutLabel);

    // Lower the instruction
    if (!simplifiedMul)
    {
        LowererMD::ChangeToIMul(instr, needsOverflowCheck);
    }

    // Post-mul checks go before the negative-zero block if one was emitted.
    const auto insertBeforeInstr = checkForNegativeZeroLabel ? checkForNegativeZeroLabel : bailOutLabel;

    if(needsOverflowCheck)
    {
        // do we care about int32 or non-int32 overflow ?
        if (!simplifiedMul && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
            LowererMD::EmitNon32BitOvfCheck(instr, insertBeforeInstr, bailOutLabel);
        else
            insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
    }

    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // On zero, branch to determine whether the result would be -0
        Assert(checkForNegativeZeroLabel);
        const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        const auto dst = instr->GetDst();
        newInstr->SetSrc1(dst);
        newInstr->SetSrc2(dst);
        insertBeforeInstr->InsertBefore(newInstr);
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, checkForNegativeZeroLabel, instr->m_func));
    }

    // Skip bailout
    insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
void
LowererMD::LowerInt4RemWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    // Lowers Rem_I4, bailing out when the result would be -0 (i.e. result is
    // zero and the dividend is negative).
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnNegativeZero);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    // GenerateSimplifiedInt4Rem may emit a power-of-two fast path that already
    // proves s1 >= 0, letting us skip the second sign test below.
    bool fastPath = m_lowerer->GenerateSimplifiedInt4Rem(instr, skipBailOutLabel);

    // We have:
    //     s3 = s1 % s2
    //
    // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
    //     bailOut = (s3 == 0 && s1 < 0)
    //
    // Generate:
    //   $checkForNegativeZeroLabel:
    //     test s3, s3
    //     jne $skipBailOutLabel
    //     test s1, s1
    //     jns $skipBailOutLabel
    //     (fall through to bail out)
    IR::Opnd *dst = instr->GetDst(), *src1 = instr->GetSrc1();
    Assert(dst->IsRegOpnd());

    IR::Instr * newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
    newInstr->SetSrc1(dst);
    newInstr->SetSrc2(dst);
    bailOutLabel->InsertBefore(newInstr);
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));

    // Fast path already checks if s1 >= 0
    if (!fastPath)
    {
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        bailOutLabel->InsertBefore(newInstr);
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
    }
    // Fall through to bailOutLabel

    // Lower the instruction
    LowererMDArch::EmitInt4Instr(instr);
}
  6028. IR::Instr *
  6029. LowererMD::LoadFloatZero(IR::Opnd * opndDst, IR::Instr * instrInsert)
  6030. {
  6031. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOVSD_ZERO, opndDst, instrInsert->m_func);
  6032. instrInsert->InsertBefore(instr);
  6033. return instr;
  6034. }
IR::Instr *
LowererMD::LoadFloatValue(IR::Opnd * opndDst, double value, IR::Instr * instrInsert)
{
    // Loads the floating-point constant 'value' into opndDst (float64 or float32),
    // inserting all code before instrInsert. Returns the final load instruction.
    if (value == 0.0 && !Js::JavascriptNumber::IsNegZero(value))
    {
        // zero can be loaded with "XORPS xmm, xmm" rather than needing memory load
        return LoadFloatZero(opndDst, instrInsert);
    }

    IR::Opnd * opnd;
    // Store the constant in the native-code data area so generated code can reference it.
    void* pValue = nullptr;
    bool isFloat64 = opndDst->IsFloat64();
    if (isFloat64)
    {
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_LoadFloatValue_Double>, value);
    }
    else
    {
        Assert(opndDst->IsFloat32());
        pValue = (float*)NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), FloatType<DataDesc_LowererMD_LoadFloatValue_Float>, (float)value);
    }

    if (!instrInsert->m_func->IsOOPJIT())
    {
        // In-process JIT: the data's address is valid in the target process; use a direct MemRef.
        opnd = IR::MemRefOpnd::New((void*)pValue, isFloat64 ? TyMachDouble : TyFloat32,
            instrInsert->m_func, isFloat64 ? IR::AddrOpndKindDynamicDoubleRef : IR::AddrOpndKindDynamicFloatRef);
    }
    else // OOP JIT
    {
        // Out-of-process JIT: load the base address of the native-code data block,
        // then address the constant by its offset within that block.
        int offset = NativeCodeData::GetDataTotalOffset(pValue);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
        Lowerer::InsertMove(
            addressRegOpnd,
            IR::MemRefOpnd::New(instrInsert->m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, instrInsert->m_func, IR::AddrOpndKindDynamicNativeCodeDataRef),
            instrInsert);
        opnd = IR::IndirOpnd::New(addressRegOpnd, offset, isFloat64 ? TyMachDouble : TyFloat32,
#if DBG
            NativeCodeData::GetDataDescription(pValue, instrInsert->m_func->m_alloc),
#endif
            instrInsert->m_func);
    }

    // movsd xmm, [reg+offset]
    IR::Instr * instr = IR::Instr::New(LowererMDArch::GetAssignOp(opndDst->GetType()), opndDst, opnd, instrInsert->m_func);
    instrInsert->InsertBefore(instr);
    Legalize(instr);
    return instr;
}
IR::Instr *
LowererMD::EnsureAdjacentArgs(IR::Instr * instrArg)
{
    // Ensure that the arg instructions for a given call site are adjacent.
    // This isn't normally desirable for CQ, but it's required by, for instance, the cloner,
    // which must clone a complete call sequence.
    //   instrArg: the last arg instruction of the sequence; the chain is walked
    //             backwards through each instruction's src2 link.
    // Returns the instruction immediately preceding the (possibly moved) StartCall.
    IR::Opnd * opnd = instrArg->GetSrc2();
    IR::Instr * instrNextArg;
    StackSym * sym;

    AssertMsg(opnd, "opnd");
    // Walk the arg chain: each sym operand's def is the previous arg instruction;
    // sink it so it sits immediately before its use.
    while (opnd->IsSymOpnd())
    {
        sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
        instrNextArg = sym->m_instrDef;
        Assert(instrNextArg);
        instrNextArg->SinkInstrBefore(instrArg);
        instrArg = instrNextArg;
        opnd = instrArg->GetSrc2();
    }

    // The chain terminates at a reg operand defined by the StartCall.
    sym = opnd->AsRegOpnd()->m_sym;
    instrNextArg = sym->m_instrDef;
    Assert(instrNextArg && instrNextArg->m_opcode == Js::OpCode::StartCall);

    // The StartCall can be trivially moved down.
    if (instrNextArg->m_next != instrArg)
    {
        instrNextArg->UnlinkStartCallFromBailOutInfo(instrArg);
        instrNextArg->Unlink();
        instrArg->InsertBefore(instrNextArg);
    }
    return instrNextArg->m_prev;
}
  6111. #if INT32VAR
  6112. //
  6113. // Convert an int32 to Var representation.
  6114. //
  6115. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  6116. {
  6117. AssertMsg(TySize[opndSrc->GetType()] == MachPtr, "For this to work it should be a 64-bit register");
  6118. IR::Instr* instr = IR::Instr::New(Js::OpCode::BTS, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  6119. insertInstr->InsertBefore(instr);
  6120. }
  6121. //
  6122. // jump to $labelHelper, based on the result of CMP
  6123. //
void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
{
    // Tests whether opndSrc holds a tagged small int by comparing its tag bits
    // against AtomTag, then branches to labelHelper:
    //   fContinueLabel == false: JNE (branch when it is NOT a tagged int).
    //   fContinueLabel == true:  JEQ (branch when it IS a tagged int).
    //   instrFirst (optional out): receives the first instruction emitted.
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
#ifdef SHIFTLOAD
    // s1 = SHLD src1, 16 - Shift top 16-bits of src1 to s1
    IR::Instr* instr = IR::Instr::New(Js::OpCode::SHLD, opndReg, opndSrc, IR::IntConstOpnd::New(16, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);
    if (instrFirst)
    {
        *instrFirst = instr;
    }
    // CMP s1.i16, AtomTag.i16
    IR::Opnd *opndReg16 = opndReg->Copy(m_func);
    opndReg16->SetType(TyInt16);
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndReg16);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt16, this->m_func, /* dontEncode = */ true));
    insertInstr->InsertBefore(instr);
#else
    // s1 = MOV src1 - Move to a temporary
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
    if (instrFirst)
    {
        *instrFirst = instr;
    }
    // s1 = SHR s1, VarTag_Shift - isolate the tag bits.
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);
    // CMP s1, AtomTag
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndReg);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func, /* dontEncode = */ true));
    insertInstr->InsertBefore(instr);
#endif
    if(fContinueLabel)
    {
        // JEQ $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
    }
    else
    {
        // JNE $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
    }
    insertInstr->InsertBefore(instr);
}
  6172. //
  6173. // If lower 32-bits are zero (value is zero), jump to $helper.
  6174. //
  6175. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  6176. {
  6177. // Cast the var to 32 bit integer.
  6178. if(opndSrc->GetSize() != 4)
  6179. {
  6180. opndSrc = opndSrc->UseWithNewType(TyUint32, this->m_func);
  6181. }
  6182. AssertMsg(TySize[opndSrc->GetType()] == 4, "This technique works only on the 32-bit version");
  6183. // TEST src1, src1
  6184. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  6185. instr->SetSrc1(opndSrc);
  6186. instr->SetSrc2(opndSrc);
  6187. insertInstr->InsertBefore(instr);
  6188. if(labelHelper != nullptr)
  6189. {
  6190. // JZ $labelHelper
  6191. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  6192. insertInstr->InsertBefore(instr);
  6193. }
  6194. }
  6195. //
  6196. // If top 16 bits are not zero i.e. it is NOT object, jump to $helper.
  6197. //
bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    // Tests whether opndSrc is an object pointer (shifting out the tag bits
    // leaves zero) and branches to labelTarget:
    //   fContinueLabel == true:  branch when it IS an object.
    //   fContinueLabel == false: branch when it is NOT an object.
    // Returns false when the answer is statically known and no test was emitted.
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");

    if (opndSrc->IsTaggedValue() && fContinueLabel)
    {
        // Insert delete branch opcode to tell the dbChecks not to assert on the helper label we may fall through into
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);
        return false;
    }
    else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
    {
        return false;
    }

    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    // s1 = MOV src1 - Move to a temporary
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
    // s1 = SHR s1, VarTag_Shift - zero iff the value carries no tag bits (i.e. it is a pointer).
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);

    if (fContinueLabel)
    {
        // JEQ $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        // Fall-through is the non-object case; mark it as a helper block.
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // JNZ $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    return true;
}
  6235. #else
  6236. //
  6237. // Convert an int32 value to a Var.
  6238. //
  6239. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  6240. {
  6241. // SHL r1, AtomTag
  6242. IR::Instr * instr = IR::Instr::New(Js::OpCode::SHL, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func), this->m_func);
  6243. insertInstr->InsertBefore(instr);
  6244. // INC r1
  6245. instr = IR::Instr::New(Js::OpCode::INC, opndSrc, opndSrc, this->m_func);
  6246. insertInstr->InsertBefore(instr);
  6247. }
  6248. //
  6249. // jump to $labelHelper, based on the result of TEST
  6250. //
  6251. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
  6252. {
  6253. if (opndSrc->IsTaggedInt() && !fContinueLabel)
  6254. {
  6255. return;
  6256. }
  6257. else if (opndSrc->IsNotTaggedValue() && fContinueLabel)
  6258. {
  6259. return;
  6260. }
  6261. // TEST src1, AtomTag
  6262. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  6263. instr->SetSrc1(opndSrc);
  6264. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
  6265. insertInstr->InsertBefore(instr);
  6266. if (instrFirst)
  6267. {
  6268. *instrFirst = instr;
  6269. }
  6270. if(fContinueLabel)
  6271. {
  6272. // JNE $labelHelper
  6273. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  6274. }
  6275. else
  6276. {
  6277. // JEQ $labelHelper
  6278. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  6279. }
  6280. insertInstr->InsertBefore(instr);
  6281. }
  6282. //
  6283. // If value is zero in tagged int representation, jump to $labelHelper.
  6284. //
  6285. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  6286. {
  6287. if (opndSrc->IsNotTaggedValue())
  6288. {
  6289. return;
  6290. }
  6291. // CMP src1, AtomTag
  6292. IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  6293. instr->SetSrc1(opndSrc);
  6294. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func));
  6295. insertInstr->InsertBefore(instr);
  6296. // JEQ $helper
  6297. if(labelHelper != nullptr)
  6298. {
  6299. // JEQ $labelHelper
  6300. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  6301. insertInstr->InsertBefore(instr);
  6302. }
  6303. }
  6304. //
  6305. // If not object, jump to $labelHelper.
  6306. //
bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    // Tests whether opndSrc is an object (tag bit clear) and branches to labelTarget:
    //   fContinueLabel == true:  branch when it IS an object.
    //   fContinueLabel == false: branch when it is NOT an object (tagged int).
    // Returns false when the answer is statically known and no test was emitted.
    if (opndSrc->IsTaggedInt() && fContinueLabel)
    {
        // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);
        return false;
    }
    else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
    {
        return false;
    }

    // TEST src1, AtomTag - ZF set means the tag bit is clear (an object).
    IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(opndSrc);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
    insertInstr->InsertBefore(instr);

    if (fContinueLabel)
    {
        // JEQ $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        // Fall-through is the non-object case; mark it as a helper block.
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // JNE $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    return true;
}
  6341. #endif
bool LowererMD::GenerateJSBooleanTest(IR::RegOpnd * regSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    // Emits a vtable comparison against JavascriptBoolean for regSrc:
    //   fContinueLabel == true:  branch to labelTarget when it IS a boolean.
    //   fContinueLabel == false: branch to labelTarget when it is NOT a boolean.
    // Returns false when the value type is statically boolean and no test was
    // emitted (only an unconditional jump, when requested).
    IR::Instr* instr;
    if (regSrc->GetValueType().IsBoolean())
    {
        if (fContinueLabel)
        {
            // JMP $labelTarget
            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelTarget, this->m_func);
            insertInstr->InsertBefore(instr);
#if DBG
            if (labelTarget->isOpHelper)
            {
                // Jumping unconditionally into a helper label; suppress the dbChecks assert.
                labelTarget->m_noHelperAssert = true;
            }
#endif
        }
        return false;
    }

    // CMP src1, vtable<JavaScriptBoolean>
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    IR::IndirOpnd *vtablePtrOpnd = IR::IndirOpnd::New(regSrc, 0, TyMachPtr, this->m_func);
    instr->SetSrc1(vtablePtrOpnd);
    IR::Opnd *jsBooleanVTable = m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptBoolean);
    instr->SetSrc2(jsBooleanVTable);
    insertInstr->InsertBefore(instr);
    Legalize(instr);

    if (fContinueLabel)
    {
        // JEQ $labelTarget
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        // Fall-through is the non-boolean case; mark it as a helper block.
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // JNE $labelTarget
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    return true;
}
  6385. #if FLOATVAR
  6386. //
  6387. // If any of the top 14 bits are not set, then the var is not a float value and hence, jump to $labelHelper.
  6388. //
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Jumps to labelHelper when opndSrc does not hold a tagged float: shifting
    // right by 50 isolates the top 14 tag bits, and a zero result means the var
    // is not a float value. No code is emitted when the value type statically
    // guarantees a float. (checkForNullInLoopBody is unused in this variant.)
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }

    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
    // s1 = MOV src1 - Move to a temporary
    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
    // s1 = SHR s1, 50
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(50, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);
    // JZ $helper
    instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
IR::RegOpnd* LowererMD::CheckFloatAndUntag(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper)
{
    // Verifies opndSrc holds a tagged float - jumping to labelHelper when not -
    // then strips the float tag and returns a fresh float64 register with the
    // untagged double bits. The tag test is skipped when the value type already
    // guarantees a float.
    IR::Opnd* floatTag = IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value, IR::AddrOpndKindConstantVar, this->m_func, /* dontEncode = */ true);
    IR::RegOpnd* regOpndFloatTag = IR::RegOpnd::New(TyUint64, this->m_func);

    // MOV floatTagReg, FloatTag_Value
    IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, regOpndFloatTag, floatTag, this->m_func);
    insertInstr->InsertBefore(instr);

    if (!opndSrc->GetValueType().IsFloat())
    {
        // TEST s1, floatTagReg - ZF set means no tag bits present, i.e. not a float.
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc);
        instr->SetSrc2(regOpndFloatTag);
        insertInstr->InsertBefore(instr);
        // JZ $helper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // untaggedFloat = XOR floatTagReg, s1 // where untaggedFloat == floatTagReg; use floatTagReg temporarily for the untagged float
    IR::RegOpnd* untaggedFloat = regOpndFloatTag;
    instr = IR::Instr::New(Js::OpCode::XOR, untaggedFloat, regOpndFloatTag, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);

    // MOVD xmm, untaggedFloat - transfer the raw bits into a float register.
    IR::RegOpnd *floatReg = IR::RegOpnd::New(TyMachDouble, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVD, floatReg, untaggedFloat, this->m_func);
    insertInstr->InsertBefore(instr);
    return floatReg;
}
  6434. #else
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Jumps to labelHelper unless opndSrc points to a JavascriptNumber (checked
    // via its vtable). No code is emitted when the value type statically
    // guarantees a float.
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }

    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
    if(checkForNullInLoopBody && m_func->IsLoopBody())
    {
        // It's possible that the value was determined dead by the jitted function and was not restored. The jitted loop
        // body may not realize that it's dead and may try to use it. Check for null in loop bodies.
        //     test src1, src1
        //     jz $helper (bail out)
        m_lowerer->InsertCompareBranch(
            opndSrc,
            IR::AddrOpnd::NewNull(m_func),
            Js::OpCode::BrEq_A,
            labelHelper,
            insertInstr);
    }

    // CMP [src], vtable<JavascriptNumber>
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, insertInstr->m_func);
    instr->SetSrc1(IR::IndirOpnd::New(opndSrc, 0, TyMachPtr, insertInstr->m_func));
    instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
    insertInstr->InsertBefore(instr);
    // JNZ $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE /* JNZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
  6463. #endif
  6464. #if DBG
  6465. //
  6466. // Helps in debugging of fast paths.
  6467. //
  6468. void LowererMD::GenerateDebugBreak( IR::Instr * insertInstr )
  6469. {
  6470. // int 3
  6471. IR::Instr *int3 = IR::Instr::New(Js::OpCode::INT, insertInstr->m_func);
  6472. int3->SetSrc1(IR::IntConstOpnd::New(3, TyInt32, insertInstr->m_func));
  6473. insertInstr->InsertBefore(int3);
  6474. }
  6475. #endif
  6476. IR::Instr *
  6477. LowererMD::LoadStackAddress(StackSym *sym, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
  6478. {
  6479. IR::RegOpnd * regDst = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachReg, this->m_func);
  6480. IR::SymOpnd * symSrc = IR::SymOpnd::New(sym, TyMachPtr, this->m_func);
  6481. IR::Instr * lea = IR::Instr::New(Js::OpCode::LEA, regDst, symSrc, this->m_func);
  6482. return lea;
  6483. }
template <bool verify>
void
LowererMD::MakeDstEquSrc1(IR::Instr *const instr)
{
    // Rewrites a lowered instruction so that dst and src1 are the same operand,
    // as the two-operand x86 form "a = a op b" requires.
    //   verify == true: assert-only mode - legalization should already have
    //   established dst == src1, so any mismatch is a missing-legalization bug.
    Assert(instr);
    Assert(instr->IsLowered());
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());

    if(instr->GetDst()->IsEqual(instr->GetSrc1()))
    {
        return;
    }

    if (verify)
    {
        AssertMsg(false, "Missing legalization");
        return;
    }

    if(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2()))
    {
        // dst aliases src2: for commutative opcodes just swap the sources.
        switch(instr->m_opcode)
        {
#ifdef _M_IX86
            case Js::OpCode::ADC:
#endif
            case Js::OpCode::Add_I4:
            case Js::OpCode::Mul_I4:
            case Js::OpCode::Or_I4:
            case Js::OpCode::Xor_I4:
            case Js::OpCode::And_I4:
            case Js::OpCode::ADD:
            case Js::OpCode::IMUL2:
            case Js::OpCode::OR:
            case Js::OpCode::XOR:
            case Js::OpCode::AND:
            case Js::OpCode::ADDSD:
            case Js::OpCode::MULSD:
            case Js::OpCode::ADDSS:
            case Js::OpCode::MULSS:
            case Js::OpCode::ADDPS:
                // For (a = b & a), generate (a = a & b)
                instr->SwapOpnds();
                return;
        }

        // For (a = b - a), generate (c = a; a = b - c) and fall through
        ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
    }

    // For (a = b - c), generate (a = b; a = a - c)
    IR::Instr *const mov = IR::Instr::New(Js::OpCode::Ld_A, instr->GetDst(), instr->UnlinkSrc1(), instr->m_func);
    instr->InsertBefore(mov);
    ChangeToAssign(mov);
    instr->SetSrc1(instr->GetDst());
}
void
LowererMD::EmitInt64Instr(IR::Instr * instr)
{
    // Arch-specific lowering of a 64-bit integer instruction. Only the x86
    // (32-bit) lowerer implements this; on other targets it must not be reached.
#ifdef _M_IX86
    lowererMDArch.EmitInt64Instr(instr);
#else
    Assert(UNREACHED);
#endif
}
void
LowererMD::EmitInt4Instr(IR::Instr *instr)
{
    // Delegate lowering of a 32-bit integer instruction to the arch-specific lowerer.
    LowererMDArch::EmitInt4Instr(instr);
}
void
LowererMD::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Delegate lowering of a load-as-Var to the arch-specific lowerer.
    lowererMDArch.EmitLoadVar(instrLoad, isFromUint32, isHelper);
}
bool
LowererMD::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    // Delegate lowering of a load-as-int32 to the arch-specific lowerer;
    // forwards its result.
    return lowererMDArch.EmitLoadInt32(instrLoad, conversionFromObjectAllowed, bailOutOnHelper, labelBailOut);
}
void
LowererMD::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate signed int -> float conversion to the arch-specific lowerer.
    this->lowererMDArch.EmitIntToFloat(dst, src, instrInsert);
}
void
LowererMD::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate unsigned int -> float conversion to the arch-specific lowerer.
    this->lowererMDArch.EmitUIntToFloat(dst, src, instrInsert);
}
void
LowererMD::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate signed int -> int64 widening to the arch-specific lowerer.
    this->lowererMDArch.EmitIntToLong(dst, src, instrInsert);
}
void
LowererMD::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate unsigned int -> int64 widening to the arch-specific lowerer.
    this->lowererMDArch.EmitUIntToLong(dst, src, instrInsert);
}
void
LowererMD::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate int64 -> int32 narrowing to the arch-specific lowerer.
    this->lowererMDArch.EmitLongToInt(dst, src, instrInsert);
}
  6585. void
  6586. LowererMD::EmitFloat32ToFloat64(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  6587. {
  6588. // We should only generate this if sse2 is available
  6589. Assert(AutoSystemInfo::Data.SSE2Available());
  6590. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  6591. Assert(src->IsRegOpnd() && src->GetType() == TyFloat32);
  6592. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSS2SD, dst, src, this->m_func));
  6593. }
void
LowererMD::EmitInt64toFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instr)
{
    // Converts a 64-bit integer (signed or unsigned) to float32/float64.
#ifdef _M_IX86
    // x86 (32-bit): no inline conversion; call a helper selected by the
    // (dst type, src type) pair.
    IR::Opnd *srcOpnd = instr->UnlinkSrc1();
    LoadInt64HelperArgument(instr, srcOpnd);

    IR::Instr* callinstr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instr->InsertBefore(callinstr);
    CompileAssert(sizeof(IRType) == 1);
    // Pack the two one-byte IRType values into one switchable key.
    const uint16 fromToType = dst->GetType() | (srcOpnd->GetType() << 8);
    IR::JnHelperMethod method = IR::HelperOp_Throw;
    switch (fromToType)
    {
    case TyFloat32 | (TyInt64 << 8) : method = IR::HelperI64TOF32; break;
    case TyFloat32 | (TyUint64 << 8) : method = IR::HelperUI64TOF32; break;
    case TyFloat64 | (TyInt64 << 8) : method = IR::HelperI64TOF64; break;
    case TyFloat64 | (TyUint64 << 8) : method = IR::HelperUI64TOF64; break;
    default:
        Assert(UNREACHED);
    }
    this->ChangeToHelperCall(callinstr, method);
#else
    // x64: CVTSI2SD converts treating the source as signed. For a float32
    // destination, convert to float64 first and narrow at the end.
    IR::Opnd* origDst = nullptr;
    if (dst->IsFloat32())
    {
        origDst = dst;
        dst = IR::RegOpnd::New(TyFloat64, this->m_func);
    }

    instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, this->m_func));

    if (src->IsUnsigned())
    {
        // Unsigned sources with the top bit set were converted as negative; add
        // a correction constant from a two-entry table indexed by the sign bit.
        IR::RegOpnd * highestBitOpnd = IR::RegOpnd::New(TyInt64, this->m_func);
        IR::Instr* instrNew = IR::Instr::New(Js::OpCode::SHR, highestBitOpnd, src,
            IR::IntConstOpnd::New(63, TyInt8, this->m_func, true), this->m_func);
        instr->InsertBefore(instrNew);
        Legalize(instrNew);
        IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        instrNew = IR::Instr::New(Js::OpCode::MOV, baseOpnd, IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetUInt64ConvertConstAddr(),
            IR::AddrOpndKindDynamicMisc, this->m_func), this->m_func);
        instr->InsertBefore(instrNew);
        instrNew = IR::Instr::New(Js::OpCode::ADDSD, dst, dst, IR::IndirOpnd::New(baseOpnd,
            highestBitOpnd, IndirScale8, TyFloat64, this->m_func), this->m_func);
        instr->InsertBefore(instrNew);
    }

    if (origDst)
    {
        // Narrow the float64 result into the requested float32 destination.
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTSD2SS, origDst, dst, this->m_func));
    }
#endif
}
void
LowererMD::EmitNon32BitOvfCheck(IR::Instr *instr, IR::Instr *insertInstr, IR::LabelInstr* bailOutLabel)
{
    // After an IMUL whose 64-bit result may exceed ignoreOverflowBitCount bits,
    // branch to bailOutLabel on overflow. The high half (edx) is sign-extended
    // from (ignoreOverflowBitCount - 32) bits via SHL/SAR and compared with the
    // original; a difference means the result did not fit.
    AssertMsg(instr->m_opcode == Js::OpCode::IMUL, "IMUL should be used to check for non-32 bit overflow check on x86.");

    IR::RegOpnd *edxSym = IR::RegOpnd::New(TyInt32, instr->m_func);
#ifdef _M_IX86
    edxSym->SetReg(RegEDX);
#else
    edxSym->SetReg(RegRDX);
#endif

    // dummy def for edx to force RegAlloc to generate a lifetime. This is removed later by the Peeps phase.
    IR::Instr *newInstr = IR::Instr::New(Js::OpCode::NOP, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);

    IR::RegOpnd *temp = IR::RegOpnd::New(TyInt32, instr->m_func);
    Assert(instr->ignoreOverflowBitCount > 32);
    // Number of bits to shift out so SHL/SAR sign-extends from the allowed width.
    uint8 shamt = 64 - instr->ignoreOverflowBitCount;

    // MOV temp, edx
    newInstr = IR::Instr::New(Js::OpCode::MOV, temp, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // SHL temp, shamt
    newInstr = IR::Instr::New(Js::OpCode::SHL, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // SAR temp, shamt
    newInstr = IR::Instr::New(Js::OpCode::SAR, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // CMP temp, edx - equal iff the high bits were pure sign extension.
    newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
    newInstr->SetSrc1(temp);
    newInstr->SetSrc2(edxSym);
    insertInstr->InsertBefore(newInstr);

    // JNE - overflow: bail out.
    Lowerer::InsertBranch(Js::OpCode::JNE, false, bailOutLabel, insertInstr);
}
void LowererMD::ConvertFloatToInt32(IR::Opnd* intOpnd, IR::Opnd* floatOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone, IR::Instr * instInsert)
{
    // Truncates floatOpnd (float32/float64) into the int32 intOpnd. On success
    // jumps to labelDone; when the value does not fit in int32 the code falls
    // through (toward whatever helper path the caller emits after this).
    UNREFERENCED_PARAMETER(labelHelper); // used on ARM
#if defined(_M_IX86)
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Assert((floatOpnd->IsRegOpnd() && floatOpnd->IsFloat()) || (floatOpnd->IsIndirOpnd() && floatOpnd->GetType() == TyMachDouble));
    Assert(intOpnd->GetType() == TyInt32);

    IR::Instr* instr;

    {
#ifdef _M_X64
        // On x64, convert into a 64-bit temp so int64-range overflow is detectable.
        IR::Opnd* dstOpnd = IR::RegOpnd::New(TyInt64, m_func);
#else
        IR::Opnd* dstOpnd = intOpnd;
#endif
        // CVTTSD2SI dst, floatOpnd - truncating conversion; overflow produces the
        // "integer indefinite" value (the sign-bit pattern checked next).
        instr = IR::Instr::New(floatOpnd->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, dstOpnd, floatOpnd, this->m_func);
        instInsert->InsertBefore(instr);

        // CMP dst, 0x80000000 {0x8000000000000000 on x64} -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(dstOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func, true));
        instInsert->InsertBefore(instr);
        Legalize(instr);

#ifdef _M_X64
        // Truncate to int32 for x64. We still need to go to helper though if we have int64 overflow.
        // MOV_TRUNC intOpnd, tmpOpnd
        instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, intOpnd, dstOpnd, this->m_func);
        instInsert->InsertBefore(instr);
#endif
    }

    // JNE $done - no overflow sentinel seen, the conversion succeeded.
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
    instInsert->InsertBefore(instr);

    // It does overflow - Let's try using FISTTP which uses 64 bits and is relevant only for x86
    // but requires going to memory and should only be used in overflow scenarios
#ifdef _M_IX86
    if (AutoSystemInfo::Data.SSE3Available())
    {
        IR::Opnd* floatStackOpnd;

        // Lazily allocate the shared stack slot used to spill the double.
        StackSym* tempSymDouble = this->m_func->tempSymDouble;
        if (!tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
            tempSymDouble = this->m_func->tempSymDouble;
        }

        // Widen a float32 source to float64 before spilling.
        IR::Opnd * float64Opnd;
        if (floatOpnd->IsFloat32())
        {
            float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
            instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, floatOpnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            float64Opnd = floatOpnd;
        }

        // Spill the double to [tmpDouble] so the x87 unit can load it.
        if (float64Opnd->IsRegOpnd())
        {
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
            instr = IR::Instr::New(Js::OpCode::MOVSD, floatStackOpnd, float64Opnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            floatStackOpnd = float64Opnd;
        }

        // FLD [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::FLD, floatStackOpnd, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);

        // Need a fresh sym operand for the store when the one above was the source memory operand.
        if (!float64Opnd->IsRegOpnd())
        {
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
        }

        // FISTTP qword ptr [tmpDouble] - truncating 64-bit store from the x87 stack.
        instr = IR::Instr::New(Js::OpCode::FISTTP, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);

        // Alias the low 32 bits of the spilled result via a sym at the same offset.
        StackSym *intSym = StackSym::New(TyInt32, m_func);
        intSym->m_offset = tempSymDouble->m_offset;
        intSym->m_allocated = true;
        IR::Opnd* lowerBitsOpnd = IR::SymOpnd::New(intSym, TyInt32, m_func);

        // MOV dst, dword ptr [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::MOV, intOpnd, lowerBitsOpnd, m_func);
        instInsert->InsertBefore(instr);

        // TEST dst, dst -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(intOpnd);
        instr->SetSrc2(intOpnd);
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);

        // Low dword was zero: distinguish a genuine result from int64 overflow by
        // checking the high dword of the 64-bit store.
        // CMP [tmpDouble + 4], 0x80000000
        StackSym* higherBitsSym = StackSym::New(TyInt32, m_func);
        higherBitsSym->m_offset = tempSymDouble->m_offset + 4;
        higherBitsSym->m_allocated = true;
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::SymOpnd::New(higherBitsSym, TyInt32, m_func));
        instr->SetSrc2(IR::IntConstOpnd::New(0x80000000, TyInt32, this->m_func, true));
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);
    }
#endif
}
IR::Instr *
LowererMD::InsertConvertFloat64ToInt32(const RoundMode roundMode, IR::Opnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
{
    // Emits a float64 -> int32 conversion before insertBeforeInstr using the requested
    // rounding mode, and returns the conversion instruction that was inserted.
    // The caller is expected to check for overflow. To have that work be done automatically, use LowererMD::EmitFloatToInt.
    Assert(dst);
    Assert(dst->IsInt32());
    Assert(src);
    Assert(src->IsFloat64());
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;
    IR::AutoReuseOpnd autoReuseSrcPlusHalf;
    IR::Instr *instr = nullptr;

    switch (roundMode)
    {
        case RoundModeTowardInteger:
        {
            // Conversion with rounding towards nearest integer is not supported by the architecture. Add 0.5 and do a
            // round-toward-zero conversion instead.
            // NOTE(review): "x + 0.5 then truncate" differs from true round-to-nearest for
            // some negative and halfway inputs; presumably call sites guard the ranges
            // where that matters — confirm against callers.
            IR::RegOpnd *const srcPlusHalf = IR::RegOpnd::New(TyFloat64, func);
            autoReuseSrcPlusHalf.Initialize(srcPlusHalf, func);
            Lowerer::InsertAdd(
                false /* needFlags */,
                srcPlusHalf,
                src,
                IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, func,
                    IR::AddrOpndKindDynamicDoubleRef),
                insertBeforeInstr);

            instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeTowardZero), dst, srcPlusHalf, func);
            insertBeforeInstr->InsertBefore(instr);
            LowererMD::Legalize(instr);
            return instr;
        }
        case RoundModeHalfToEven:
        {
            // Round-half-to-even is handled directly by the machine-dependent opcode.
            instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeHalfToEven), dst, src, func);
            insertBeforeInstr->InsertBefore(instr);
            LowererMD::Legalize(instr);
            return instr;
        }
        default:
            AssertMsg(0, "RoundMode not supported.");
            return nullptr;
    }
}
void
LowererMD::EmitFloatToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert, IR::Instr *instrBailOut, IR::LabelInstr * labelBailOut)
{
    // Emits a float -> int32 conversion with a fast inline path and a helper-call
    // fallback (Conv_ToInt32Core), wiring up bailout info on the helper call when needed.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif

    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    if (instrBailOut && instrBailOut->HasBailOutInfo())
    {
        bailOutKind = instrBailOut->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
        {
            // Bail out instead of calling helper. If this is happening unconditionally, the caller should instead throw a rejit exception.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrInsert);
            return;
        }
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *instr;

    // Fast path: inline conversion. Control reaches labelHelper when the inline
    // sequence cannot produce the result, labelDone when it can.
    ConvertFloatToInt32(dst, src, labelHelper, labelDone, instrInsert);

    // $Helper
    instrInsert->InsertBefore(labelHelper);

    // The helper takes a double argument; widen a float32 source first.
    IR::Opnd * arg = src;
    if (src->IsFloat32())
    {
        arg = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(arg, src, instrInsert);
    }
    instr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instrInsert->InsertBefore(instr);

    // Attach bailout info to the helper call for implicit-call bailouts, sharing
    // the bailout record when this instruction owns it.
    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // dst = ToInt32Core(src);
    LoadDoubleHelperArgument(instr, arg);
    this->ChangeToHelperCall(instr, IR::HelperConv_ToInt32Core);

    // $Done
    instrInsert->InsertBefore(labelDone);
}
void
LowererMD::EmitLoadVarNoCheck(IR::RegOpnd * dst, IR::RegOpnd * src, IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Boxes an int32/uint32 value in src into a Var in dst by converting to double
    // and tagging/boxing it (SaveDoubleToVar). On x86 without SSE2, calls a runtime
    // helper instead.
#ifdef _M_IX86
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        IR::JnHelperMethod helperMethod;

        // PUSH &floatTemp
        IR::Opnd *tempOpnd;
        if (instrLoad->dstIsTempNumber)
        {
            // In-place variants reuse a stack-allocated temp number instead of recycler memory.
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtomInPlace : IR::HelperOp_Int32ToAtomInPlace;

            // Use the original dst to get the temp number sym
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrLoad->GetDst(), instrLoad->dstIsTempNumberTransferred);

            IR::Instr *load = this->LoadStackAddress(tempNumberSym);
            instrLoad->InsertBefore(load);
            tempOpnd = load->GetDst();
            this->LoadHelperArgument(instrLoad, tempOpnd);
        }
        else
        {
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtom : IR::HelperOp_Int32ToAtom;
        }

        // PUSH memContext
        this->m_lowerer->LoadScriptContext(instrLoad);

        // PUSH s1
        this->LoadHelperArgument(instrLoad, src);

        // dst = ToVar()
        IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, dst,
            IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
        instrLoad->InsertBefore(instr);
        this->LowerCall(instr, 0);
        return;
    }
#endif

    // SSE path: convert the (u)int32 to float64, then box the double into dst.
    IR::RegOpnd * floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
    if (isFromUint32)
    {
        this->EmitUIntToFloat(floatReg, src, instrLoad);
    }
    else
    {
        this->EmitIntToFloat(floatReg, src, instrLoad);
    }
    this->SaveDoubleToVar(dst, floatReg, instrLoad, instrLoad, isHelper);
}
IR::Instr *
LowererMD::LowerGetCachedFunc(IR::Instr *instr)
{
    // src1 is an ActivationObjectEx, and we want to get the function object identified by the index (src2)
    // dst = MOV (src1)->GetFuncCacheEntry(src2)->func
    //
    // => [src1 + (offsetof(src1, cache) + (src2 * sizeof(FuncCacheEntry)) + offsetof(FuncCacheEntry, func))]
    //
    // Returns the instruction preceding the (rewritten) instr so the caller's
    // lowering loop can continue from before this point.

    IR::IntConstOpnd *src2Opnd = instr->UnlinkSrc2()->AsIntConstOpnd();
    IR::RegOpnd *src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();

    instr->m_opcode = Js::OpCode::MOV;

    // Fold the constant index into a single [base + offset] load.
    IntConstType offset = (src2Opnd->GetValue() * sizeof(Js::FuncCacheEntry)) + Js::ActivationObjectEx::GetOffsetOfCache() + offsetof(Js::FuncCacheEntry, func);
    Assert(Math::FitsInDWord(offset));
    instr->SetSrc1(IR::IndirOpnd::New(src1Opnd, (int32)offset, TyVar, this->m_func));

    src2Opnd->Free(this->m_func);

    return instr->m_prev;
}
IR::Instr *
LowererMD::LowerCommitScope(IR::Instr *instrCommit)
{
    // Lowers CommitScope: sets the activation object's commit flag to 1, then
    // writes 'undefined' into every local var slot past the formals.
    IR::Instr *instrPrev = instrCommit->m_prev;
    IR::RegOpnd *baseOpnd = instrCommit->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd *opnd;
    IR::Instr * insertInstr = instrCommit->m_next;

    // Write undef to all the local var slots.
    opnd = IR::IndirOpnd::New(baseOpnd, Js::ActivationObjectEx::GetOffsetOfCommitFlag(), TyInt8, this->m_func);
    instrCommit->SetDst(opnd);
    instrCommit->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, this->m_func));
    LowererMD::ChangeToAssign(instrCommit);

    const Js::PropertyIdArray *propIds = instrCommit->m_func->GetJITFunctionBody()->GetFormalsPropIdArray();

    uint firstVarSlot = (uint)Js::ActivationObjectEx::GetFirstVarSlot(propIds);
    if (firstVarSlot < propIds->count)
    {
        // Only materialize 'undefined' and the slot base when there are var slots to clear.
        IR::RegOpnd *undefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(undefOpnd, m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueUndefined), insertInstr);

        IR::RegOpnd *slotBaseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        // Load a pointer to the aux slots. We assume that all ActivationObject's have only aux slots.
        opnd = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        this->CreateAssign(slotBaseOpnd, opnd, insertInstr);

        for (uint i = firstVarSlot; i < propIds->count; i++)
        {
            // Slot index scaled to a byte offset (machine-pointer-sized slots).
            opnd = IR::IndirOpnd::New(slotBaseOpnd, i << this->GetDefaultIndirScale(), TyMachReg, this->m_func);
            this->CreateAssign(opnd, undefOpnd, insertInstr);
        }
    }

    return instrPrev;
}
  6968. void
  6969. LowererMD::ImmedSrcToReg(IR::Instr * instr, IR::Opnd * newOpnd, int srcNum)
  6970. {
  6971. if (srcNum == 2)
  6972. {
  6973. instr->SetSrc2(newOpnd);
  6974. }
  6975. else
  6976. {
  6977. Assert(srcNum == 1);
  6978. instr->SetSrc1(newOpnd);
  6979. }
  6980. }
  6981. IR::LabelInstr *
  6982. LowererMD::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  6983. {
  6984. return lowererMDArch.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  6985. }
  6986. StackSym *
  6987. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot)
  6988. {
  6989. return GetImplicitParamSlotSym(argSlot, this->m_func);
  6990. }
  6991. StackSym *
  6992. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot, Func * func)
  6993. {
  6994. // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
  6995. // Pass in the EBP+8 to start at the function object, the start of the implicit param slots
  6996. StackSym * stackSym = StackSym::NewImplicitParamSym(argSlot, func);
  6997. func->SetArgOffset(stackSym, (2 + argSlot) * MachPtr);
  6998. func->SetHasImplicitParamLoad();
  6999. return stackSym;
  7000. }
  7001. bool LowererMD::GenerateFastAnd(IR::Instr * instrAnd)
  7002. {
  7003. return this->lowererMDArch.GenerateFastAnd(instrAnd);
  7004. }
  7005. bool LowererMD::GenerateFastXor(IR::Instr * instrXor)
  7006. {
  7007. return this->lowererMDArch.GenerateFastXor(instrXor);
  7008. }
  7009. bool LowererMD::GenerateFastOr(IR::Instr * instrOr)
  7010. {
  7011. return this->lowererMDArch.GenerateFastOr(instrOr);
  7012. }
  7013. bool LowererMD::GenerateFastNot(IR::Instr * instrNot)
  7014. {
  7015. return this->lowererMDArch.GenerateFastNot(instrNot);
  7016. }
  7017. bool LowererMD::GenerateFastShiftLeft(IR::Instr * instrShift)
  7018. {
  7019. return this->lowererMDArch.GenerateFastShiftLeft(instrShift);
  7020. }
  7021. bool LowererMD::GenerateFastShiftRight(IR::Instr * instrShift)
  7022. {
  7023. return this->lowererMDArch.GenerateFastShiftRight(instrShift);
  7024. }
  7025. void LowererMD::GenerateIsDynamicObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool fContinueLabel)
  7026. {
  7027. // CMP [srcReg], Js::DynamicObject::`vtable'
  7028. {
  7029. IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  7030. cmp->SetSrc1(IR::IndirOpnd::New(regOpnd, 0, TyMachPtr, m_func));
  7031. cmp->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableDynamicObject));
  7032. insertInstr->InsertBefore(cmp);
  7033. Legalize(cmp);
  7034. }
  7035. if (fContinueLabel)
  7036. {
  7037. // JEQ $fallThough
  7038. IR::Instr * jne = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  7039. insertInstr->InsertBefore(jne);
  7040. }
  7041. else
  7042. {
  7043. // JNE $helper
  7044. IR::Instr * jne = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  7045. insertInstr->InsertBefore(jne);
  7046. }
  7047. }
void LowererMD::GenerateIsRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject)
{
    // Emits a check that regOpnd holds a "true JavaScript object" (typeId in
    // (TypeIds_LastJavascriptPrimitiveType, TypeIds_LastTrueJavascriptObjectType]),
    // branching to labelHelper otherwise. The range test is done with a single
    // unsigned compare after biasing the typeId.
    //
    // CMP [srcReg], Js::DynamicObject::`vtable'
    // JEQ $fallThough
    // MOV r1, [src1 + offset(type)] -- get the type id
    // MOV r1, [r1 + offset(typeId)]
    // ADD r1, ~TypeIds_LastJavascriptPrimitiveType -- if (typeId > TypeIds_LastJavascriptPrimitiveType && typeId <= TypeIds_LastTrueJavascriptObjectType)
    // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
    // JA $helper
    //fallThrough:
    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    if (checkObjectAndDynamicObject)
    {
        // Tagged values can't be objects; skip the tag test if already known untagged.
        if (!regOpnd->IsNotTaggedValue())
        {
            GenerateObjectTest(regOpnd, insertInstr, labelHelper);
        }

        // DynamicObject is a fast accept: jump straight to fallthrough on a vtable match.
        this->GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
    }

    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);

    //  MOV r1, [src1 + offset(type)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, typeRegOpnd, indirOpnd, this->m_func);
        insertInstr->InsertBefore(mov);
    }

    //  MOV r1, [r1 + offset(typeId)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
        IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, typeIdRegOpnd, indirOpnd, this->m_func);
        insertInstr->InsertBefore(mov);
    }

    // ADD r1, ~TypeIds_LastJavascriptPrimitiveType
    // (i.e. typeId - TypeIds_LastJavascriptPrimitiveType - 1, so the valid range starts at 0)
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::ADD, typeIdRegOpnd, typeIdRegOpnd, IR::IntConstOpnd::New(~Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func, true), this->m_func);
        insertInstr->InsertBefore(add);
    }

    // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        cmp->SetSrc1(typeIdRegOpnd);
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeIds_LastTrueJavascriptObjectType - Js::TypeIds_LastJavascriptPrimitiveType - 1, TyInt32, this->m_func));
        insertInstr->InsertBefore(cmp);
    }

    // JA $helper -- unsigned compare also rejects typeIds below the range (they wrapped around)
    {
        IR::Instr * jbe = IR::BranchInstr::New(Js::OpCode::JA, labelHelper, this->m_func);
        insertInstr->InsertBefore(jbe);
    }

    // $fallThrough
    insertInstr->InsertBefore(labelFallthrough);
}
bool
LowererMD::GenerateLdThisCheck(IR::Instr * instr)
{
    // Fast path for LdThis: if src1 is already a recyclable object, just return it;
    // otherwise fall to the helper call that the caller generates after $helper.
    //
    // If not a recyclable object, jump to $helper
    // MOV dst, src1 -- return the object itself
    // JMP $fallthrough
    // $helper:
    //      (caller generates helper call)
    // $fallthrough:
    //
    IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    this->GenerateIsRecyclableObject(src1, instr, helper);

    // MOV dst, src1 (skipped when dst and src1 are already the same operand)
    if (instr->GetDst() && !instr->GetDst()->IsEqual(src1))
    {
        IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, instr->GetDst(), src1, this->m_func);
        instr->InsertBefore(mov);
    }

    // JMP $fallthrough
    {
        IR::Instr * jmp = IR::BranchInstr::New(Js::OpCode::JMP, fallthrough, this->m_func);
        instr->InsertBefore(jmp);
    }

    // $helper:
    //      (caller generates helper call)
    // $fallthrough:
    instr->InsertBefore(helper);
    instr->InsertAfter(fallthrough);

    return true;
}
// Fast path for strict-mode LdThis: pass the value through unchanged unless it is
// an activation object, in which case dst gets 'undefined'.
//
// TEST src, Js::AtomTag
// JNE $done
// MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
// CMP [typeReg + offsetof(Type::typeid)], TypeIds_ActivationObject
// JEQ $helper
// $done:
// MOV dst, src
// JMP $fallthru
// helper:
// MOV dst, undefined
// $fallthru:
bool
LowererMD::GenerateLdThisStrict(IR::Instr* instr)
{
    IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*helper*/true);
    // No copy is needed when dst and src1 are the same operand; tagged values then
    // branch straight to $fallthru instead of $done.
    bool assign = instr->GetDst() && !instr->GetDst()->IsEqual(src1);

    // TEST src1, Js::AtomTag
    // JNE $done
    if(!src1->IsNotTaggedValue())
    {
        GenerateObjectTest(src1, instr, assign ? done : fallthru);
    }

    // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
        IR::IndirOpnd::New(src1, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_ActivationObject
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func));
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_ActivationObject, TyInt32, m_func));
        instr->InsertBefore(cmp);
    }

    // JEQ $helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, helper, m_func));

    if (assign)
    {
        // $done:
        // MOV dst, src
        instr->InsertBefore(done);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, instr->GetDst(), src1, m_func));
    }

    // JMP $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, fallthru, m_func));

    instr->InsertBefore(helper);
    if (instr->GetDst())
    {
        // MOV dst, undefined
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, instr->GetDst(),
            m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined), m_func));
    }
    // $fallthru:
    instr->InsertAfter(fallthru);

    return true;
}
// given object instanceof function, functionReg is a register with function,
// objectReg is a register with instance and inlineCache is an InstIsInlineCache.
// We want to generate:
//
// fallback on helper (will patch the inline cache) if function does not match the cache
// MOV dst, Js::false
// CMP functionReg, [&(inlineCache->function)]
// JNE helper
//
// fallback if object is a tagged int
// TEST objectReg, Js::AtomTag
// JNE done
//
// fallback if object's type is not the cached type
// MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
// CMP typeReg, [&(inlineCache->type)]
// JNE checkPrimType
//
// use the cached result and fallthrough
// MOV dst, [&(inlineCache->result)]
// JMP done
//
// return false if object is a primitive
// $checkPrimType
// CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
// JLE done
//
// $helper
// $done
bool
LowererMD::GenerateFastIsInst(IR::Instr * instr)
{
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * checkPrimType = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd * objectSrc;
    IR::RegOpnd * objectReg;
    IR::Opnd * functionSrc;
    IR::RegOpnd * functionReg;
    intptr_t inlineCache;
    IR::Instr * instrArg;

    // We are going to use the extra ArgOut_A instructions to lower the helper call later,
    // so we leave them alone here and clean them up then.
    // Walk the ArgOut chain: src2 -> ArgOut(object) -> ArgOut(function).
    inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(instr->GetSrc1()->AsIntConstOpnd()->AsUint32());
    Assert(instr->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;

    objectSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;

    functionSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2() == nullptr);

    // MOV dst, Js::false -- default result; overwritten only on a cache hit
    Lowerer::InsertMove(instr->GetDst(), m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);

    if (functionSrc->IsRegOpnd())
    {
        functionReg = functionSrc->AsRegOpnd();
    }
    else
    {
        functionReg = IR::RegOpnd::New(TyMachReg, this->m_func);

        // MOV functionReg, functionSrc
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, functionReg, functionSrc, m_func));
    }

    // CMP functionReg, [&(inlineCache->function)]
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(functionReg);
        cmp->SetSrc2(IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfFunction(), TyMachReg, m_func,
            IR::AddrOpndKindDynamicIsInstInlineCacheFunctionRef));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, helper, m_func));

    if (objectSrc->IsRegOpnd())
    {
        objectReg = objectSrc->AsRegOpnd();
    }
    else
    {
        objectReg = IR::RegOpnd::New(TyMachReg, this->m_func);

        // MOV objectReg, objectSrc
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, objectReg, objectSrc, m_func));
    }

    // TEST objectReg, Js::AtomTag
    // JNE done -- tagged values are never instances; dst already holds false
    GenerateObjectTest(objectReg, instr, done);

    // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
        IR::IndirOpnd::New(objectReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    // CMP typeReg, [&(inlineCache->type)]
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(typeReg);
        cmp->SetSrc2(IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfType(), TyMachReg, m_func,
            IR::AddrOpndKindDynamicIsInstInlineCacheTypeRef));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE checkPrimType
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, checkPrimType, m_func));

    // MOV dst, [&(inlineCache->result)]
    Lowerer::InsertMove(instr->GetDst(), IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfResult(), TyMachReg, m_func,
        IR::AddrOpndKindDynamicIsInstInlineCacheResultRef), instr);

    // JMP done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // LABEL checkPrimType
    instr->InsertBefore(checkPrimType);

    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func));
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, m_func));
        instr->InsertBefore(cmp);
    }

    // JLE done -- primitives are never instances; dst already holds false
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JLE, done, m_func));

    // LABEL helper
    instr->InsertBefore(helper);

    instr->InsertAfter(done);

    return true;
}
void LowererMD::GenerateIsJsObjectTest(IR::RegOpnd* instanceReg, IR::Instr* insertInstr, IR::LabelInstr* labelHelper)
{
    // Branches to labelHelper unless instanceReg holds an untagged value whose
    // typeId is greater than TypeIds_LastJavascriptPrimitiveType (i.e. a JS object).

    // TEST instanceReg, (Js::AtomTag_IntPtr | Js::FloatTag_Value )
    GenerateObjectTest(instanceReg, insertInstr, labelHelper);

    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);

    // MOV typeReg, instanceReg + offsetof(RecyclableObject::type)
    insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
        IR::IndirOpnd::New(instanceReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func));
    cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func));
    insertInstr->InsertBefore(cmp);

    // JLE labelHelper -- still a primitive type id
    insertInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JLE, labelHelper, this->m_func));
}
void
LowererMD::EmitReinterpretPrimitive(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
{
    // Emits a bitwise reinterpretation (no value conversion) between same-sized
    // integer and float operands, using MOVQ/MOVD (and SHUFPS on 32-bit builds
    // that split int64 into register pairs).
    Assert(dst && src);
    Assert(dst->GetSize() == src->GetSize());
    Assert(dst->GetType() != src->GetType());
    if (
        // Additional runtime check to prevent unknown behavior
        // (mirrors the asserts above; in release builds degrade to a plain move)
        (dst->GetSize() != src->GetSize()) ||
        // There is nothing to do in this case
        (dst->GetType() == src->GetType())
    )
    {
        Lowerer::InsertMove(dst, src, insertBeforeInstr);
        return;
    }

    auto LegalizeInsert = [insertBeforeInstr](IR::Instr* instr)
    {
        Legalize(instr);
        insertBeforeInstr->InsertBefore(instr);
    };

    if (dst->GetSize() == 8)
    {
        // 64-bit reinterprets.
        // NOTE(review): if neither _M_AMD64 nor LOWER_SPLIT_INT64 is defined,
        // this branch emits nothing — presumably no such configuration reaches
        // here with 8-byte operands.
#if _M_AMD64
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVQ, dst, src, m_func));
#elif LOWER_SPLIT_INT64
        if (dst->IsInt64())
        {
            // float64 -> int64 pair:
            // movd xmm2, xmm1
            // movd low_bits, xmm2
            // shufps xmm2, xmm2, 1
            // movd high_bits, xmm2
            Assert(src->IsFloat64());
            Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);

            // shufps modifies the register, we shouldn't change the source here
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            this->CreateAssign(tmpDouble, src, insertBeforeInstr);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.low, tmpDouble, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, tmpDouble, tmpDouble, IR::IntConstOpnd::New(1, TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.high, tmpDouble, m_func));
        }
        else
        {
            // int64 pair -> float64:
            // movd xmm0, lowBits;
            // movd xmm1, highBits;
            // shufps xmm0, xmm1, (0 | 2 << 2 | 0 << 4 | 1 << 6);
            // shufps xmm0, xmm0, (0 | 2 << 2 | 3 << 4 | 3 << 6);
            Assert(src->IsInt64());
            Assert(dst->IsFloat64());
            Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src);
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, srcPair.low, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, tmpDouble, srcPair.high, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, tmpDouble, IR::IntConstOpnd::New((0 | 2 << 2 | 0 << 4 | 1 << 6), TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, dst, IR::IntConstOpnd::New((0 | 2 << 2 | 3 << 4 | 3 << 6), TyInt8, m_func, true), m_func));
        }
#endif
    }
    else if (dst->GetSize() == 4)
    {
        // 32bit reinterprets
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, src, m_func));
    }
    else
    {
        Assert(UNREACHED);
    }
}
  7402. void LowererMD::EmitReinterpretFloatToInt(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  7403. {
  7404. Assert(dst->IsInt32() || dst->IsUInt32() || dst->IsInt64());
  7405. Assert(src->IsFloat());
  7406. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  7407. }
  7408. void LowererMD::EmitReinterpretIntToFloat(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  7409. {
  7410. Assert(dst->IsFloat());
  7411. Assert(src->IsInt32() || src->IsUInt32() || src->IsInt64());
  7412. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  7413. }
  7414. IR::Instr *
  7415. LowererMD::LowerInt64Assign(IR::Instr * instr)
  7416. {
  7417. return this->lowererMDArch.LowerInt64Assign(instr);
  7418. }
  7419. IR::Instr *
  7420. LowererMD::LowerToFloat(IR::Instr *instr)
  7421. {
  7422. switch (instr->m_opcode)
  7423. {
  7424. case Js::OpCode::Add_A:
  7425. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7426. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7427. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS;
  7428. break;
  7429. case Js::OpCode::Sub_A:
  7430. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7431. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7432. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SUBSD : Js::OpCode::SUBSS;
  7433. break;
  7434. case Js::OpCode::Mul_A:
  7435. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7436. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7437. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::MULSD : Js::OpCode::MULSS;
  7438. break;
  7439. case Js::OpCode::Div_A:
  7440. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7441. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7442. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::DIVSD : Js::OpCode::DIVSS;
  7443. break;
  7444. case Js::OpCode::Neg_A:
  7445. {
  7446. IR::Opnd *opnd;
  7447. instr->m_opcode = Js::OpCode::XORPS;
  7448. if (instr->GetDst()->IsFloat32())
  7449. {
  7450. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegFloatAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  7451. }
  7452. else
  7453. {
  7454. Assert(instr->GetDst()->IsFloat64());
  7455. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegDoubleAddr(), TyMachDouble, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  7456. }
  7457. instr->SetSrc2(opnd);
  7458. Legalize(instr);
  7459. break;
  7460. }
  7461. case Js::OpCode::BrEq_A:
  7462. case Js::OpCode::BrNeq_A:
  7463. case Js::OpCode::BrSrEq_A:
  7464. case Js::OpCode::BrSrNeq_A:
  7465. case Js::OpCode::BrGt_A:
  7466. case Js::OpCode::BrGe_A:
  7467. case Js::OpCode::BrLt_A:
  7468. case Js::OpCode::BrLe_A:
  7469. case Js::OpCode::BrNotEq_A:
  7470. case Js::OpCode::BrNotNeq_A:
  7471. case Js::OpCode::BrSrNotEq_A:
  7472. case Js::OpCode::BrSrNotNeq_A:
  7473. case Js::OpCode::BrNotGt_A:
  7474. case Js::OpCode::BrNotGe_A:
  7475. case Js::OpCode::BrNotLt_A:
  7476. case Js::OpCode::BrNotLe_A:
  7477. return this->LowerFloatCondBranch(instr->AsBranchInstr());
  7478. default:
  7479. Assume(UNREACHED);
  7480. }
  7481. this->MakeDstEquSrc1(instr);
  7482. return instr;
  7483. }
  7484. IR::BranchInstr *
  7485. LowererMD::LowerFloatCondBranch(IR::BranchInstr *instrBranch, bool ignoreNan)
  7486. {
  7487. Js::OpCode brOpcode = Js::OpCode::InvalidOpCode;
  7488. Js::OpCode cmpOpcode = Js::OpCode::InvalidOpCode;
  7489. IR::Instr *instr;
  7490. bool swapCmpOpnds = false;
  7491. bool addJP = false;
  7492. IR::LabelInstr *labelNaN = nullptr;
  7493. // Generate float compare that behave correctly for NaN's.
  7494. // These branch on unordered:
  7495. // JB
  7496. // JBE
  7497. // JE
  7498. // These don't branch on unordered:
  7499. // JA
  7500. // JAE
  7501. // JNE
  7502. // Unfortunately, only JA and JAE do what we'd like....
  7503. Func * func = instrBranch->m_func;
  7504. IR::Opnd *src1 = instrBranch->UnlinkSrc1();
  7505. IR::Opnd *src2 = instrBranch->UnlinkSrc2();
  7506. Assert(src1->GetType() == src2->GetType());
  7507. switch (instrBranch->m_opcode)
  7508. {
  7509. case Js::OpCode::BrSrEq_A:
  7510. case Js::OpCode::BrEq_A:
  7511. case Js::OpCode::BrSrNotNeq_A:
  7512. case Js::OpCode::BrNotNeq_A:
  7513. cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
  7514. brOpcode = Js::OpCode::JEQ;
  7515. if (!ignoreNan)
  7516. {
  7517. // Don't jump on NaN's
  7518. labelNaN = instrBranch->GetOrCreateContinueLabel();
  7519. addJP = true;
  7520. }
  7521. break;
  7522. case Js::OpCode::BrNeq_A:
  7523. case Js::OpCode::BrSrNeq_A:
  7524. case Js::OpCode::BrSrNotEq_A:
  7525. case Js::OpCode::BrNotEq_A:
  7526. cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
  7527. brOpcode = Js::OpCode::JNE;
  7528. if (!ignoreNan)
  7529. {
  7530. // Jump on NaN's
  7531. labelNaN = instrBranch->GetTarget();
  7532. addJP = true;
  7533. }
  7534. break;
  7535. case Js::OpCode::BrLe_A:
  7536. swapCmpOpnds = true;
  7537. brOpcode = Js::OpCode::JAE;
  7538. break;
  7539. case Js::OpCode::BrLt_A:
  7540. swapCmpOpnds = true;
  7541. brOpcode = Js::OpCode::JA;
  7542. break;
  7543. case Js::OpCode::BrGe_A:
  7544. brOpcode = Js::OpCode::JAE;
  7545. break;
  7546. case Js::OpCode::BrGt_A:
  7547. brOpcode = Js::OpCode::JA;
  7548. break;
  7549. case Js::OpCode::BrNotLe_A:
  7550. swapCmpOpnds = true;
  7551. brOpcode = Js::OpCode::JB;
  7552. break;
  7553. case Js::OpCode::BrNotLt_A:
  7554. swapCmpOpnds = true;
  7555. brOpcode = Js::OpCode::JBE;
  7556. break;
  7557. case Js::OpCode::BrNotGe_A:
  7558. brOpcode = Js::OpCode::JB;
  7559. break;
  7560. case Js::OpCode::BrNotGt_A:
  7561. brOpcode = Js::OpCode::JBE;
  7562. break;
  7563. default:
  7564. Assume(UNREACHED);
  7565. }
  7566. // if we haven't set cmpOpcode, then we are using COMISD/COMISS
  7567. if (cmpOpcode == Js::OpCode::InvalidOpCode)
  7568. {
  7569. cmpOpcode = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
  7570. }
  7571. if (swapCmpOpnds)
  7572. {
  7573. IR::Opnd *tmp = src1;
  7574. src1 = src2;
  7575. src2 = tmp;
  7576. }
  7577. // VC generates UCOMISD for BrEq/BrNeq, and COMISD for all others, accordingly to IEEE 754.
  7578. // We'll do the same.
  7579. // COMISD / UCOMISD src1, src2
  7580. IR::Instr *instrCmp = IR::Instr::New(cmpOpcode, func);
  7581. instrCmp->SetSrc1(src1);
  7582. instrCmp->SetSrc2(src2);
  7583. instrBranch->InsertBefore(instrCmp);
  7584. Legalize(instrCmp);
  7585. if (addJP)
  7586. {
  7587. // JP $LabelNaN
  7588. instr = IR::BranchInstr::New(Js::OpCode::JP, labelNaN, func);
  7589. instrBranch->InsertBefore(instr);
  7590. }
  7591. // Jcc $L
  7592. instr = IR::BranchInstr::New(brOpcode, instrBranch->GetTarget(), func);
  7593. instrBranch->InsertBefore(instr);
  7594. instrBranch->Remove();
  7595. return instr->AsBranchInstr();
  7596. }
  7597. void LowererMD::HelperCallForAsmMathBuiltin(IR::Instr* instr, IR::JnHelperMethod helperMethodFloat, IR::JnHelperMethod helperMethodDouble)
  7598. {
  7599. Assert(instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::Trunc_A || instr->m_opcode == Js::OpCode::Nearest_A);
  7600. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  7601. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7602. Assert(!instr->GetSrc2());
  7603. IR::Opnd * argOpnd = instr->UnlinkSrc1();
  7604. IR::JnHelperMethod helperMethod;
  7605. uint dwordCount;
  7606. if (argOpnd->IsFloat32())
  7607. {
  7608. helperMethod = helperMethodFloat;
  7609. LoadFloatHelperArgument(instr, argOpnd);
  7610. dwordCount = 1;
  7611. }
  7612. else
  7613. {
  7614. helperMethod = helperMethodDouble;
  7615. LoadDoubleHelperArgument(instr, argOpnd);
  7616. dwordCount = 2;
  7617. }
  7618. instr->m_opcode = Js::OpCode::CALL;
  7619. IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->lowererMDArch.GetHelperArgsCount(), m_func);
  7620. instr->SetSrc1(helperCallOpnd);
  7621. this->lowererMDArch.LowerCall(instr, dwordCount);
  7622. }
// Lowers an inlined math built-in to machine-dependent IR.  Depending on the
// opcode this either (a) rewrites the instruction in place into a single SSE2
// instruction (sqrt), (b) delegates to a dedicated generator (abs, pow),
// (c) emits a CRT helper call with arguments passed in XMM registers
// (trig/exp/log, and pow with a float exponent on x86), or (d) expands
// floor/ceil/round/trunc/nearest and min/max into inline compare/branch
// sequences.  helperMethod is only meaningful for the CRT-call path; the
// directly-mapped opcodes assert that it is 0.
void LowererMD::GenerateFastInlineBuiltInCall(IR::Instr* instr, IR::JnHelperMethod helperMethod)
{
    switch (instr->m_opcode)
    {
    case Js::OpCode::InlineMathSqrt:
        // Sqrt maps directly to the SSE2 instruction.
        // src and dst should already be XMM registers, all we need is just change the opcode.
        Assert(helperMethod == (IR::JnHelperMethod)0);
        Assert(instr->GetSrc2() == nullptr);
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SQRTSD : Js::OpCode::SQRTSS;
        break;

    case Js::OpCode::InlineMathAbs:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathAbs(instr);

    case Js::OpCode::InlineMathPow:
#ifdef _M_IX86
        // On x86 a float exponent cannot use GenerateFastInlineBuiltInMathPow
        // (it asserts against that case); fall through to the generic
        // helper-call path below instead.
        if (!instr->GetSrc2()->IsFloat())
        {
#endif
            this->GenerateFastInlineBuiltInMathPow(instr);
            break;
#ifdef _M_IX86
        }
        // fallthrough
#endif

    case Js::OpCode::InlineMathAcos:
    case Js::OpCode::InlineMathAsin:
    case Js::OpCode::InlineMathAtan:
    case Js::OpCode::InlineMathAtan2:
    case Js::OpCode::InlineMathCos:
    case Js::OpCode::InlineMathExp:
    case Js::OpCode::InlineMathLog:
    case Js::OpCode::Expo_A: //** operator reuses InlineMathPow fastpath
    case Js::OpCode::InlineMathSin:
    case Js::OpCode::InlineMathTan:
    {
        AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
        AssertMsg(instr->GetSrc1()->IsFloat(), "src1 must be float.");
        AssertMsg(!instr->GetSrc2() || instr->GetSrc2()->IsFloat(), "src2 must be float.");

        // Before:
        //      dst = <Built-in call> src1, src2
        // After:
        //  I386:
        //      XMM0 = MOVSD src1
        //             CALL helperMethod
        //      dst  = MOVSD call->dst
        //  AMD64:
        //      XMM0 = MOVSD src1
        //      RAX =  MOV helperMethod
        //             CALL RAX
        //      dst =  MOVSD call->dst

        // Src1: move the first argument into the first float argument register.
        IR::Instr* argOut = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
        IR::RegOpnd* dst1 = IR::RegOpnd::New(nullptr, (RegNum)FIRST_FLOAT_ARG_REG, TyMachDouble, this->m_func);
        dst1->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
        argOut->SetDst(dst1);
        argOut->SetSrc1(instr->UnlinkSrc1());
        instr->InsertBefore(argOut);

        // Src2: optionally move the second argument into the next float argument register.
        if (instr->GetSrc2() != nullptr)
        {
            IR::Instr* argOut2 = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
            IR::RegOpnd* dst2 = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_ARG_REG + 1), TyMachDouble, this->m_func);
            dst2->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
            argOut2->SetDst(dst2);
            argOut2->SetSrc1(instr->UnlinkSrc2());
            instr->InsertBefore(argOut2);
        }

        // Call CRT.
        IR::RegOpnd* floatCallDst = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_REG), TyMachDouble, this->m_func);   // Dst in XMM0.
#ifdef _M_IX86
        IR::Instr* floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, this->m_func);
        floatCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
        instr->InsertBefore(floatCall);
#else
        // s1 = MOV helperAddr
        IR::RegOpnd* s1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::AddrOpnd* helperAddr = IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func);
        IR::Instr* mov = IR::Instr::New(Js::OpCode::MOV, s1, helperAddr, this->m_func);
        instr->InsertBefore(mov);

        // dst(XMM0) = CALL s1
        IR::Instr *floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, s1, this->m_func);
        instr->InsertBefore(floatCall);
#endif
        instr->m_func->SetHasCalls();

        // Save the result: the original instr becomes the move from XMM0 to dst.
        instr->m_opcode = Js::OpCode::MOVSD;
        instr->SetSrc1(floatCall->GetDst());
        break;
    }

    case Js::OpCode::InlineMathFloor:
    case Js::OpCode::InlineMathCeil:
    case Js::OpCode::InlineMathRound:
#ifdef ENABLE_WASM
    case Js::OpCode::Trunc_A:
    case Js::OpCode::Nearest_A:
#endif //ENABLE_WASM
    {
        Assert(AutoSystemInfo::Data.SSE4_1Available());
        Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsFloat());

        // Overall shape of the expansion (the Round-only prologue adds 0.5 and
        // handles the -0/NaN/overflow special ranges; the int32-dst epilogue
        // converts and bails out on overflow or negative zero):
        //
        // MOVSD roundedFloat, src
        //
        // if(round)
        // {
        //      /* N.B.: the following CMPs are lowered to COMISDs, whose results can only be >, <, or =.
        //         In fact, only ">" can be used if NaN has not been handled.
        //      */
        //      CMP 0.5, roundedFloat
        //      JA $ltHalf
        //      CMP TwoToFraction, roundedFloat
        //      JA $addHalfToRoundSrcLabel
        //      J $skipRoundSd (NaN is also handled here)
        // $ltHalf:
        //      CMP roundedFloat, -0.5
        //      JL $ltNegHalf
        //      if (shouldCheckNegZero) {
        //          CMP roundedFloat, 0
        //          JA $setZero
        //      $negZeroTest [Helper]:
        //          JB $bailoutLabel
        //          isNegZero(src)
        //          JE $bailoutLabel
        //          J $skipRoundSd
        //      } // else: setZero
        // $setZero:
        //      MOV roundedFloat, 0
        //      J $skipRoundSd
        // $ltNegHalf:
        //      CMP roundedFloat, NegTwoToFraction
        //      JA $addHalfToRoundSrc
        //      J $skipRoundSd
        // $addHalfToRoundSrc:
        //      ADDSD roundedFloat, 0.5
        // $skipAddHalf:
        // }
        //
        // if(isNotCeil)
        // {
        //      CMP roundedFloat, 0
        //      JGE $skipRoundSd
        // }
        // ROUNDSD roundedFloat, roundedFloat, round_mode
        //
        // $skipRoundSd:
        //      if(isNotCeil)
        //          MOVSD checkNegZeroOpnd, roundedFloat
        //      else if (ceil)
        //          MOVSD checkNegZeroOpnd, src
        //
        //      CMP checkNegZeroOpnd, 0
        //      JNE $convertToInt
        //
        //      if(instr->ShouldCheckForNegativeZero())
        //      {
        //          isNegZero CALL IsNegZero(checkNegZeroOpnd)
        //          CMP isNegZero, 0
        //          JNE $bailoutLabel
        //      }
        //
        // $convertToInt:
        //      CVT(T)SD2SI dst, roundedFloat //CVTTSD2SI for floor/round and CVTSD2SI for ceil
        //      CMP dst 0x80000000
        //      JNE $fallthrough
        //
        //      if(!sharedBailout)
        //      {
        //      $bailoutLabel:
        //      }
        //      GenerateBailout(instr)
        //
        // $fallthrough:

        bool isNotCeil = instr->m_opcode != Js::OpCode::InlineMathCeil;

        // MOVSD roundedFloat, src -- work on a copy so src stays live for the
        // negative-zero checks below.
        IR::Opnd * src = instr->UnlinkSrc1();
        IR::RegOpnd* roundedFloat = IR::RegOpnd::New(src->GetType(), this->m_func);
        IR::Instr* argOut = IR::Instr::New(LowererMDArch::GetAssignOp(src->GetType()), roundedFloat, src, this->m_func);
        instr->InsertBefore(argOut);

        bool negZeroCheckDone = false;
        IR::LabelInstr * bailoutLabel = nullptr;
        bool sharedBailout = false;
        if (instr->GetDst()->IsInt32())
        {
            // A shared bailout means another instruction owns the bailout
            // record; GenerateBailOut below is told about this distinction.
            sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
            bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);
        }

        // Zero constant of the matching float width, used by several compares.
        IR::Opnd * zero;
        if (src->IsFloat64())
        {
            zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        else
        {
            Assert(src->IsFloat32());
            zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatZeroAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
        }
        IR::AutoReuseOpnd autoReuseZero(zero, this->m_func);

        IR::LabelInstr * skipRoundSd = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

        if(instr->m_opcode == Js::OpCode::InlineMathRound)
        {
            IR::LabelInstr * addHalfToRoundSrcLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            IR::LabelInstr * ltHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            IR::LabelInstr * setZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            IR::LabelInstr * ltNegHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            IR::Opnd * pointFive;
            IR::Opnd * negPointFive;
            if (src->IsFloat64())
            {
                pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegPointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
            }
            else
            {
                Assert(src->IsFloat32());
                pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
            }

            // CMP 0.5, roundedFloat
            // JA $ltHalf
            this->m_lowerer->InsertCompareBranch(pointFive, roundedFloat, Js::OpCode::BrGt_A, ltHalf, instr);

            if (instr->GetDst()->IsInt32())
            {
                // if we are specializing dst to int, we will bailout on overflow so don't need upperbound check
                // Also, we will bailout on NaN, so it doesn't need special handling either
                // J $addHalfToRoundSrcLabel
                this->m_lowerer->InsertBranch(Js::OpCode::Br, addHalfToRoundSrcLabel, instr);
            }
            else
            {
                IR::Opnd * twoToFraction;
                if (src->IsFloat64())
                {
                    twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                }
                else
                {
                    Assert(src->IsFloat32());
                    twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                }
                // CMP 2^fraction, roundedFloat
                // JA $addHalfToRoundSrcLabel
                this->m_lowerer->InsertCompareBranch(twoToFraction, roundedFloat, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
                // J $skipRoundSd (NaN also handled here)
                this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
            }

            // $ltHalf:
            instr->InsertBefore(ltHalf);
            // CMP roundedFloat, -0.5
            // JL $ltNegHalf
            this->m_lowerer->InsertCompareBranch(roundedFloat, negPointFive, Js::OpCode::BrLt_A, ltNegHalf, instr);

            if (instr->ShouldCheckForNegativeZero())
            {
                // Value is in [-0.5, 0.5): rounds to +0 or -0.  -0 must bail out.
                // CMP roundedFloat, 0
                // JA $setZero
                this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGt_A, setZero, instr);
                // $negZeroTest [helper]
                m_lowerer->InsertLabel(true, instr);
                // JB $bailoutLabel
                this->m_lowerer->InsertBranch(Js::OpCode::JB, bailoutLabel, instr);
                // if isNegZero(src) J $bailoutLabel else J $skipRoundSd
                NegZeroBranching(src, instr, bailoutLabel, skipRoundSd);
                negZeroCheckDone = true;
            }

            // $setZero:
            instr->InsertBefore(setZero);
            // MOVSD_ZERO roundedFloat
            LoadFloatZero(roundedFloat, instr);
            // J $skipRoundSd
            this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);

            // $ltNegHalf:
            instr->InsertBefore(ltNegHalf);
            if (!instr->GetDst()->IsInt32())
            {
                // if we are specializing dst to int, we will bailout on overflow so don't need lowerbound check
                IR::Opnd * negTwoToFraction;
                if (src->IsFloat64())
                {
                    negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                }
                else
                {
                    Assert(src->IsFloat32());
                    negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                }
                // CMP roundedFloat, negTwoToFraction
                // JA $addHalfToRoundSrcLabel
                this->m_lowerer->InsertCompareBranch(roundedFloat, negTwoToFraction, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
                // J $skipRoundSd
                this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
            }

            // pointFive is a MemRefOpnd and was consumed by the compare above;
            // recreate it for the ADDSD below.
            if (src->IsFloat64())
            {
                pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
            }
            else
            {
                Assert(src->IsFloat32());
                pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
            }

            // $addHalfToRoundSrcLabel
            instr->InsertBefore(addHalfToRoundSrcLabel);
            // ADDSD roundedFloat, 0.5 -- Math.round(x) == floor(x + 0.5)
            IR::Instr * addInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS, roundedFloat, roundedFloat, pointFive, this->m_func);
            instr->InsertBefore(addInstr);
            Legalize(addInstr);
        }

        if (instr->m_opcode == Js::OpCode::InlineMathFloor && instr->GetDst()->IsInt32())
        {
            // Non-negative floor with an int dst: the truncating CVTTSD2SI
            // below already floors, so skip the ROUNDSD.
            this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGe_A, skipRoundSd, instr);
        }

        // ROUNDSD srcCopy, srcCopy, round_mode
        IR::Opnd * roundMode = nullptr;
        switch (instr->m_opcode)
        {
#ifdef ENABLE_WASM
        case Js::OpCode::Trunc_A:
            roundMode = IR::IntConstOpnd::New(0x03, TyInt32, this->m_func);   // round toward zero
            break;
        case Js::OpCode::Nearest_A:
            roundMode = IR::IntConstOpnd::New(0x00, TyInt32, this->m_func);   // round to nearest (even)
            break;
#endif //ENABLE_WASM
        case Js::OpCode::InlineMathRound:
        case Js::OpCode::InlineMathFloor:
            roundMode = IR::IntConstOpnd::New(0x01, TyInt32, this->m_func);   // round toward -inf
            break;
        case Js::OpCode::InlineMathCeil:
            roundMode = IR::IntConstOpnd::New(0x02, TyInt32, this->m_func);   // round toward +inf
            break;
        }
        IR::Instr* roundInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ROUNDSD : Js::OpCode::ROUNDSS, roundedFloat, roundedFloat, roundMode, this->m_func);
        instr->InsertBefore(roundInstr);

        // NOTE: skipRoundSd is placed here for Round (regardless of dst type),
        // but inside the int32 branch below for Floor -- the two paths that
        // can jump over the ROUNDSD.
        if (instr->m_opcode == Js::OpCode::InlineMathRound)
        {
            instr->InsertBefore(skipRoundSd);
        }

        if (instr->GetDst()->IsInt32())
        {
            if (instr->m_opcode == Js::OpCode::InlineMathFloor)
            {
                instr->InsertBefore(skipRoundSd);
            }

            //negZero bailout
            if(instr->ShouldCheckForNegativeZero() && !negZeroCheckDone)
            {
                IR::LabelInstr * convertToInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                // For floor/trunc/nearest check the original src (the rounded
                // copy may have lost the sign of -0); for ceil check the result.
                IR::Opnd * checkNegZeroOpnd = isNotCeil ? src : roundedFloat;
                this->m_lowerer->InsertCompareBranch(checkNegZeroOpnd, zero, Js::OpCode::BrNeq_A, convertToInt, instr);
                m_lowerer->InsertLabel(true, instr);
                NegZeroBranching(checkNegZeroOpnd, instr, bailoutLabel, convertToInt);
                instr->InsertBefore(convertToInt);
            }

            IR::Opnd * originalDst = instr->UnlinkDst();

            // CVT(T)SD2SI dst, srcCopy -- truncating convert for floor/round,
            // current-rounding-mode convert for ceil.
            IR::Instr* convertToIntInstr;
            if (isNotCeil)
            {
                convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, originalDst, roundedFloat, this->m_func);
            }
            else
            {
                convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTSD2SI : Js::OpCode::CVTSS2SI, originalDst, roundedFloat, this->m_func);
            }
            instr->InsertBefore(convertToIntInstr);

            // The convert produces 0x80000000 (INT32_MIN) on overflow/NaN;
            // treat that sentinel as "bail out".
            IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            IR::Opnd * intOverflowValue = IR::IntConstOpnd::New(INT32_MIN, IRType::TyInt32, this->m_func, true);
            this->m_lowerer->InsertCompareBranch(originalDst, intOverflowValue, Js::OpCode::BrNeq_A, fallthrough, instr);

            instr->InsertAfter(fallthrough);
            if (!sharedBailout)
            {
                instr->InsertBefore(bailoutLabel);
            }

            // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
            // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
            this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
        }
        else
        {
            // Float dst: just move the rounded value into place and drop the
            // original instruction.
            IR::Opnd * originalDst = instr->UnlinkDst();
            Assert(originalDst->IsFloat());
            Assert(originalDst->GetType() == roundedFloat->GetType());
            IR::Instr * movInstr = IR::Instr::New(originalDst->IsFloat64() ? Js::OpCode::MOVSD : Js::OpCode::MOVSS, originalDst, roundedFloat, this->m_func);
            instr->InsertBefore(movInstr);
            instr->Remove();
        }
        break;
    }

    case Js::OpCode::InlineMathMin:
    case Js::OpCode::InlineMathMax:
    {
        IR::Opnd* src1 = instr->GetSrc1();
        IR::Opnd* src2 = instr->GetSrc2();
        IR::Opnd* dst = instr->GetDst();
        IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr* labelNaNHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::LabelInstr* labelNegZeroAndNaNCheckHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::Instr* branchInstr;
        bool min = instr->m_opcode == Js::OpCode::InlineMathMin ? true : false;

        // CMP src1, src2
        if(dst->IsInt32())
        {
            // Int case: dst starts as src2 and is overwritten with src1 only
            // when src1 is the better candidate.
            //MOV dst, src2;
            Assert(!dst->IsEqual(src2));
            this->m_lowerer->InsertMove(dst, src2, instr);
            if(min)
            {
                // BrGt_I4 src1, src2 -> $doneLabel : keep src2 (the smaller) in dst
                branchInstr = IR::BranchInstr::New(Js::OpCode::BrGt_I4, doneLabel, src1, src2, instr->m_func);
                instr->InsertBefore(branchInstr);
                LowererMDArch::EmitInt4Instr(branchInstr);
            }
            else
            {
                // BrLt_I4 src1, src2 -> $doneLabel : keep src2 (the larger) in dst
                branchInstr = IR::BranchInstr::New(Js::OpCode::BrLt_I4, doneLabel, src1, src2, instr->m_func);
                instr->InsertBefore(branchInstr);
                LowererMDArch::EmitInt4Instr(branchInstr);
            }
            // MOV dst, src1
            this->m_lowerer->InsertMove(dst, src1, instr);
        }
        else if(dst->IsFloat())
        {
            // COMISD/COMISS src1 (src2), src2 (src1)
            // JA $doneLabel
            // JEQ $labelNegZeroAndNaNCheckHelper
            // MOVSD/MOVSS dst, src2
            // JMP $doneLabel
            //
            // $labelNegZeroAndNaNCheckHelper
            //    JP $labelNaNHelper
            //    if(min)
            //    {
            //        if(src2 == -0.0)
            //            MOVSD/MOVSS dst, src2
            //    }
            //    else
            //    {
            //        if(src1 == -0.0)
            //            MOVSD/MOVSS dst, src2
            //    }
            //    JMP $doneLabel
            //
            // $labelNaNHelper
            //    MOVSD/MOVSS dst, NaN
            //
            // $doneLabel

            //MOVSD/MOVSS dst, src1;
            Assert(!dst->IsEqual(src1));
            this->m_lowerer->InsertMove(dst, src1, instr);
            if(min)
            {
                this->m_lowerer->InsertCompareBranch(src1, src2, Js::OpCode::BrLt_A, doneLabel, instr); // Lowering of BrLt_A for floats is done to JA with operands swapped
            }
            else
            {
                this->m_lowerer->InsertCompareBranch(src1, src2, Js::OpCode::BrGt_A, doneLabel, instr);
            }
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, labelNegZeroAndNaNCheckHelper, instr->m_func));
            this->m_lowerer->InsertMove(dst, src2, instr);
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));

            instr->InsertBefore(labelNegZeroAndNaNCheckHelper);
            // Equal compare also sets PF when unordered (NaN): distinguish NaN
            // from the +0 == -0 case.
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, labelNaNHelper, instr->m_func));

            // +0 vs -0: min picks -0, max picks +0.  If the candidate operand
            // is -0, the result must be src2.
            IR::LabelInstr *isNeg0Label = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            NegZeroBranching(min ? src2 : src1, instr, isNeg0Label, doneLabel);

            instr->InsertBefore(isNeg0Label);
            this->m_lowerer->InsertMove(dst, src2, instr);
            instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));

            instr->InsertBefore(labelNaNHelper);
            IR::Opnd * opndNaN = nullptr;
            if (dst->IsFloat32())
            {
                opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), IRType::TyFloat32, this->m_func);
            }
            else
            {
                opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), IRType::TyFloat64, this->m_func);
            }
            this->m_lowerer->InsertMove(dst, opndNaN, instr);
        }

        instr->InsertBefore(doneLabel);
        instr->Remove();
        break;
    }

    default:
        AssertMsg(FALSE, "Unknown inline built-in opcode");
        break;
    }
}
  8111. void LowererMD::GenerateFastInlineBuiltInMathAbs(IR::Instr* inlineInstr)
  8112. {
  8113. IR::Opnd* src = inlineInstr->GetSrc1();
  8114. IR::Opnd* dst = inlineInstr->UnlinkDst();
  8115. Assert(src);
  8116. IR::Instr* tmpInstr;
  8117. IR::Instr* nextInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  8118. IR::Instr* continueInstr = m_lowerer->LowerBailOnIntMin(inlineInstr);
  8119. continueInstr->InsertAfter(nextInstr);
  8120. IRType srcType = src->GetType();
  8121. if (srcType == IRType::TyInt32)
  8122. {
  8123. // Note: if execution gets so far, we always get (untagged) int32 here.
  8124. // Since -x = ~x + 1, abs(x) = x, abs(-x) = -x, sign-extend(x) = 0, sign_extend(-x) = -1, where 0 <= x.
  8125. // Then: abs(x) = sign-extend(x) XOR x - sign-extend(x)
  8126. // Expected input (otherwise bailout):
  8127. // - src1 is (untagged) int, not equal to int_min (abs(int_min) would produce overflow, as there's no corresponding positive int).
  8128. // MOV EAX, src
  8129. IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
  8130. regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
  8131. tmpInstr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
  8132. nextInstr->InsertBefore(tmpInstr);
  8133. IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
  8134. regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());
  8135. // CDQ (sign-extend EAX into EDX, producing 64bit EDX:EAX value)
  8136. // Note: put EDX on dst to give of def to the EDX lifetime
  8137. tmpInstr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
  8138. nextInstr->InsertBefore(tmpInstr);
  8139. // XOR EAX, EDX
  8140. tmpInstr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
  8141. nextInstr->InsertBefore(tmpInstr);
  8142. // SUB EAX, EDX
  8143. tmpInstr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
  8144. nextInstr->InsertBefore(tmpInstr);
  8145. // MOV dst, EAX
  8146. tmpInstr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
  8147. nextInstr->InsertBefore(tmpInstr);
  8148. }
  8149. else if (srcType == IRType::TyFloat64)
  8150. {
  8151. if (!dst->IsRegOpnd())
  8152. {
  8153. // MOVSD tempRegOpnd, src
  8154. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyMachDouble, this->m_func);
  8155. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  8156. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, tempRegOpnd, src, this->m_func);
  8157. nextInstr->InsertBefore(tmpInstr);
  8158. // This saves the result in the same register.
  8159. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  8160. // MOVSD dst, tempRegOpnd
  8161. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempRegOpnd, this->m_func);
  8162. nextInstr->InsertBefore(tmpInstr);
  8163. }
  8164. else
  8165. {
  8166. // MOVSD dst, src
  8167. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, src, this->m_func);
  8168. nextInstr->InsertBefore(tmpInstr);
  8169. // This saves the result in the same register.
  8170. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  8171. }
  8172. }
  8173. else if (srcType == IRType::TyFloat32)
  8174. {
  8175. if (!dst->IsRegOpnd())
  8176. {
  8177. // MOVSS tempRegOpnd, src
  8178. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyFloat32, this->m_func);
  8179. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  8180. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, tempRegOpnd, src, this->m_func);
  8181. nextInstr->InsertBefore(tmpInstr);
  8182. // This saves the result in the same register.
  8183. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  8184. // MOVSS dst, tempRegOpnd
  8185. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, tempRegOpnd, this->m_func);
  8186. nextInstr->InsertBefore(tmpInstr);
  8187. }
  8188. else
  8189. {
  8190. // MOVSS dst, src
  8191. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, src, this->m_func);
  8192. nextInstr->InsertBefore(tmpInstr);
  8193. // This saves the result in the same register.
  8194. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  8195. }
  8196. }
  8197. else
  8198. {
  8199. AssertMsg(FALSE, "GenerateFastInlineBuiltInMathAbs: unexpected type of the src!");
  8200. }
  8201. }
// Lowers an inlined Math.pow call to a direct helper call, choosing the helper
// by the operand types:
//   - pow(double, int) -> HelperDirectMath_PowDoubleInt
//   - pow(int, int)    -> HelperDirectMath_PowIntInt (takes an extra bool*
//                         out-param on the stack that becomes the bailout condition)
//   - pow(float, float)-> HelperDirectMath_Pow (non-x86 only; on x86 this shape
//                         requires customized lowering and is asserted against)
// Arguments are loaded src2 first, then src1 (helper-argument loading convention).
void LowererMD::GenerateFastInlineBuiltInMathPow(IR::Instr* instr)
{
#ifdef _M_IX86
    // On x86, a float src2 must have been routed to customized lowering upstream.
    AssertMsg(!instr->GetSrc2()->IsFloat(), "Math.pow(*, double) needs customized lowering!");
#endif
    IR::JnHelperMethod directPowHelper = (IR::JnHelperMethod)0;
    IR::Opnd* bailoutOpnd = nullptr;

    if (!instr->GetSrc2()->IsFloat())
    {
        // Integer exponent: load it first, then dispatch on the base's type.
        LoadHelperArgument(instr, instr->UnlinkSrc2());

        if (instr->GetSrc1()->IsFloat())
        {
            directPowHelper = IR::HelperDirectMath_PowDoubleInt;
            LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
        }
        else
        {
            directPowHelper = IR::HelperDirectMath_PowIntInt;
            LoadHelperArgument(instr, instr->UnlinkSrc1());

            // PowIntInt reports "result not representable as int" through a
            // TyUint8 stack slot; lazily allocate one shared slot per function.
            if (!this->m_func->tempSymBool)
            {
                this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
                this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
            }
            IR::SymOpnd* boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
            IR::RegOpnd* boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            // Pass the slot's address to the helper; the slot itself is then
            // checked as the bailout condition after the call.
            this->m_lowerer->InsertLea(boolRefOpnd, boolOpnd, instr);
            LoadHelperArgument(instr, boolRefOpnd);
            bailoutOpnd = boolOpnd;
        }
    }
#ifndef _M_IX86
    else
    {
        // Float exponent (non-x86): both operands must be float.
        AssertMsg(instr->GetSrc1()->IsFloat(), "Math.Pow(int, double) should not generated by GlobOpt!");
        directPowHelper = IR::HelperDirectMath_Pow;
        LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
        LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
    }
#endif
    ChangeToHelperCall(instr, directPowHelper, nullptr, bailoutOpnd);
}
// Emits a bitwise check for negative zero on a float operand and branches to
// isNeg0Label if the operand's bit pattern equals -0.0, otherwise to
// isNotNeg0Label. (-0.0 cannot be distinguished from +0.0 by a float compare,
// so the float bits are reinterpreted as an integer and compared against the
// negative-zero constant.) Instructions are inserted before 'instr'; the
// trailing JMP to isNotNeg0Label is returned.
IR::Instr *
LowererMD::NegZeroBranching(IR::Opnd* opnd, IR::Instr* instr, IR::LabelInstr* isNeg0Label, IR::LabelInstr* isNotNeg0Label)
{
    Assert(opnd->IsFloat());
    bool is32Bits = opnd->IsFloat32();
    IRType regType = is32Bits ? TyUint32 : TyUint64;

    // Use UInt64 comparison between the opnd to check and negative zero constant.
    // For this we have to convert opnd which is a double to uint64.
    // MOV intOpnd, src
    IR::RegOpnd *intOpnd = IR::RegOpnd::New(regType, this->m_func);
    EmitReinterpretFloatToInt(intOpnd, opnd, instr);

#if LOWER_SPLIT_INT64
    if (!is32Bits)
    {
        // For 64bits comparisons on x86 we need to check 2 registers
        // CMP intOpnd.high, (k_NegZero >> 32).i32
        // BRNEQ isNotNeg0Label
        // CMP intOpnd.low, k_NegZero.i32
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(intOpnd);
        const uint32 high64NegZero = Js::NumberConstants::k_NegZero >> 32;
        const uint32 low64NegZero = Js::NumberConstants::k_NegZero & UINT32_MAX;
        IR::IntConstOpnd *negZeroHighOpnd = IR::IntConstOpnd::New(high64NegZero, TyUint32, m_func);
        IR::IntConstOpnd *negZeroLowOpnd = IR::IntConstOpnd::New(low64NegZero, TyUint32, m_func);
        m_lowerer->InsertCompareBranch(dstPair.high, negZeroHighOpnd, Js::OpCode::BrNeq_A, isNotNeg0Label, instr);
        m_lowerer->InsertCompareBranch(dstPair.low, negZeroLowOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    else
#endif
    {
#if _M_IX86
        // On x86 this path only handles float32 (the 64-bit case is split above),
        // so only the 32-bit negative-zero constant is needed.
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(Js::NumberConstants::k_Float32NegZero, regType, m_func);
#else
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(is32Bits ? Js::NumberConstants::k_Float32NegZero : Js::NumberConstants::k_NegZero, regType, m_func);
#endif
        // CMP intOpnd, k_NegZero
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        m_lowerer->InsertCompareBranch(intOpnd, negZeroOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }

    // Fall-through: not negative zero.
    IR::Instr* jmpNotNegZero = IR::BranchInstr::New(Js::OpCode::JMP, isNotNeg0Label, m_func);
    instr->InsertBefore(jmpNotNegZero);
    return jmpNotNegZero;
}
  8289. void
  8290. LowererMD::FinalLower()
  8291. {
  8292. this->lowererMDArch.FinalLower();
  8293. }
  8294. IR::Instr *
  8295. LowererMD::LowerDivI4AndBailOnReminder(IR::Instr * instr, IR::LabelInstr * bailOutLabel)
  8296. {
  8297. // Don't have save the operand for bailout because the lowering of IDIV don't overwrite their values
  8298. // (EDX) = CDQ
  8299. // EAX = numerator
  8300. // (EDX:EAX)= IDIV (EAX), denominator
  8301. // TEST EDX, EDX
  8302. // JNE bailout
  8303. // <Caller insert more checks here>
  8304. // dst = MOV EAX <-- assignInstr
  8305. Assert(instr);
  8306. Assert(instr->m_opcode == Js::OpCode::Div_I4);
  8307. Assert(!instr->HasBailOutInfo());
  8308. EmitInt4Instr(instr);
  8309. Assert(instr->m_opcode == Js::OpCode::IDIV);
  8310. IR::Instr * prev = instr->m_prev;
  8311. Assert(prev->m_opcode == Js::OpCode::CDQ);
  8312. #ifdef _M_IX86
  8313. Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegEDX);
  8314. #else
  8315. Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegRDX);
  8316. #endif
  8317. IR::Opnd * reminderOpnd = prev->GetDst();
  8318. // Insert all check before the assignment to the actual dst.
  8319. IR::Instr * insertBeforeInstr = instr->m_next;
  8320. Assert(insertBeforeInstr->m_opcode == Js::OpCode::MOV);
  8321. #ifdef _M_IX86
  8322. Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegEAX);
  8323. #else
  8324. Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegRAX);
  8325. #endif
  8326. // Jump to bailout if the reminder is not 0 (not int result)
  8327. this->m_lowerer->InsertTestBranch(reminderOpnd, reminderOpnd, Js::OpCode::BrNeq_A, bailOutLabel, insertBeforeInstr);
  8328. return insertBeforeInstr;
  8329. }
// Lowers the typeof operator: computes the object's typeId and loads the
// corresponding pre-created display string from the library's
// typeDisplayStrings table. Fast paths: tagged ints map straight to
// TypeIds_Number; falsy objects map to TypeIds_Undefined; external objects
// (typeId >= TypeIds_Limit) are clamped to TypeIds_Object. A null table entry
// falls back to the OP_TypeOf helper.
void
LowererMD::LowerTypeof(IR::Instr * typeOfInstr)
{
    Func * func = typeOfInstr->m_func;
    IR::Opnd * src1 = typeOfInstr->GetSrc1();
    IR::Opnd * dst = typeOfInstr->GetDst();
    Assert(src1->IsRegOpnd() && dst->IsRegOpnd());
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * taggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV typeDisplayStringsArray, &javascriptLibrary->typeDisplayStrings
    IR::RegOpnd * typeDisplayStringsArrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowerer->InsertMove(typeDisplayStringsArrayOpnd, IR::AddrOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetTypeDisplayStringsOffset(), IR::AddrOpndKindConstantAddress, this->m_func), typeOfInstr);

    // Tagged ints take the TypeIds_Number shortcut below.
    GenerateObjectTest(src1, typeOfInstr, taggedIntLabel);

    // MOV typeId, TypeIds_Object
    // MOV typeRegOpnd, [src1 + offset(Type)]
    // MOV objTypeId, [typeRegOpnd + offsetof(typeId)]
    // CMP objTypeId, TypeIds_Limit /*external object test*/
    // CMOVB typeId, objTypeId
    // TEST [typeRegOpnd + offsetof(flags)], TypeFlagMask_IsFalsy /*test for falsy*/
    // CMOVNE typeId, TypeIds_Undefined
    // MOV dst, typeDisplayStrings[typeId]
    // TEST dst, dst
    // JE $helper
    // JMP $done
    IR::RegOpnd * typeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Object, TyUint32, func), typeOfInstr);
    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, func);
    m_lowerer->InsertMove(typeRegOpnd,
        IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func),
        typeOfInstr);
    IR::RegOpnd * objTypeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(objTypeIdOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, func), typeOfInstr);
    m_lowerer->InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Limit, TyUint32, func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVB, typeIdOpnd, objTypeIdOpnd, typeOfInstr);

    // Insert MOV reg, 0 before the TEST because MOV reg, 0 will be peeped to XOR reg, reg and that may affect the zero flags that CMOVE depends on
    IR::RegOpnd* typeIdUndefinedOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdUndefinedOpnd, IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyUint32, func), typeOfInstr);

    IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
    m_lowerer->InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVNE, typeIdOpnd, typeIdUndefinedOpnd, typeOfInstr);

    if (dst->IsEqual(src1))
    {
        // dst aliases src1: hoist src1 into a fresh register first,
        // presumably so the helper path below still sees the original value
        // after the fast path writes dst — confirm against HoistSrc1 semantics.
        ChangeToAssign(typeOfInstr->HoistSrc1(Js::OpCode::Ld_A));
    }
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, typeIdOpnd, this->GetDefaultIndirScale(), TyMachPtr, func), typeOfInstr);
    // A null entry in the table means no precomputed string: go to the helper.
    m_lowerer->InsertTestBranch(dst, dst, Js::OpCode::BrEq_A, helperLabel, typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);

    // $taggedInt:
    // MOV dst, typeDisplayStrings[TypeIds_Number]
    // JMP $done
    typeOfInstr->InsertBefore(taggedIntLabel);
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Number * sizeof(Js::Var), TyMachPtr, func), typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);

    // $helper
    // CALL OP_TypeOf
    // $done
    typeOfInstr->InsertBefore(helperLabel);
    typeOfInstr->InsertAfter(doneLabel);
    m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
}
  8391. IR::Instr*
  8392. LowererMD::InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc)
  8393. {
  8394. Assert(opCode > Js::OpCode::MDStart);
  8395. Func* func = insertBeforeInstr->m_func;
  8396. IR::Opnd* src2 = nullptr;
  8397. if (!postRegAlloc)
  8398. {
  8399. src2 = src1;
  8400. src1 = dst;
  8401. }
  8402. IR::Instr * instr = IR::Instr::New(opCode, dst, src1, src2, func);
  8403. insertBeforeInstr->InsertBefore(instr);
  8404. LowererMD::Legalize(instr);
  8405. return instr;
  8406. }