LowerMDShared.cpp 306 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
7827882798280828182828283828482858286828782888289829082918292829382948295829682978298829983008301830283038304830583068307830883098310831183128313831483158316831783188319832083218322832383248325832683278328832983308331833283338334833583368337833883398340834183428343834483458346834783488349835083518352835383548355835683578358835983608361836283638364836583668367836883698370837183728373837483758376837783788379838083818382838383848385838683878388838983908391839283938394839583968397839883998400840184028403840484058406840784088409841084118412841384148415841684178418841984208421842284238424842584268427842884298430843184328433843484358436843784388439844084418442844384448445844684478448844984508451845284538454845584568457845884598460846184628463846484658466846784688469847084718472847384748475847684778478847984808481848284838484848584868487848884898490849184928493849484958496849784988499850085018502850385048505850685078508850985108511851285138514851585168517851885198520852185228523852485258526852785288529853085318532853385348535853685378538853985408541854285438544854585468547854885498550855185528553855485558556855785588559856085618562856385648565856685678568856985708571857285738574857585768577857885798580858185828583858485858586858785888589859085918592859385948595859685978598859986008601860286038604860586068607860886098610861186128613861486158616861786188619862086218622862386248625862686278628862986308631863286338634863586368637863886398640864186428643864486458646864786488649865086518652865386548655865686578658865986608661866286638664866586668667866886698670867186728673867486758676867786788679868086818682868386848685868686878688868986908691869286938694869586968697869886998700870187028703870487058706870787088709871087118712871387148715871687178718871987208721872287238724872587268727872887298730873187328733873487358736873787388739874087418742874387448745874687478748
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Language/JavascriptFunctionArgIndex.h"
// Machine-dependent opcode aliases used by the shared (x86/x64) lowerer.
const Js::OpCode LowererMD::MDUncondBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDMultiBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDTestOpcode = Js::OpCode::TEST;
const Js::OpCode LowererMD::MDOrOpcode = Js::OpCode::OR;
const Js::OpCode LowererMD::MDXorOpcode = Js::OpCode::XOR;
#if _M_X64
const Js::OpCode LowererMD::MDMovUint64ToFloat64Opcode = Js::OpCode::MOVQ;
#endif
const Js::OpCode LowererMD::MDOverflowBranchOpcode = Js::OpCode::JO;
const Js::OpCode LowererMD::MDNotOverflowBranchOpcode = Js::OpCode::JNO;
const Js::OpCode LowererMD::MDConvertFloat32ToFloat64Opcode = Js::OpCode::CVTSS2SD;
const Js::OpCode LowererMD::MDConvertFloat64ToFloat32Opcode = Js::OpCode::CVTSD2SS;
const Js::OpCode LowererMD::MDCallOpcode = Js::OpCode::CALL;
const Js::OpCode LowererMD::MDImulOpcode = Js::OpCode::IMUL2;
const Js::OpCode LowererMD::MDLea = Js::OpCode::LEA;
const Js::OpCode LowererMD::MDSpecBlockNEOpcode = Js::OpCode::CMOVNE;
const Js::OpCode LowererMD::MDSpecBlockFNEOpcode = Js::OpCode::CMOVNE;
// IEEE-754 single-precision bit pattern of 2^31 (2147483648.0f).
static const int TWO_31_FLOAT = 0x4f000000;
// IEEE-754 single-precision bit pattern of -2^31 (-2147483648.0f).
static const int FLOAT_INT_MIN = 0xcf000000;
  26. //
  27. // Static utility fn()
  28. //
  29. bool
  30. LowererMD::IsAssign(IR::Instr *instr)
  31. {
  32. return instr->GetDst() && instr->m_opcode == LowererMDArch::GetAssignOp(instr->GetDst()->GetType());
  33. }
///----------------------------------------------------------------------------
///
/// LowererMD::IsCall
///
/// Returns true iff instr is already a machine-level CALL.
///
///----------------------------------------------------------------------------
bool
LowererMD::IsCall(IR::Instr *instr)
{
    return instr->m_opcode == Js::OpCode::CALL;
}
///----------------------------------------------------------------------------
///
/// LowererMD::IsUnconditionalBranch
///
/// Returns true iff instr is an unconditional jump (JMP).
///
///----------------------------------------------------------------------------
bool
LowererMD::IsUnconditionalBranch(const IR::Instr *instr)
{
    return (instr->m_opcode == Js::OpCode::JMP);
}
// GenerateMemRef: Return an opnd that can be used to access the given address.
// On this (x86/x64) target an absolute address is directly addressable, so the
// instr and dontEncode parameters are unused here.
IR::Opnd *
LowererMD::GenerateMemRef(intptr_t addr, IRType type, IR::Instr *instr, bool dontEncode)
{
    return IR::MemRefOpnd::New(addr, type, this->m_func);
}
// Emit code before insertBeforeInstr that stores the constant 'value' into
// [opnd + offset]. isZeroed is forwarded to the callee; presumably it marks
// memory known to be zero-initialized — confirm in GenerateMemInit's callees.
void
LowererMD::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
#if _M_X64
    // 64-bit target: size_t value may not fit in 32 bits; use the arch path.
    lowererMDArch.GenerateMemInit(opnd, offset, value, insertBeforeInstr, isZeroed);
#else
    // 32-bit target: value fits in uint32; the shared lowerer handles it.
    m_lowerer->GenerateMemInit(opnd, offset, (uint32)value, insertBeforeInstr, isZeroed);
#endif
}
  69. ///----------------------------------------------------------------------------
  70. ///
  71. /// LowererMD::InvertBranch
  72. ///
  73. ///----------------------------------------------------------------------------
  74. void
  75. LowererMD::InvertBranch(IR::BranchInstr *branchInstr)
  76. {
  77. switch (branchInstr->m_opcode)
  78. {
  79. case Js::OpCode::JA:
  80. branchInstr->m_opcode = Js::OpCode::JBE;
  81. break;
  82. case Js::OpCode::JAE:
  83. branchInstr->m_opcode = Js::OpCode::JB;
  84. break;
  85. case Js::OpCode::JB:
  86. branchInstr->m_opcode = Js::OpCode::JAE;
  87. break;
  88. case Js::OpCode::JBE:
  89. branchInstr->m_opcode = Js::OpCode::JA;
  90. break;
  91. case Js::OpCode::JEQ:
  92. branchInstr->m_opcode = Js::OpCode::JNE;
  93. break;
  94. case Js::OpCode::JNE:
  95. branchInstr->m_opcode = Js::OpCode::JEQ;
  96. break;
  97. case Js::OpCode::JGE:
  98. branchInstr->m_opcode = Js::OpCode::JLT;
  99. break;
  100. case Js::OpCode::JGT:
  101. branchInstr->m_opcode = Js::OpCode::JLE;
  102. break;
  103. case Js::OpCode::JLT:
  104. branchInstr->m_opcode = Js::OpCode::JGE;
  105. break;
  106. case Js::OpCode::JLE:
  107. branchInstr->m_opcode = Js::OpCode::JGT;
  108. break;
  109. case Js::OpCode::JO:
  110. branchInstr->m_opcode = Js::OpCode::JNO;
  111. break;
  112. case Js::OpCode::JNO:
  113. branchInstr->m_opcode = Js::OpCode::JO;
  114. break;
  115. case Js::OpCode::JP:
  116. branchInstr->m_opcode = Js::OpCode::JNP;
  117. break;
  118. case Js::OpCode::JNP:
  119. branchInstr->m_opcode = Js::OpCode::JP;
  120. break;
  121. case Js::OpCode::JSB:
  122. branchInstr->m_opcode = Js::OpCode::JNSB;
  123. break;
  124. case Js::OpCode::JNSB:
  125. branchInstr->m_opcode = Js::OpCode::JSB;
  126. break;
  127. default:
  128. AssertMsg(UNREACHED, "JCC missing in InvertBranch()");
  129. }
  130. }
  131. void
  132. LowererMD::ReverseBranch(IR::BranchInstr *branchInstr)
  133. {
  134. switch (branchInstr->m_opcode)
  135. {
  136. case Js::OpCode::JA:
  137. branchInstr->m_opcode = Js::OpCode::JB;
  138. break;
  139. case Js::OpCode::JAE:
  140. branchInstr->m_opcode = Js::OpCode::JBE;
  141. break;
  142. case Js::OpCode::JB:
  143. branchInstr->m_opcode = Js::OpCode::JA;
  144. break;
  145. case Js::OpCode::JBE:
  146. branchInstr->m_opcode = Js::OpCode::JAE;
  147. break;
  148. case Js::OpCode::JGE:
  149. branchInstr->m_opcode = Js::OpCode::JLE;
  150. break;
  151. case Js::OpCode::JGT:
  152. branchInstr->m_opcode = Js::OpCode::JLT;
  153. break;
  154. case Js::OpCode::JLT:
  155. branchInstr->m_opcode = Js::OpCode::JGT;
  156. break;
  157. case Js::OpCode::JLE:
  158. branchInstr->m_opcode = Js::OpCode::JGE;
  159. break;
  160. case Js::OpCode::JEQ:
  161. case Js::OpCode::JNE:
  162. case Js::OpCode::JO:
  163. case Js::OpCode::JNO:
  164. case Js::OpCode::JP:
  165. case Js::OpCode::JNP:
  166. case Js::OpCode::JSB:
  167. case Js::OpCode::JNSB:
  168. break;
  169. default:
  170. AssertMsg(UNREACHED, "JCC missing in ReverseBranch()");
  171. }
  172. }
// LowerCallHelper: lower a helper-call instruction whose src1 is the helper
// method and whose src2 heads a chain of ArgOut_A / ExtendArg_A instructions
// (linked through their src2). Each argument is loaded via the arch-specific
// helper-argument convention, the ArgOut instructions are removed, and the
// call itself becomes a machine CALL. Returns instrCall so the caller keeps
// lowering from the call site.
IR::Instr *
LowererMD::LowerCallHelper(IR::Instr *instrCall)
{
    IR::Opnd *argOpnd = instrCall->UnlinkSrc2();
    IR::Instr *prevInstr = nullptr;
    IR::JnHelperMethod helperMethod = instrCall->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
    instrCall->FreeSrc1();
#ifndef _M_X64
    // x86: turn the instruction into the CALL first; argument loads are then
    // inserted before it as we walk the chain below.
    bool callHasDst = instrCall->GetDst() != nullptr;
    prevInstr = ChangeToHelperCall(instrCall, helperMethod);
    if (callHasDst)
    {
        // Step back past the return-value move so prevInstr is the CALL itself
        // (verified by the assert below).
        prevInstr = prevInstr->m_prev;
    }
    Assert(prevInstr->GetSrc1()->IsHelperCallOpnd() && prevInstr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper == helperMethod);
#else
    // x64: arguments are collected first; the CALL is emitted at the end.
    prevInstr = instrCall;
#endif
    while (argOpnd)
    {
        Assert(argOpnd->IsRegOpnd());
        IR::RegOpnd *regArg = argOpnd->AsRegOpnd();
        Assert(regArg->m_sym->m_isSingleDef);
        IR::Instr *instrArg = regArg->m_sym->m_instrDef;
        // NOTE: && binds tighter than || — ArgOut_A is always accepted, while
        // ExtendArg_A is only accepted for the specific helpers listed.
        Assert(instrArg->m_opcode == Js::OpCode::ArgOut_A || instrArg->m_opcode == Js::OpCode::ExtendArg_A &&
        (
            helperMethod == IR::JnHelperMethod::HelperOP_InitCachedScope ||
            helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj ||
            helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj ||
            helperMethod == IR::JnHelperMethod::HelperRestify ||
            helperMethod == IR::JnHelperMethod::HelperStPropIdArrFromVar
        ));
        prevInstr = LoadHelperArgument(prevInstr, instrArg->GetSrc1());
        argOpnd = instrArg->GetSrc2();
        if (prevInstr == instrArg)
        {
            prevInstr = prevInstr->m_prev;
        }
        if (instrArg->m_opcode == Js::OpCode::ArgOut_A)
        {
            // The ArgOut has served its purpose; free its def and remove it.
            instrArg->UnlinkSrc1();
            if (argOpnd)
            {
                instrArg->UnlinkSrc2();
            }
            regArg->Free(this->m_func);
            instrArg->Remove();
        }
        else if (instrArg->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // ExtendArg stays; keep its source alive across loop back edges.
            if (instrArg->GetSrc1()->IsRegOpnd())
            {
                m_lowerer->addToLiveOnBackEdgeSyms->Set(instrArg->GetSrc1()->AsRegOpnd()->GetStackSym()->m_id);
            }
        }
    }
    // The HomeObj helpers do not take a script context; all others do.
    switch (helperMethod)
    {
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj:
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj:
        break;
    default:
        prevInstr = m_lowerer->LoadScriptContext(prevInstr);
        break;
    }
#ifdef _M_X64
    FlipHelperCallArgsOrder();
    ChangeToHelperCall(instrCall, helperMethod);
#else
    this->lowererMDArch.ResetHelperArgsCount();
#endif
    // There might be ToVar in between the ArgOut, need to continue lower from the call still
    return instrCall;
}
//
// forwarding functions
//
// These delegate directly to the architecture-specific lowerer (lowererMDArch).
//
// Lower a call with the given argument count.
IR::Instr *
LowererMD::LowerCall(IR::Instr * callInstr, Js::ArgSlot argCount)
{
    return this->lowererMDArch.LowerCall(callInstr, argCount);
}
// Lower an indirect (CallI) call.
IR::Instr *
LowererMD::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    return this->lowererMDArch.LowerCallI(callInstr, callFlags, isHelper, insertBeforeInstrForCFG);
}
// Lower an asm.js internal call. Under the AsmjsCallDebugBreak phase (DBG
// builds) a debug break is emitted right after the call.
IR::Instr *
LowererMD::LowerAsmJsCallI(IR::Instr * callInstr)
{
#if DBG
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(callInstr->m_next);
    }
#endif
    return this->lowererMDArch.LowerAsmJsCallI(callInstr);
}
// Lower an asm.js external call; same debug-break support as LowerAsmJsCallI.
IR::Instr *
LowererMD::LowerAsmJsCallE(IR::Instr * callInstr)
{
#if DBG
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(callInstr->m_next);
    }
#endif
    return this->lowererMDArch.LowerAsmJsCallE(callInstr);
}
// Lower a WebAssembly array bounds check for the access at addrOpnd.
IR::Instr *
LowererMD::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
{
    return this->lowererMDArch.LowerWasmArrayBoundsCheck(instr, addrOpnd);
}
// Lower an atomic store of src1 into dst.
void LowererMD::LowerAtomicStore(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
{
    return this->lowererMDArch.LowerAtomicStore(dst, src1, insertBeforeInstr);
}
// Lower an atomic load of src1 into dst.
void LowererMD::LowerAtomicLoad(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
{
    return this->lowererMDArch.LowerAtomicLoad(dst, src1, insertBeforeInstr);
}
// Lower the asm.js load-element helper path.
IR::Instr *
LowererMD::LowerAsmJsLdElemHelper(IR::Instr * callInstr)
{
    return this->lowererMDArch.LowerAsmJsLdElemHelper(callInstr);
}
// Lower the asm.js store-element helper path.
IR::Instr *
LowererMD::LowerAsmJsStElemHelper(IR::Instr * callInstr)
{
    return this->lowererMDArch.LowerAsmJsStElemHelper(callInstr);
}
// Pass a 64-bit integer argument to an upcoming helper call.
IR::Instr *
LowererMD::LoadInt64HelperArgument(IR::Instr * instr, IR::Opnd* opnd)
{
    return this->lowererMDArch.LoadInt64HelperArgument(instr, opnd);
}
// Pass a machine-word argument to an upcoming helper call.
IR::Instr *
LowererMD::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
{
    return this->lowererMDArch.LoadHelperArgument(instr, opndArg);
}
// Pass a double-precision argument to an upcoming helper call.
IR::Instr *
LowererMD::LoadDoubleHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
{
    return this->lowererMDArch.LoadDoubleHelperArgument(instr, opndArg);
}
// Pass a single-precision argument to an upcoming helper call.
IR::Instr *
LowererMD::LoadFloatHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
{
    return this->lowererMDArch.LoadFloatHelperArgument(instr, opndArg);
}
// Lower the function entry (prolog).
IR::Instr *
LowererMD::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    return this->lowererMDArch.LowerEntryInstr(entryInstr);
}
// Lower the function exit (epilog).
IR::Instr *
LowererMD::LowerExitInstr(IR::ExitInstr * exitInstr)
{
    return this->lowererMDArch.LowerExitInstr(exitInstr);
}
// Lower the exit for asm.js functions.
IR::Instr *
LowererMD::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
{
    return this->lowererMDArch.LowerExitInstrAsmJs(exitInstr);
}
// Load the first argument for a NewScObject-style call into dst.
IR::Instr *
LowererMD::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
{
    return this->lowererMDArch.LoadNewScObjFirstArg(instr, dst, extraArgs);
}
// LowerTry: lower the entry of a try region (TryCatch/TryFinally). At the
// label that begins the try we load the helper arguments (script context,
// optional bailed-out offset, x64 spill/args sizes, frame pointer, handler
// address, try address), call the EH helper, and then jump to whatever
// continuation address the helper returns.
IR::Instr *
LowererMD::LowerTry(IR::Instr *tryInstr, IR::JnHelperMethod helperMethod)
{
    // Mark the entry to the try
    IR::Instr *instr = tryInstr->GetNextRealInstrOrLabel();
    AssertMsg(instr->IsLabelInstr(), "No label at the entry to a try?");
    IR::LabelInstr *tryAddr = instr->AsLabelInstr();
    // Arg 5: ScriptContext
    this->m_lowerer->LoadScriptContext(tryAddr);
    if (tryInstr->m_opcode == Js::OpCode::TryCatch || (this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout)))
    {
        // Arg 4 : hasBailedOutOffset
        IR::Opnd * hasBailedOutOffset = IR::IntConstOpnd::New(this->m_func->m_hasBailedOutSym->m_offset, TyInt32, this->m_func);
        this->LoadHelperArgument(tryAddr, hasBailedOutOffset);
    }
#ifdef _M_X64
    // Arg: args size
    IR::RegOpnd *argsSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdArgSize, argsSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, argsSizeOpnd);
    // Arg: spill size
    IR::RegOpnd *spillSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdSpillSize, spillSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, spillSizeOpnd);
#endif
    // Arg 3: frame pointer
    IR::RegOpnd *ebpOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegBlockPointer(), TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, ebpOpnd);
    // Arg 2: handler address (the branch target of the try instruction)
    IR::LabelInstr *helperAddr = tryInstr->AsBranchInstr()->GetTarget();
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(helperAddr, this->m_func));
    // Arg 1: try address
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(tryAddr, this->m_func));
    // Call the helper; its return value is the address at which to resume.
    IR::RegOpnd *continuationAddr =
        IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr *callInstr = IR::Instr::New(
        Js::OpCode::Call, continuationAddr, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    tryAddr->InsertBefore(callInstr);
    this->LowerCall(callInstr, 0);
#ifdef _M_X64
    {
        // Emit some instruction to separate the CALL from the JMP following it. The OS stack unwinder
        // mistakes the JMP for the start of the epilog otherwise.
        IR::Instr *nop = IR::Instr::New(Js::OpCode::NOP, m_func);
        tryAddr->InsertBefore(nop);
    }
#endif
    // Jump to the continuation address supplied by the helper
    IR::BranchInstr *branchInstr = IR::MultiBranchInstr::New(Js::OpCode::JMP, continuationAddr, this->m_func);
    tryAddr->InsertBefore(branchInstr);
    return tryInstr->m_prev;
}
// Forward EH-region return lowering to the architecture-specific lowerer.
IR::Instr *
LowererMD::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
{
    return lowererMDArch.LowerEHRegionReturn(insertBeforeInstr, targetOpnd);
}
// LowerLeaveNull: lower the end of a finally region that completes normally.
// Returns a null continuation address to the EH helper (zeroed return
// register), so execution resumes at the point determined by the try or the
// exception handler.
IR::Instr *
LowererMD::LowerLeaveNull(IR::Instr *finallyEndInstr)
{
    IR::Instr *instrPrev = finallyEndInstr->m_prev;
    IR::Instr *instr = nullptr;
    // Return a null continuation address to the helper: execution will resume at the point determined by the try
    // or the exception handler.
    // XOR retReg, retReg — zeroes the return register.
    IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg,this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::XOR, retReg, this->m_func);
    IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr->SetSrc1(eaxOpnd);
    instr->SetSrc2(eaxOpnd);
    finallyEndInstr->InsertBefore(instr);
#if _M_X64
    {
        // amd64_ReturnFromCallWithFakeFrame expects to find the spill size and args size
        // in REG_EH_SPILL_SIZE and REG_EH_ARGS_SIZE.
        // MOV REG_EH_SPILL_SIZE, spillSize
        IR::Instr *movR8 = IR::Instr::New(Js::OpCode::LdSpillSize,
            IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR8);
        // MOV REG_EH_ARGS_SIZE, argsSize
        IR::Instr *movR9 = IR::Instr::New(Js::OpCode::LdArgSize,
            IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR9);
        // Push the fake-frame return helper's address so the RET below
        // transfers control to it.
        IR::Opnd *targetOpnd = IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
        IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
            targetOpnd,
            IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movTarget);
        IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
        push->SetSrc1(targetOpnd);
        finallyEndInstr->InsertBefore(push);
    }
#endif
    // RET 0, with the (zeroed) return register as the returned value.
    IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::RET, this->m_func);
    instr->SetSrc1(intSrc);
    instr->SetSrc2(retReg);
    finallyEndInstr->InsertBefore(instr);
    finallyEndInstr->Remove();
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMD::Init
///
/// Wire this machine-dependent lowerer to its owning Lowerer and initialize
/// the architecture-specific helper (plus the SIMD opcode map when WASM SIMD
/// is enabled).
///
///----------------------------------------------------------------------------
void
LowererMD::Init(Lowerer *lowerer)
{
    m_lowerer = lowerer;
    this->lowererMDArch.Init(this);
#ifdef ENABLE_WASM_SIMD
    Simd128InitOpcodeMap();
#endif
}
  463. ///----------------------------------------------------------------------------
  464. ///
  465. /// LowererMD::LoadInputParamCount
  466. ///
  467. /// Load the passed-in parameter count from the appropriate EBP slot.
  468. ///
  469. ///----------------------------------------------------------------------------
  470. IR::Instr *
  471. LowererMD::LoadInputParamCount(IR::Instr * instrInsert, int adjust, bool needFlags)
  472. {
  473. IR::Instr * instr;
  474. IR::RegOpnd * dstOpnd;
  475. IR::SymOpnd * srcOpnd;
  476. srcOpnd = Lowerer::LoadCallInfo(instrInsert);
  477. dstOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
  478. instr = IR::Instr::New(Js::OpCode::MOV, dstOpnd, srcOpnd, this->m_func);
  479. instrInsert->InsertBefore(instr);
  480. // Copy the callinfo before masking off the param count
  481. Assert(Js::CallInfo::ksizeofCount == 24);
  482. // Mask off call flags from callinfo
  483. instr = IR::Instr::New(Js::OpCode::AND, dstOpnd, dstOpnd,
  484. IR::IntConstOpnd::New(0x00FFFFFF, TyMachReg, this->m_func, true), this->m_func);
  485. instrInsert->InsertBefore(instr);
  486. instr = m_lowerer->InsertSub(true, dstOpnd, dstOpnd, IR::IntConstOpnd::New(-adjust, TyMachReg, this->m_func), instrInsert);
  487. return instr;
  488. }
// LoadStackArgPtr: rewrite instr to produce a pointer to the first user
// argument (the one after "this"). Loop bodies read the in-params pointer out
// of the interpreter frame instance; regular functions use the arch path.
// Returns the instruction lowering should continue from.
IR::Instr *
LowererMD::LoadStackArgPtr(IR::Instr * instr)
{
    if (this->m_func->IsLoopBody())
    {
        // Get the first user param from the interpreter frame instance that was passed in.
        // These args don't include the func object and callinfo; we just need to advance past "this".
        // t1 = MOV [prm1 + m_inParams]
        // dst = LEA &[t1 + sizeof(var)]
        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInParams();
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Instr *instrLdParams = IR::Instr::New(Js::OpCode::MOV, tmpOpnd, indirOpnd, this->m_func);
        instr->InsertBefore(instrLdParams);
        // Skip slot 0 ("this"): address of in-params + one Var.
        indirOpnd = IR::IndirOpnd::New(tmpOpnd, sizeof(Js::Var), TyMachReg, this->m_func);
        instr->SetSrc1(indirOpnd);
        instr->m_opcode = Js::OpCode::LEA;
        return instr->m_prev;
    }
    else
    {
        return this->lowererMDArch.LoadStackArgPtr(instr);
    }
}
  515. IR::Instr *
  516. LowererMD::LoadArgumentsFromFrame(IR::Instr * instr)
  517. {
  518. if (this->m_func->IsLoopBody())
  519. {
  520. // Get the arguments ptr from the interpreter frame instance that was passed in.
  521. Assert(this->m_func->m_loopParamSym);
  522. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  523. int32 offset = (int32)Js::InterpreterStackFrame::GetOffsetOfArguments();
  524. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyMachReg, this->m_func));
  525. }
  526. else
  527. {
  528. instr->SetSrc1(this->CreateStackArgumentsSlotOpnd());
  529. }
  530. instr->m_opcode = Js::OpCode::MOV;
  531. return instr->m_prev;
  532. }
// load argument count as I4
// Rewrites instr into a MOV whose source is the actual argument count: loop
// bodies read the in-slots count from the interpreter frame instance; regular
// functions read the callinfo stack slot.
IR::Instr *
LowererMD::LoadArgumentCount(IR::Instr * instr)
{
    if (this->m_func->IsLoopBody())
    {
        // Pull the arg count from the interpreter frame instance that was passed in.
        // (The callinfo in the loop body's frame just shows the single parameter, the interpreter frame.)
        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInSlotsCount();
        instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, (int32)offset, TyInt32, this->m_func));
    }
    else
    {
        // Address the callinfo slot relative to the frame pointer.
        StackSym *sym = StackSym::New(TyVar, this->m_func);
        this->m_func->SetArgOffset(sym, (Js::JavascriptFunctionArgIndex_CallInfo - Js::JavascriptFunctionArgIndex_Frame) * sizeof(Js::Var));
        instr->SetSrc1(IR::SymOpnd::New(sym, TyMachReg, this->m_func));
    }
    instr->m_opcode = Js::OpCode::MOV;
    return instr->m_prev;
}
// Forward heap-arguments lowering to the architecture-specific lowerer.
IR::Instr *
LowererMD::LoadHeapArguments(IR::Instr * instrArgs)
{
    return this->lowererMDArch.LoadHeapArguments(instrArgs);
}
// Forward the cached variant of heap-arguments lowering to the arch lowerer.
IR::Instr *
LowererMD::LoadHeapArgsCached(IR::Instr * instrArgs)
{
    return this->lowererMDArch.LoadHeapArgsCached(instrArgs);
}
///----------------------------------------------------------------------------
///
/// LowererMD::ChangeToHelperCall
///
/// Change the current instruction to a call to the given helper.
///
/// If the instruction carries bailout info, the bailout is split off into a
/// separate instruction and lowered after the call (its lowering may start a
/// new helper call, so the current one's arguments must be emitted first).
/// Returns the last instruction produced by lowering the call.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::ChangeToHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod, IR::LabelInstr *labelBailOut,
    IR::Opnd *opndBailOutArg, IR::PropertySymOpnd *propSymOpnd, bool isHelperContinuation)
{
#if DBG
    this->m_lowerer->ReconcileWithLowererStateOnHelperCall(callInstr, helperMethod);
#endif
    IR::Instr * bailOutInstr = callInstr;
    if (callInstr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = callInstr->GetBailOutKind();
        if (bailOutKind == IR::BailOutOnNotPrimitive ||
            bailOutKind == IR::BailOutOnPowIntIntOverflow)
        {
            // Clone the call and turn the original instruction (which keeps
            // the bailout info) into the matching BailOnXxx, fed by
            // opndBailOutArg.
            callInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
            bailOutInstr->TransferTo(callInstr);
            bailOutInstr->InsertBefore(callInstr);
            bailOutInstr->m_opcode = bailOutKind == IR::BailOutOnNotPrimitive
                ? Js::OpCode::BailOnNotPrimitive
                : Js::OpCode::BailOnPowIntIntOverflow;
            bailOutInstr->SetSrc1(opndBailOutArg);
        }
        else
        {
            bailOutInstr = this->m_lowerer->SplitBailOnImplicitCall(callInstr);
        }
    }
    callInstr->m_opcode = Js::OpCode::CALL;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->lowererMDArch.GetHelperArgsCount(), m_func);
    if (helperCallOpnd->IsDiagHelperCallOpnd())
    {
        // Load arguments for the wrapper.
        this->LoadHelperArgument(callInstr, IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKindDynamicMisc, m_func));
        this->m_lowerer->LoadScriptContext(callInstr);
    }
    callInstr->SetSrc1(helperCallOpnd);
    IR::Instr * instrRet = this->lowererMDArch.LowerCall(callInstr, 0);
    if (bailOutInstr != callInstr)
    {
        // The bailout needs to be lowered after we lower the helper call because the helper argument
        // has already been loaded. We need to drain them on AMD64 before starting another helper call
        if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotObject)
        {
            this->m_lowerer->LowerBailOnNotObject(bailOutInstr, nullptr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotPrimitive ||
            bailOutInstr->m_opcode == Js::OpCode::BailOnPowIntIntOverflow)
        {
            this->m_lowerer->LowerBailOnTrue(bailOutInstr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOut)
        {
            this->m_lowerer->GenerateBailOut(bailOutInstr, nullptr, labelBailOut);
        }
        else
        {
            this->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr, nullptr, labelBailOut, propSymOpnd, isHelperContinuation);
        }
    }
#if DBG
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(instrRet->m_next);
    }
#endif
    return instrRet;
}
  639. IR::Instr* LowererMD::ChangeToHelperCallMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  640. {
  641. this->m_lowerer->LoadScriptContext(instr);
  642. return this->ChangeToHelperCall(instr, helperMethod);
  643. }
///----------------------------------------------------------------------------
///
/// LowererMD::ChangeToAssign
///
/// Change to a MOV (the type-appropriate machine assign).
///
///----------------------------------------------------------------------------
// Variant that skips the write-barrier check and assigns using the
// destination's own type.
IR::Instr *
LowererMD::ChangeToAssignNoBarrierCheck(IR::Instr * instr)
{
    return ChangeToAssign(instr, instr->GetDst()->GetType());
}
// Default variant: route through the write-barrier-aware assign.
IR::Instr *
LowererMD::ChangeToAssign(IR::Instr * instr)
{
    return ChangeToWriteBarrierAssign(instr, instr->m_func);
}
// Core variant: pick the machine move opcode for 'type' and legalize the
// resulting instruction (which may split/adjust operands).
IR::Instr *
LowererMD::ChangeToAssign(IR::Instr * instr, IRType type)
{
    Assert(!instr->HasBailOutInfo() || instr->GetBailOutKind() == IR::BailOutExpectingString);
#if _M_IX86
    // 32-bit target: int64 assigns must be split into a low/high pair.
    if (IRType_IsInt64(type))
    {
        return LowererMDArch::ChangeToAssignInt64(instr);
    }
#endif
    instr->m_opcode = LowererMDArch::GetAssignOp(type);
    Legalize(instr);
    return instr;
}
///----------------------------------------------------------------------------
///
/// LowererMD::LowerRet
///
/// Lower Ret to "MOV EAX, src"
/// The real RET is inserted at the exit of the function when emitting the
/// epilog.
///
/// For asm.js/wasm functions the return register is chosen from the declared
/// return type; Void returns need no register at all, and (with
/// LOWER_SPLIT_INT64) Int64 returns are split across two 32-bit registers.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerRet(IR::Instr * retInstr)
{
    IR::RegOpnd * retReg = nullptr;
    bool needsRetReg = true;
#ifdef ASMJS_PLAT
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody()) // for loop body ret is the bytecodeoffset
    {
        Js::AsmJsRetType::Which asmType = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType();
        IRType regType = TyInt32;
        switch (asmType)
        {
        case Js::AsmJsRetType::Double:
            regType = TyFloat64;
            break;
        case Js::AsmJsRetType::Float:
            regType = TyFloat32;
            break;
        case Js::AsmJsRetType::Int64:
        {
            regType = TyInt64;
#if LOWER_SPLIT_INT64
            // 32-bit: the 64-bit result is returned as a low/high pair
            // (low half in the regular return register, high half in EDX).
            regType = TyInt32;
            {
                IR::Opnd* lowOpnd = nullptr;
                IR::Opnd* highOpnd = nullptr;
                if (retInstr->GetSrc1()->IsRegOpnd())
                {
                    Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(retInstr->GetSrc1()->AsRegOpnd());
                    lowOpnd = srcPair.low;
                    highOpnd = srcPair.high;
                }
                else if (retInstr->GetSrc1()->IsImmediateOpnd())
                {
                    // Split an immediate into its 32-bit halves.
                    int64 value = retInstr->GetSrc1()->GetImmediateValue(m_func);
                    lowOpnd = IR::IntConstOpnd::New(value & UINT_MAX, regType, m_func);
                    highOpnd = IR::IntConstOpnd::New(value >> 32, regType, m_func);
                }
                else
                {
                    Assert(UNREACHED);
                }
                retInstr->UnlinkSrc1();
                retInstr->SetSrc1(lowOpnd);
                // Mov high bits to edx
                IR::RegOpnd* regEdx = IR::RegOpnd::New(regType, this->m_func);
                regEdx->SetReg(RegEDX);
                Lowerer::InsertMove(regEdx, highOpnd, retInstr);
                retInstr->SetSrc2(regEdx);
            }
#endif
            break;
        }
        case Js::AsmJsRetType::Void:
            // Nothing to return; skip the return-register move entirely.
            needsRetReg = false;
            break;
        case Js::AsmJsRetType::Signed:
            regType = TyInt32;
            break;
#ifdef ENABLE_WASM_SIMD
        case Js::AsmJsRetType::Float32x4:
            regType = TySimd128F4;
            break;
        case Js::AsmJsRetType::Int32x4:
            regType = TySimd128I4;
            break;
        case Js::AsmJsRetType::Float64x2:
            regType = TySimd128D2;
            break;
        case Js::AsmJsRetType::Int64x2:
            regType = TySimd128I2;
            break;
        case Js::AsmJsRetType::Int16x8:
            regType = TySimd128I8;
            break;
        case Js::AsmJsRetType::Int8x16:
            regType = TySimd128I16;
            break;
        case Js::AsmJsRetType::Uint32x4:
            regType = TySimd128U4;
            break;
        case Js::AsmJsRetType::Uint16x8:
            regType = TySimd128U8;
            break;
        case Js::AsmJsRetType::Uint8x16:
            regType = TySimd128U16;
            break;
        case Js::AsmJsRetType::Bool32x4:
            regType = TySimd128B4;
            break;
        case Js::AsmJsRetType::Bool16x8:
            regType = TySimd128B8;
            break;
        case Js::AsmJsRetType::Bool8x16:
            regType = TySimd128B16;
            break;
#endif
        default:
            Assert(UNREACHED);
        }
        if (needsRetReg)
        {
            retReg = IR::RegOpnd::New(regType, m_func);
            retReg->SetReg(lowererMDArch.GetRegReturnAsmJs(regType));
        }
    }
    else
#endif
    {
        // Regular JS function: return in the machine return register.
        retReg = IR::RegOpnd::New(TyMachReg, m_func);
        retReg->SetReg(lowererMDArch.GetRegReturn(TyMachReg));
    }
    if (needsRetReg)
    {
        // MOV retReg, src; the Ret instruction now sources the return register.
        Lowerer::InsertMove(retReg, retInstr->UnlinkSrc1(), retInstr);
        retInstr->SetSrc1(retReg);
    }
    return retInstr;
}
  803. ///----------------------------------------------------------------------------
  804. ///
  805. /// LowererMD::LowerCondBranch
  806. ///
  807. ///----------------------------------------------------------------------------
  808. IR::Instr *
  809. LowererMD::LowerCondBranch(IR::Instr * instr)
  810. {
  811. AssertMsg(instr->GetSrc1() != nullptr, "Expected src opnds on conditional branch");
  812. Assert(!instr->HasBailOutInfo());
  813. IR::Opnd * opndSrc1 = instr->UnlinkSrc1();
  814. IR::Instr * instrPrev = nullptr;
  815. switch (instr->m_opcode)
  816. {
  817. case Js::OpCode::BrTrue_A:
  818. case Js::OpCode::BrFalse_A:
  819. case Js::OpCode::BrNotNull_A:
  820. case Js::OpCode::BrOnObject_A:
  821. case Js::OpCode::BrOnClassConstructor:
  822. case Js::OpCode::BrOnBaseConstructorKind:
  823. Assert(!opndSrc1->IsFloat64());
  824. AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
  825. instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  826. instrPrev->SetSrc1(opndSrc1);
  827. instrPrev->SetSrc2(opndSrc1);
  828. instr->InsertBefore(instrPrev);
  829. if (instr->m_opcode != Js::OpCode::BrFalse_A)
  830. {
  831. instr->m_opcode = Js::OpCode::JNE;
  832. }
  833. else
  834. {
  835. instr->m_opcode = Js::OpCode::JEQ;
  836. }
  837. break;
  838. case Js::OpCode::BrOnEmpty:
  839. case Js::OpCode::BrOnNotEmpty:
  840. AssertMsg(0, "BrOnEmpty opcodes should not be passed to MD lowerer");
  841. break;
  842. default:
  843. IR::Opnd * opndSrc2 = instr->UnlinkSrc2();
  844. AssertMsg(opndSrc2 != nullptr, "Expected 2 src's on non-boolean branch");
  845. if (opndSrc1->IsFloat())
  846. {
  847. Assert(opndSrc1->GetType() == opndSrc2->GetType());
  848. instrPrev = IR::Instr::New(opndSrc1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS, m_func);
  849. instrPrev->SetSrc1(opndSrc1);
  850. instrPrev->SetSrc2(opndSrc2);
  851. instr->InsertBefore(instrPrev);
  852. }
  853. else
  854. {
  855. // This check assumes src1 is a variable.
  856. if (opndSrc2->IsIntConstOpnd() && opndSrc2->AsIntConstOpnd()->GetValue() == 0)
  857. {
  858. instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  859. instrPrev->SetSrc1(opndSrc1);
  860. instrPrev->SetSrc2(opndSrc1);
  861. instr->InsertBefore(instrPrev);
  862. opndSrc2->Free(this->m_func);
  863. }
  864. else
  865. {
  866. instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  867. //
  868. // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
  869. // relevant only on AMD64.
  870. //
  871. opndSrc1 = instrPrev->SetSrc1(opndSrc1);
  872. opndSrc2 = instrPrev->SetSrc2(opndSrc2);
  873. instr->InsertBefore(instrPrev);
  874. LowererMD::Legalize(instrPrev);
  875. }
  876. }
  877. instr->m_opcode = LowererMD::MDBranchOpcode(instr->m_opcode);
  878. break;
  879. }
  880. return instrPrev;
  881. }
  882. ///----------------------------------------------------------------------------
  883. ///
  884. /// LowererMD::MDBranchOpcode
  885. ///
  886. /// Map HIR branch opcode to machine-dependent equivalent.
  887. ///
  888. ///----------------------------------------------------------------------------
  889. Js::OpCode
  890. LowererMD::MDBranchOpcode(Js::OpCode opcode)
  891. {
  892. switch (opcode)
  893. {
  894. case Js::OpCode::BrSrEq_A:
  895. case Js::OpCode::BrEq_A:
  896. case Js::OpCode::BrSrNotNeq_A:
  897. case Js::OpCode::BrNotNeq_A:
  898. case Js::OpCode::BrAddr_A:
  899. return Js::OpCode::JEQ;
  900. case Js::OpCode::BrSrNeq_A:
  901. case Js::OpCode::BrNeq_A:
  902. case Js::OpCode::BrSrNotEq_A:
  903. case Js::OpCode::BrNotEq_A:
  904. case Js::OpCode::BrNotAddr_A:
  905. return Js::OpCode::JNE;
  906. case Js::OpCode::BrLt_A:
  907. case Js::OpCode::BrNotGe_A:
  908. return Js::OpCode::JLT;
  909. case Js::OpCode::BrLe_A:
  910. case Js::OpCode::BrNotGt_A:
  911. return Js::OpCode::JLE;
  912. case Js::OpCode::BrGt_A:
  913. case Js::OpCode::BrNotLe_A:
  914. return Js::OpCode::JGT;
  915. case Js::OpCode::BrGe_A:
  916. case Js::OpCode::BrNotLt_A:
  917. return Js::OpCode::JGE;
  918. default:
  919. AssertMsg(0, "Branch opcode has no MD mapping");
  920. return opcode;
  921. }
  922. }
  923. Js::OpCode
  924. LowererMD::MDConvertFloat64ToInt32Opcode(const RoundMode roundMode)
  925. {
  926. switch (roundMode)
  927. {
  928. case RoundModeTowardZero:
  929. return Js::OpCode::CVTTSD2SI;
  930. case RoundModeTowardInteger:
  931. return Js::OpCode::Nop;
  932. case RoundModeHalfToEven:
  933. return Js::OpCode::CVTSD2SI;
  934. default:
  935. AssertMsg(0, "RoundMode has no MD mapping.");
  936. return Js::OpCode::Nop;
  937. }
  938. }
  939. Js::OpCode
  940. LowererMD::MDUnsignedBranchOpcode(Js::OpCode opcode)
  941. {
  942. switch (opcode)
  943. {
  944. case Js::OpCode::BrEq_A:
  945. case Js::OpCode::BrSrEq_A:
  946. case Js::OpCode::BrSrNotNeq_A:
  947. case Js::OpCode::BrNotNeq_A:
  948. case Js::OpCode::BrAddr_A:
  949. return Js::OpCode::JEQ;
  950. case Js::OpCode::BrNeq_A:
  951. case Js::OpCode::BrSrNeq_A:
  952. case Js::OpCode::BrSrNotEq_A:
  953. case Js::OpCode::BrNotEq_A:
  954. case Js::OpCode::BrNotAddr_A:
  955. return Js::OpCode::JNE;
  956. case Js::OpCode::BrLt_A:
  957. case Js::OpCode::BrNotGe_A:
  958. return Js::OpCode::JB;
  959. case Js::OpCode::BrLe_A:
  960. case Js::OpCode::BrNotGt_A:
  961. return Js::OpCode::JBE;
  962. case Js::OpCode::BrGt_A:
  963. case Js::OpCode::BrNotLe_A:
  964. return Js::OpCode::JA;
  965. case Js::OpCode::BrGe_A:
  966. case Js::OpCode::BrNotLt_A:
  967. return Js::OpCode::JAE;
  968. default:
  969. AssertMsg(0, "Branch opcode has no MD mapping");
  970. return opcode;
  971. }
  972. }
  973. Js::OpCode LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode opcode)
  974. {
  975. Assert(opcode == Js::OpCode::BrLt_A || opcode == Js::OpCode::BrGe_A);
  976. return opcode == Js::OpCode::BrLt_A ? Js::OpCode::JSB : Js::OpCode::JNSB;
  977. }
// Lower a generic add to its machine form (ADD/ADDSD/ADDSS) in place.
// needFlags: true when a following instruction consumes the flags this add
// produces; that suppresses the INC peephole below.
void LowererMD::ChangeToAdd(IR::Instr *const instr, const bool needFlags)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    if(instr->GetDst()->IsFloat64())
    {
        // Float adds never feed a flags consumer.
        Assert(instr->GetSrc1()->IsFloat64());
        Assert(instr->GetSrc2()->IsFloat64());
        Assert(!needFlags);
        instr->m_opcode = Js::OpCode::ADDSD;
        return;
    }
    else if (instr->GetDst()->IsFloat32())
    {
        Assert(instr->GetSrc1()->IsFloat32());
        Assert(instr->GetSrc2()->IsFloat32());
        Assert(!needFlags);
        instr->m_opcode = Js::OpCode::ADDSS;
        return;
    }
    instr->m_opcode = Js::OpCode::ADD;
    Legalize(instr);
    if (!needFlags)
    {
        // Prefer INC for add by one
        // (INC leaves CF unchanged, unlike ADD, so it is only safe when the
        // flags result is unused.)
        if ((instr->GetDst()->IsEqual(instr->GetSrc1()) &&
            instr->GetSrc2()->IsIntConstOpnd() &&
            instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1) ||
            (instr->GetDst()->IsEqual(instr->GetSrc2()) &&
            instr->GetSrc1()->IsIntConstOpnd() &&
            instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 1))
        {
            if (instr->GetSrc1()->IsIntConstOpnd())
            {
                // Swap the operands, such that we would create (dst = INC src2)
                instr->SwapOpnds();
            }
            instr->FreeSrc2();
            instr->m_opcode = Js::OpCode::INC;
        }
    }
}
  1022. void LowererMD::ChangeToSub(IR::Instr *const instr, const bool needFlags)
  1023. {
  1024. Assert(instr);
  1025. Assert(instr->GetDst());
  1026. Assert(instr->GetSrc1());
  1027. Assert(instr->GetSrc2());
  1028. if(instr->GetDst()->IsFloat64())
  1029. {
  1030. Assert(instr->GetSrc1()->IsFloat64());
  1031. Assert(instr->GetSrc2()->IsFloat64());
  1032. Assert(!needFlags);
  1033. instr->m_opcode = Js::OpCode::SUBSD;
  1034. return;
  1035. }
  1036. // Prefer DEC for sub by one
  1037. if(instr->GetDst()->IsEqual(instr->GetSrc1()) &&
  1038. instr->GetSrc2()->IsIntConstOpnd() &&
  1039. instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1)
  1040. {
  1041. instr->FreeSrc2();
  1042. instr->m_opcode = Js::OpCode::DEC;
  1043. return;
  1044. }
  1045. instr->m_opcode = Js::OpCode::SUB;
  1046. }
// Lower a shift/rotate HIR opcode to its machine form in place, and fold the
// hardware's shift-count masking into constant counts.
// needFlags is accepted for signature parity with ChangeToAdd/ChangeToSub but
// is not consulted here.
void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    switch(instr->m_opcode)
    {
    case Js::OpCode::Shl_A:
    case Js::OpCode::Shl_I4:
        instr->m_opcode = Js::OpCode::SHL;
        break;
    case Js::OpCode::Shr_A:
    case Js::OpCode::Shr_I4:
        // Shr is the arithmetic (sign-propagating) right shift.
        instr->m_opcode = Js::OpCode::SAR;
        break;
    case Js::OpCode::ShrU_A:
    case Js::OpCode::ShrU_I4:
        // ShrU is the logical (zero-filling) right shift.
        instr->m_opcode = Js::OpCode::SHR;
        break;
    case Js::OpCode::Rol_I4:
        instr->m_opcode = Js::OpCode::ROL;
        break;
    case Js::OpCode::Ror_I4:
        instr->m_opcode = Js::OpCode::ROR;
        break;
    default:
        Assert(false);
        __assume(false);
    }

    if(instr->GetSrc2()->IsIntConstOpnd() && !instr->GetSrc1()->IsInt64())
    {
        // Only the low bits of the count matter: hardware masks the shift
        // count mod 64 for 8-byte operands and mod 32 otherwise, so apply the
        // same masking to the constant here.
        IntConstType value = instr->GetSrc2()->AsIntConstOpnd()->GetValue();
        value &= TySize[instr->GetDst()->GetType()] == 8 ? 63 : 31;
        instr->GetSrc2()->AsIntConstOpnd()->SetValue(value);
    }
}
// Lower a multiply. When a non-32-bit overflow check is required, emit the
// eax-based IMUL sequence (MOV eax, src1; [MOV reg, imm;] eax = IMUL eax, reg;
// MOV dst, eax); otherwise emit the plain two-operand int multiply.
void LowererMD::ChangeToIMul(IR::Instr *const instr, bool hasOverflowCheck)
{
    // If non-32 bit overflow check is needed, we have to use the IMUL form.
    if (hasOverflowCheck && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
    {
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, instr->m_func);
        IR::Opnd *temp2 = nullptr;
        // MOV eax, src1
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, regEAX, instr->GetSrc1(), instr->m_func));
        if (instr->GetSrc2()->IsImmediateOpnd())
        {
            // MOV reg, imm
            // This IMUL form cannot take an immediate operand, so load the
            // immediate into a fresh register first, carrying over the
            // original operand's dontEncode flag.
            temp2 = IR::RegOpnd::New(TyInt32, instr->m_func);
            IR::Opnd * src2 = instr->GetSrc2();
            bool dontEncode = false;
            if (src2->IsHelperCallOpnd())
            {
                dontEncode = true;
            }
            else if (src2->IsIntConstOpnd() || src2->IsAddrOpnd())
            {
                dontEncode = src2->IsIntConstOpnd() ? src2->AsIntConstOpnd()->m_dontEncode : src2->AsAddrOpnd()->m_dontEncode;
            }
            else if (src2->IsInt64ConstOpnd())
            {
                dontEncode = false;
            }
            else
            {
                AssertMsg(false, "Unexpected immediate opnd");
                throw Js::OperationAbortedException();
            }
            instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, temp2,
                IR::IntConstOpnd::New((IntConstType)instr->GetSrc2()->GetImmediateValue(instr->m_func), TyInt32, instr->m_func, dontEncode),
                instr->m_func));
        }
        // eax = IMUL eax, reg
        instr->m_opcode = Js::OpCode::IMUL;
        instr->ReplaceSrc1(regEAX);
        if (temp2 != nullptr)
            instr->ReplaceSrc2(temp2);
        // Route the product through eax, then copy it into the original dst.
        auto *dst = instr->GetDst()->Copy(instr->m_func);
        instr->ReplaceDst(regEAX);
        // MOV dst, eax
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, dst, regEAX, instr->m_func));
    }
    else
        EmitInt4Instr(instr); // IMUL2
}
// Returns the offset (in machine-word slots) from the frame pointer to the
// first formal parameter.
const uint16
LowererMD::GetFormalParamOffset()
{
    //In x86\x64 formal params were offset from EBP by the EBP chain, return address, and the 2 non-user params
    return 4;
}
  1141. ///----------------------------------------------------------------------------
  1142. ///
  1143. /// LowererMD::ForceDstToReg
  1144. ///
  1145. ///----------------------------------------------------------------------------
  1146. void
  1147. LowererMD::ForceDstToReg(IR::Instr *instr)
  1148. {
  1149. IR::Opnd * dst = instr->GetDst();
  1150. if (dst->IsRegOpnd())
  1151. {
  1152. return;
  1153. }
  1154. if(dst->IsFloat64())
  1155. {
  1156. instr->SinkDst(Js::OpCode::MOVSD);
  1157. return;
  1158. }
  1159. instr->SinkDst(Js::OpCode::MOV);
  1160. }
// Describes the operand encodings an MD opcode accepts: one LegalForms mask
// for the dst and one for each of the two possible srcs.
struct LegalInstrForms
{
    const LegalForms dst, src[2];
};
namespace LegalInstrFormsImpl
{
    // Naming legend: LEGAL_<dst>_<src1>_<src2>, where R = register,
    // M = memory, I = 32-bit immediate, O prefix = optional operand, and
    // N = no operand allowed in that position.
    LegalInstrForms LEGAL_NONE = { L_None, { L_None, L_None } };
    LegalInstrForms LEGAL_CUSTOM = { LF_Custom, { LF_Custom, LF_Custom } };
    LegalInstrForms LEGAL_CALL = { LF_Optional | L_Reg, { L_Reg | L_Mem | L_Ptr, L_None } };
    LegalInstrForms LEGAL_R = { L_Reg, { L_None, L_None } };
    LegalInstrForms LEGAL_M = { L_Mem, { L_None, L_None } };
    LegalInstrForms LEGAL_RM = { L_Reg | L_Mem, { L_None, L_None } };
    LegalInstrForms LEGAL_N_I = { L_None, { L_Imm32, L_None } };
    LegalInstrForms LEGAL_N_RMI = { L_None, { L_Reg | L_Mem | L_Imm32, L_None } };
    LegalInstrForms LEGAL_R_R = { L_Reg, { L_Reg, L_None } };
    LegalInstrForms LEGAL_R_M = { L_Reg, { L_Mem, L_None } };
    LegalInstrForms LEGAL_M_M = { L_Mem, { L_Mem, L_None } };
    LegalInstrForms LEGAL_R_OR = { L_Reg, { LF_Optional | L_Reg, L_None } };
    LegalInstrForms LEGAL_R_RM = { L_Reg, { L_Reg | L_Mem, L_None } };
    LegalInstrForms LEGAL_R_RMI = { L_Reg, { L_Reg | L_Mem | L_Imm32, L_None } };
    LegalInstrForms LEGAL_RM_RM = { L_Reg | L_Mem, { L_Reg | L_Mem, L_None } };
    LegalInstrForms LEGAL_N_R_R = { L_None, { L_Reg, L_Reg } };
    LegalInstrForms LEGAL_N_I_OR = { L_None, { L_Imm32, LF_Optional | L_Reg } };
    LegalInstrForms LEGAL_N_R_RM = { L_None, { L_Reg, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_N_RM_RI = { L_None, { L_Reg | L_Mem, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_N_RM_RMI = { L_None, { L_Reg | L_Mem, L_Reg | L_Mem | L_Imm32 } };
    LegalInstrForms LEGAL_R_R_RM = { L_Reg, { L_Reg, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_R_R_RI = { L_Reg, { L_Reg, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_R_R_RMI = { L_Reg, { L_Reg, L_Reg | L_Mem | L_Imm32 } };
    LegalInstrForms LEGAL_RM_R_I = { L_Reg | L_Mem, { L_Reg, L_Imm32 } };
    LegalInstrForms LEGAL_R_RM_I = { L_Reg, { L_Reg | L_Mem, L_Imm32 } };
    LegalInstrForms LEGAL_RM_RM_RM = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_RM_RM_RI = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_RM_RM_RMI = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Mem | L_Imm32 } };
};
// One LegalInstrForms entry per MD opcode, in MdOpCodes.h declaration order;
// LowererMD::Legalize indexes this with (opcode - (Js::OpCode::MDStart + 1)).
LegalInstrForms AllLegalInstrForms[] = {
#define MACRO(name, jnLayout, attrib, byte2, form, opByte, dope, leadIn, legal, ...) LegalInstrFormsImpl::legal,
#include "MdOpCodes.h"
#undef MACRO
};
  1201. template <bool verify>
  1202. void
  1203. LowererMD::Legalize(IR::Instr *const instr, bool fPostRegAlloc)
  1204. {
  1205. Assert(instr);
  1206. Assert(!instr->isInlineeEntryInstr
  1207. || (instr->m_opcode == Js::OpCode::MOV && instr->GetSrc1()->IsIntConstOpnd()));
  1208. const bool isMDOpCode = instr->m_opcode > Js::OpCode::MDStart;
  1209. Assert(isMDOpCode || Lowerer::ValidOpcodeAfterLower(instr, instr->m_func));
  1210. const LegalInstrForms legalInstrForms = isMDOpCode ? AllLegalInstrForms[instr->m_opcode - (Js::OpCode::MDStart + 1)] : LegalInstrFormsImpl::LEGAL_NONE;
  1211. LegalForms dstForms = legalInstrForms.dst;
  1212. LegalForms src1Forms = legalInstrForms.src[0];
  1213. LegalForms src2Forms = legalInstrForms.src[1];
  1214. bool hasSwitchCase = true;
  1215. bool isCustomForm = (dstForms & LF_Custom) != 0;;
  1216. switch(instr->m_opcode)
  1217. {
  1218. case Js::OpCode::JA:
  1219. case Js::OpCode::JAE:
  1220. case Js::OpCode::JB:
  1221. case Js::OpCode::JBE:
  1222. case Js::OpCode::JEQ:
  1223. case Js::OpCode::JNE:
  1224. case Js::OpCode::JLT:
  1225. case Js::OpCode::JLE:
  1226. case Js::OpCode::JGT:
  1227. case Js::OpCode::JGE:
  1228. case Js::OpCode::JNO:
  1229. case Js::OpCode::JO:
  1230. case Js::OpCode::JP:
  1231. case Js::OpCode::JNP:
  1232. case Js::OpCode::JNSB:
  1233. case Js::OpCode::JSB:
  1234. case Js::OpCode::JMP:
  1235. Assert(instr->IsBranchInstr());
  1236. break;
  1237. case Js::OpCode::MOV:
  1238. {
  1239. Assert(instr->GetSrc2() == nullptr);
  1240. IR::Opnd *const dst = instr->GetDst();
  1241. const IRType dstType = dst->GetType();
  1242. IR::Opnd *const src = instr->GetSrc1();
  1243. const IRType srcType = src->GetType();
  1244. if(TySize[dstType] > TySize[srcType])
  1245. {
  1246. if (verify)
  1247. {
  1248. return;
  1249. }
  1250. #if DBG
  1251. switch(dstType)
  1252. {
  1253. case TyInt32:
  1254. case TyUint32:
  1255. #ifdef _M_X64
  1256. case TyInt64:
  1257. case TyUint64:
  1258. #endif
  1259. case TyVar:
  1260. break;
  1261. default:
  1262. Assert(false);
  1263. }
  1264. #endif
  1265. IR::IntConstOpnd *const intConstantSrc = src->IsIntConstOpnd() ? src->AsIntConstOpnd() : nullptr;
  1266. const auto UpdateIntConstantSrc = [&](const size_t extendedValue)
  1267. {
  1268. Assert(intConstantSrc);
  1269. #ifdef _M_X64
  1270. if(TySize[dstType] > sizeof(IntConstType))
  1271. {
  1272. instr->ReplaceSrc1(
  1273. IR::AddrOpnd::New(
  1274. reinterpret_cast<void *>(extendedValue),
  1275. IR::AddrOpndKindConstantVar,
  1276. instr->m_func,
  1277. intConstantSrc->m_dontEncode));
  1278. }
  1279. else
  1280. #endif
  1281. {
  1282. intConstantSrc->SetType(dstType);
  1283. intConstantSrc->SetValue(static_cast<IntConstType>(extendedValue));
  1284. }
  1285. };
  1286. switch(srcType)
  1287. {
  1288. case TyInt8:
  1289. if(intConstantSrc)
  1290. {
  1291. UpdateIntConstantSrc(static_cast<int8>(intConstantSrc->GetValue())); // sign-extend
  1292. break;
  1293. }
  1294. instr->m_opcode = Js::OpCode::MOVSX;
  1295. break;
  1296. case TyUint8:
  1297. if(intConstantSrc)
  1298. {
  1299. UpdateIntConstantSrc(static_cast<uint8>(intConstantSrc->GetValue())); // zero-extend
  1300. break;
  1301. }
  1302. instr->m_opcode = Js::OpCode::MOVZX;
  1303. break;
  1304. case TyInt16:
  1305. if(intConstantSrc)
  1306. {
  1307. UpdateIntConstantSrc(static_cast<int16>(intConstantSrc->GetValue())); // sign-extend
  1308. break;
  1309. }
  1310. instr->m_opcode = Js::OpCode::MOVSXW;
  1311. break;
  1312. case TyUint16:
  1313. if(intConstantSrc)
  1314. {
  1315. UpdateIntConstantSrc(static_cast<uint16>(intConstantSrc->GetValue())); // zero-extend
  1316. break;
  1317. }
  1318. instr->m_opcode = Js::OpCode::MOVZXW;
  1319. break;
  1320. #ifdef _M_X64
  1321. case TyInt32:
  1322. if(intConstantSrc)
  1323. {
  1324. UpdateIntConstantSrc(static_cast<int32>(intConstantSrc->GetValue())); // sign-extend
  1325. break;
  1326. }
  1327. instr->m_opcode = Js::OpCode::MOVSXD;
  1328. break;
  1329. case TyUint32:
  1330. if(intConstantSrc)
  1331. {
  1332. UpdateIntConstantSrc(static_cast<uint32>(intConstantSrc->GetValue())); // zero-extend
  1333. break;
  1334. }
  1335. switch(dst->GetKind())
  1336. {
  1337. case IR::OpndKindReg:
  1338. // (mov r0.u32, r1.u32) clears the upper 32 bits of r0
  1339. dst->SetType(TyUint32);
  1340. instr->m_opcode = Js::OpCode::MOV_TRUNC;
  1341. break;
  1342. case IR::OpndKindSym:
  1343. case IR::OpndKindIndir:
  1344. case IR::OpndKindMemRef:
  1345. // Even if the src is a reg, we don't know if the upper 32 bits are zero. Copy the value to a
  1346. // reg first to zero-extend it to 64 bits, and then copy the 64-bit value to the original dst.
  1347. instr->HoistSrc1(Js::OpCode::MOV_TRUNC);
  1348. instr->GetSrc1()->SetType(dstType);
  1349. break;
  1350. default:
  1351. Assert(false);
  1352. __assume(false);
  1353. }
  1354. break;
  1355. #endif
  1356. default:
  1357. Assert(false);
  1358. __assume(false);
  1359. }
  1360. }
  1361. else if (TySize[dstType] < TySize[srcType])
  1362. {
  1363. instr->GetSrc1()->SetType(dst->GetType());
  1364. }
  1365. if(instr->m_opcode == Js::OpCode::MOV)
  1366. {
  1367. // Allow 64 bit values in x64 as well
  1368. src1Forms = L_Reg | L_Mem | L_Ptr;
  1369. #if _M_X64
  1370. if (dst->IsMemoryOpnd())
  1371. {
  1372. // Only allow <= 32 bit values
  1373. src1Forms = L_Reg | L_Imm32;
  1374. }
  1375. #endif
  1376. LegalizeOpnds<verify>(
  1377. instr,
  1378. L_Reg | L_Mem,
  1379. src1Forms,
  1380. L_None);
  1381. }
  1382. else
  1383. {
  1384. LegalizeOpnds<verify>(
  1385. instr,
  1386. L_Reg,
  1387. L_Reg | L_Mem,
  1388. L_None);
  1389. }
  1390. break;
  1391. }
  1392. case Js::OpCode::CMOVA:
  1393. case Js::OpCode::CMOVAE:
  1394. case Js::OpCode::CMOVB:
  1395. case Js::OpCode::CMOVBE:
  1396. case Js::OpCode::CMOVE:
  1397. case Js::OpCode::CMOVG:
  1398. case Js::OpCode::CMOVGE:
  1399. case Js::OpCode::CMOVL:
  1400. case Js::OpCode::CMOVLE:
  1401. case Js::OpCode::CMOVNE:
  1402. case Js::OpCode::CMOVNO:
  1403. case Js::OpCode::CMOVNP:
  1404. case Js::OpCode::CMOVNS:
  1405. case Js::OpCode::CMOVO:
  1406. case Js::OpCode::CMOVP:
  1407. case Js::OpCode::CMOVS:
  1408. if (instr->GetSrc2())
  1409. {
  1410. Assert(instr->GetDst()->GetSize() == instr->GetSrc2()->GetSize());
  1411. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1412. // 0 shouldn't be the src2 of a CMOVcc.
  1413. // CMOVcc doesn't support moving a constant and the legalizer will hoist the load of the constant
  1414. // to a register. If the constant was 0, Peeps will turn it into a XOR which, in turn, may change
  1415. // the zero flags and hence the result of CMOVcc. If you do want to CMOVcc 0, you should load 0
  1416. // into a register before the instruction whose result the CMOVcc depends on.
  1417. Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() != 0);
  1418. // sometimes we have fake src1 to help reg alloc
  1419. LegalizeOpnds<verify>(
  1420. instr,
  1421. L_Reg,
  1422. L_Reg,
  1423. L_Reg | L_Mem);
  1424. }
  1425. else
  1426. {
  1427. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1428. LegalizeOpnds<verify>(
  1429. instr,
  1430. L_Reg,
  1431. L_Reg | L_Mem,
  1432. L_None);
  1433. }
  1434. break;
  1435. case Js::OpCode::MOVSD:
  1436. case Js::OpCode::MOVSS:
  1437. Assert(instr->GetDst()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD? TyFloat64 : TyFloat32) || instr->GetDst()->IsSimd128());
  1438. Assert(instr->GetSrc1()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD ? TyFloat64 : TyFloat32) || instr->GetSrc1()->IsSimd128());
  1439. goto LegalizeDefault;
  1440. case Js::OpCode::NOP:
  1441. {
  1442. Assert(!instr->GetSrc2());
  1443. #if _M_IX86
  1444. RegNum edx = RegEDX;
  1445. #else
  1446. RegNum edx = RegRDX;
  1447. #endif
  1448. // Special case handled by peeps
  1449. Assert(!instr->GetDst() || (instr->GetDst()->IsRegOpnd() && instr->GetDst()->AsRegOpnd()->GetReg() == edx));
  1450. break;
  1451. }
  1452. case Js::OpCode::MOVSX:
  1453. case Js::OpCode::MOVSXW:
  1454. Assert(instr->GetDst()->GetSize() == 4 || instr->GetDst()->GetSize() == 8);
  1455. Assert(instr->m_opcode != Js::OpCode::MOVSX || instr->GetSrc1()->GetSize() == 1);
  1456. Assert(instr->m_opcode != Js::OpCode::MOVSXW || instr->GetSrc1()->GetSize() == 2);
  1457. goto LegalizeDefault;
  1458. case Js::OpCode::LOCKCMPXCHG8B:
  1459. case Js::OpCode::CMPXCHG8B:
  1460. {
  1461. const auto getRegMask = [](IR::Opnd* opnd)
  1462. {
  1463. Assert(opnd->IsListOpnd());
  1464. return opnd->AsListOpnd()->Reduce(
  1465. [](int i, IR::Opnd* opnd) {
  1466. Assert(opnd->IsRegOpnd());
  1467. return 1 << opnd->AsRegOpnd()->GetReg();
  1468. },
  1469. [](int i, uint32 regmask, uint32 allReg)
  1470. {
  1471. AssertMsg((allReg & regmask) == 0, "Should not have the same register twice");
  1472. return allReg | regmask;
  1473. }, 0);
  1474. };
  1475. #if _M_IX86
  1476. const uint32 dstMask = (1 << RegEAX | 1 << RegEDX);
  1477. const uint32 srcMask = (1 << RegEAX | 1 << RegEBX | 1 << RegECX | 1 << RegEDX);
  1478. #else
  1479. const uint32 dstMask = (1 << RegRAX | 1 << RegRDX);
  1480. const uint32 srcMask = (1 << RegRAX | 1 << RegRBX | 1 << RegRCX | 1 << RegRDX);
  1481. #endif
  1482. AssertMsg(!instr->m_func->isPostFinalLower || !instr->GetDst(), "After FinalLower, there should not be a dst");
  1483. AssertMsg(instr->m_func->isPostFinalLower || getRegMask(instr->GetDst()) == dstMask,
  1484. "Before FinalLower, instr should have eax,edx as dst");
  1485. AssertMsg(!instr->m_func->isPostFinalLower || !instr->GetSrc2(), "After FinalLower, there should not be a src2");
  1486. AssertMsg(instr->m_func->isPostFinalLower || getRegMask(instr->GetSrc2()) == srcMask,
  1487. "Before FinalLower, instr should have eax,edx,ecx,ebx as src2");
  1488. LegalizeSrc<verify>(
  1489. instr,
  1490. instr->GetSrc1(),
  1491. L_Mem);
  1492. break;
  1493. }
  1494. case Js::OpCode::TEST:
  1495. if((instr->GetSrc1()->IsImmediateOpnd() && !instr->GetSrc2()->IsImmediateOpnd()) ||
  1496. (instr->GetSrc2()->IsMemoryOpnd() && !instr->GetSrc1()->IsMemoryOpnd()))
  1497. {
  1498. if (verify)
  1499. {
  1500. AssertMsg(false, "Invalid Js::OpCode::TEST opnd order. Missing legalization");
  1501. return;
  1502. }
  1503. instr->SwapOpnds();
  1504. }
  1505. goto LegalizeDefault;
  1506. case Js::OpCode::SHL:
  1507. case Js::OpCode::SHR:
  1508. case Js::OpCode::SAR:
  1509. case Js::OpCode::ROL:
  1510. case Js::OpCode::ROR:
  1511. if (verify)
  1512. {
  1513. Assert(instr->GetSrc2()->IsIntConstOpnd()
  1514. || instr->GetSrc2()->AsRegOpnd()->GetReg() == LowererMDArch::GetRegShiftCount());
  1515. }
  1516. else
  1517. {
  1518. if(!instr->GetSrc2()->IsIntConstOpnd())
  1519. {
  1520. IR::Instr *const newInstr = instr->HoistSrc2(Js::OpCode::MOV);
  1521. newInstr->GetDst()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1522. instr->GetSrc2()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1523. }
  1524. instr->GetSrc2()->SetType(TyUint8);
  1525. }
  1526. goto LegalizeDefault;
  1527. case Js::OpCode::TZCNT:
  1528. Assert(AutoSystemInfo::Data.TZCntAvailable());
  1529. goto LegalizeDefault;
  1530. case Js::OpCode::LZCNT:
  1531. Assert(AutoSystemInfo::Data.LZCntAvailable());
  1532. goto LegalizeDefault;
  1533. case Js::OpCode::ROUNDSD:
  1534. case Js::OpCode::ROUNDSS:
  1535. Assert(AutoSystemInfo::Data.SSE4_1Available());
  1536. goto LegalizeDefault;
  1537. default:
  1538. LegalizeDefault:
  1539. if (isMDOpCode)
  1540. {
  1541. AssertMsg(!isCustomForm, "Custom legal forms should have a case in the switch statement");
  1542. hasSwitchCase = false;
  1543. if (EncoderMD::IsOPEQ(instr))
  1544. {
  1545. MakeDstEquSrc1<verify>(instr);
  1546. Assert((dstForms & L_FormMask) == (src1Forms & L_FormMask));
  1547. }
  1548. LegalizeOpnds<verify>(
  1549. instr,
  1550. dstForms,
  1551. src1Forms,
  1552. src2Forms);
  1553. }
  1554. break;
  1555. }
  1556. #if DBG
  1557. // Asserting general rules
  1558. // There should be at most 1 memory opnd in an instruction
  1559. if (instr->GetDst() && instr->GetDst()->IsMemoryOpnd())
  1560. {
  1561. // All memref address need to fit in a dword
  1562. Assert(!instr->GetDst()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetDst()->AsMemRefOpnd()->GetMemLoc()));
  1563. if (instr->GetSrc1())
  1564. {
  1565. Assert(instr->GetSrc1()->IsEqual(instr->GetDst()) || !instr->GetSrc1()->IsMemoryOpnd());
  1566. if (instr->GetSrc2())
  1567. {
  1568. Assert(!instr->GetSrc2()->IsMemoryOpnd());
  1569. }
  1570. }
  1571. }
  1572. else if (instr->GetSrc1() && instr->GetSrc1()->IsMemoryOpnd())
  1573. {
  1574. // All memref address need to fit in a dword
  1575. Assert(!instr->GetSrc1()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetSrc1()->AsMemRefOpnd()->GetMemLoc()));
  1576. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsMemoryOpnd());
  1577. }
  1578. else if (instr->GetSrc2() && instr->GetSrc2()->IsMemRefOpnd())
  1579. {
  1580. // All memref address need to fit in a dword
  1581. Assert(Math::FitsInDWord((size_t)instr->GetSrc2()->AsMemRefOpnd()->GetMemLoc()));
  1582. }
  1583. // Non-MOV (second operand) immediate need to fit in DWORD for AMD64
  1584. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsImmediateOpnd()
  1585. || (TySize[instr->GetSrc2()->GetType()] != 8) || Math::FitsInDWord(instr->GetSrc2()->GetImmediateValue(instr->m_func)));
  1586. #endif
  1587. }
// Legalize dst, src1 and src2 of instr against the given legal-form masks,
// hoisting operands into registers as needed. Enforces the x86 rule that at
// most one operand of an instruction may be a memory operand.
template <bool verify>
void LowererMD::LegalizeOpnds(IR::Instr *const instr, const LegalForms dstForms, LegalForms src1Forms, LegalForms src2Forms)
{
    Assert(instr);
    // LF_Optional marks operands that may be absent; otherwise the presence
    // of each operand must match the presence of its form mask.
    Assert(dstForms & LF_Optional || !instr->GetDst() == !dstForms);
    Assert(src1Forms & LF_Optional || !instr->GetSrc1() == !src1Forms);
    Assert(src2Forms & LF_Optional || !instr->GetSrc2() == !src2Forms);
    Assert(src1Forms || !src2Forms);
    const auto NormalizeForms = [](LegalForms forms) -> LegalForms
    {
#ifdef _M_X64
        // On x64, anywhere a pointer-sized operand is legal a 32-bit
        // immediate is too, but not the reverse (pointers may not fit in 32
        // bits).
        if(forms & L_Ptr)
        {
            forms |= L_Imm32;
        }
#else
        // On x86, pointers and 32-bit immediates are interchangeable.
        if(forms & (L_Imm32 | L_Ptr))
        {
            forms |= L_Imm32 | L_Ptr;
        }
#endif
        // Remove Legal Flags
        forms &= L_FormMask;
        return forms;
    };

    if(dstForms && instr->GetDst())
    {
        LegalizeDst<verify>(instr, NormalizeForms(dstForms));
    }
    if(!src1Forms || !instr->GetSrc1())
    {
        return;
    }

    bool hasMemOpnd = instr->GetDst() && instr->GetDst()->IsMemoryOpnd();
    // Allow src1 to be a mem opnd if dst & src1 must be the same
    if (hasMemOpnd && src1Forms & L_Mem && !EncoderMD::IsOPEQ(instr))
    {
        src1Forms ^= L_Mem;
    }
    LegalizeSrc<verify>(instr, instr->GetSrc1(), NormalizeForms(src1Forms));
    hasMemOpnd |= instr->GetSrc1()->IsMemoryOpnd();

    // If dst or src1 is a mem opnd, mem2 cannot be a mem opnd
    if(hasMemOpnd && src2Forms & L_Mem)
    {
        src2Forms ^= L_Mem;
    }
    if(src2Forms && instr->GetSrc2())
    {
        LegalizeSrc<verify>(instr, instr->GetSrc2(), NormalizeForms(src2Forms));
    }
}
// Legalizes the dst operand of 'instr' to one of the allowed 'forms'.
// In verify mode this only asserts. Otherwise, an illegal dst is replaced by a
// fresh register and a store from that register back into the original dst is
// inserted after the instruction; if the original dst aliased src1/src2, the
// srcs are rewired through the same register so the instruction still reads
// the pre-store value.
template <bool verify>
void LowererMD::LegalizeDst(IR::Instr *const instr, const LegalForms forms)
{
    Assert(instr);
    Assert(forms);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst);
#ifndef _M_X64
    AssertMsg(!dst->IsInt64(), "Int64 supported only on x64");
#endif

    switch(dst->GetKind())
    {
        case IR::OpndKindReg:
            // A register dst is always expected to be a legal form here.
            Assert(forms & L_Reg);
            return;

        case IR::OpndKindMemRef:
        {
            IR::MemRefOpnd *const memRefOpnd = dst->AsMemRefOpnd();
            if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
            {
                if (verify)
                {
                    AssertMsg(false, "Memory reference not legal in dst opnd. Missing legalization");
                    return;
                }
                // Load the address into a register; 'dst' becomes the rewritten opnd.
                dst = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
            }
            // fall through
        }

        case IR::OpndKindSym:
        case IR::OpndKindIndir:
            if(forms & L_Mem)
            {
                return;
            }
            break;

        default:
            Assert(false);
            __assume(false);
    }

    if (verify)
    {
        AssertMsg(false, "Dst opnd not legal. Missing legalization");
        return;
    }

    // Use a reg dst, then store that reg into the original dst
    Assert(forms & L_Reg);
    const IRType irType = dst->GetType();
    IR::RegOpnd *const regOpnd = IR::RegOpnd::New(irType, instr->m_func);
    regOpnd->SetValueType(dst->GetValueType());
    instr->UnlinkDst();
    instr->SetDst(regOpnd);
    instr->InsertAfter(IR::Instr::New(GetStoreOp(irType), dst, regOpnd, instr->m_func));

    // If the original dst is the same as one of the srcs, hoist a src into the same reg and replace the same srcs with the reg
    const bool equalsSrc1 = instr->GetSrc1() && dst->IsEqual(instr->GetSrc1());
    const bool equalsSrc2 = instr->GetSrc2() && dst->IsEqual(instr->GetSrc2());
    if(!(equalsSrc1 || equalsSrc2))
    {
        return;
    }
    const Js::OpCode loadOpCode = GetLoadOp(irType);
    if(equalsSrc1)
    {
        // Load the aliased src into the new reg before the instruction; src2
        // (if it aliases too) can then simply reference the reg.
        instr->HoistSrc1(loadOpCode, RegNOREG, regOpnd->m_sym);
        if(equalsSrc2)
        {
            instr->ReplaceSrc2(regOpnd);
        }
    }
    else
    {
        instr->HoistSrc2(loadOpCode, RegNOREG, regOpnd->m_sym);
    }
}
  1713. bool LowererMD::HoistLargeConstant(IR::IndirOpnd *indirOpnd, IR::Opnd *src, IR::Instr *instr) {
  1714. if (indirOpnd != nullptr)
  1715. {
  1716. if (indirOpnd->GetOffset() == 0)
  1717. {
  1718. instr->ReplaceSrc(src, indirOpnd->GetBaseOpnd());
  1719. }
  1720. else
  1721. {
  1722. // Hoist the address load as LEA [reg + offset]
  1723. // with the reg = MOV <some address within 32-bit range at the start of the function
  1724. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
  1725. Lowerer::InsertLea(regOpnd, indirOpnd, instr);
  1726. instr->ReplaceSrc(src, regOpnd);
  1727. }
  1728. return true;
  1729. }
  1730. return false;
  1731. }
// Legalizes one source operand ('src' must be src1 or src2 of 'instr') to one
// of the allowed 'forms'. In verify mode this only asserts. Otherwise,
// constants too wide for the form are hoisted (on x64) through a constant
// table or a register, illegal MemRefs have their address hoisted, and as a
// last resort the src is loaded into a new register before the instruction.
template <bool verify>
void LowererMD::LegalizeSrc(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms)
{
    Assert(instr);
    Assert(src);
    Assert(src == instr->GetSrc1() || src == instr->GetSrc2());
    Assert(forms);
#ifndef _M_X64
    AssertMsg(!src->IsInt64() || src->IsMemoryOpnd(), "Int64 supported only on x64");
#endif

    switch(src->GetKind())
    {
        case IR::OpndKindReg:
            // A register src is always expected to be a legal form here.
            Assert(forms & L_Reg);
            return;

        case IR::OpndKindIntConst:
            if(forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::IntConstOpnd * intOpnd = src->AsIntConstOpnd();
                // Narrow constants, or wide ones whose value fits in a DWORD
                // (and whose value is final, i.e. not an inlinee entry), can
                // stay as an imm32.
                if ((TySize[intOpnd->GetType()] != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord(intOpnd->GetValue())))
                {
                    if (forms & L_Imm32)
                    {
                        // the constant fits in 32-bit, no need to hoist
                        return;
                    }
                    break;
                }
                if (verify)
                {
                    AssertMsg(false, "IntConstOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                // The actual value for inlinee entry instr isn't determined until encoder
                // So it need to be hoisted conventionally.
                if (!instr->isInlineeEntryInstr)
                {
                    Assert(forms & L_Reg);
                    IR::IntConstOpnd * newIntOpnd = intOpnd->Copy(instr->m_func)->AsIntConstOpnd();
                    IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd(intOpnd->GetValue(), newIntOpnd, IR::AddrOpndKindConstantAddress, TyMachPtr, Js::OpCode::MOV);
                    if (HoistLargeConstant(indirOpnd, src, instr))
                    {
                        return;
                    }
                }
            }
#endif
            break;

        case IR::OpndKindFloatConst:
            break; // assume for now that it always needs to be hoisted

        case IR::OpndKindInt64Const:
            if (forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::Int64ConstOpnd * int64Opnd = src->AsInt64ConstOpnd();
                if ((forms & L_Imm32) && ((src->GetSize() != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord(int64Opnd->GetValue()))))
                {
                    // the immediate fits in 32-bit, no need to hoist
                    return;
                }
                if (verify)
                {
                    AssertMsg(false, "Int64ConstOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                // Unlike IntConst/Addr, a wide int64 const is always loaded via
                // a plain MOV into a fresh register (MOV supports imm64).
                Assert(forms & L_Reg);
                IR::Opnd* regOpnd = IR::RegOpnd::New(src->GetType(), instr->m_func);
                IR::Instr* moveToReg = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, instr->m_func);
                instr->InsertBefore(moveToReg);
                instr->ReplaceSrc(src, regOpnd);
                return;
            }
#endif
            break;

        case IR::OpndKindAddr:
            if (forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::AddrOpnd * addrOpnd = src->AsAddrOpnd();
                if ((forms & L_Imm32) && ((TySize[addrOpnd->GetType()] != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord((size_t)addrOpnd->m_address))))
                {
                    // the address fits in 32-bit, no need to hoist
                    return;
                }
                if (verify)
                {
                    AssertMsg(false, "AddrOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                Assert(!instr->isInlineeEntryInstr);
                Assert(forms & L_Reg);
                // TODO: michhol, remove cast after making m_address intptr
                IR::AddrOpnd * newAddrOpnd = addrOpnd->Copy(instr->m_func)->AsAddrOpnd();
                IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd((intptr_t)addrOpnd->m_address, newAddrOpnd, addrOpnd->GetAddrOpndKind(), TyMachPtr, Js::OpCode::MOV);
                if (HoistLargeConstant(indirOpnd, src, instr))
                {
                    return;
                }
            }
#endif
            break;

        case IR::OpndKindMemRef:
        {
            IR::MemRefOpnd *const memRefOpnd = src->AsMemRefOpnd();
            if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
            {
                if (verify)
                {
                    AssertMsg(false, "Memory reference not legal in src opnd. Missing legalization");
                    return;
                }
                // Load the address into a register; 'src' becomes the rewritten opnd.
                src = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
            }
            // fall through
        }

        case IR::OpndKindSym:
        case IR::OpndKindIndir:
            if(forms & L_Mem)
            {
                return;
            }
            break;

        case IR::OpndKindHelperCall:
        case IR::OpndKindLabel:
            Assert(!instr->isInlineeEntryInstr);
            Assert(forms & L_Ptr);
            return;

        default:
            Assert(false);
            __assume(false);
    }

    if (verify)
    {
        AssertMsg(false, "Src opnd not legal. Missing legalization");
        return;
    }

    // Hoist the src into a reg
    Assert(forms & L_Reg);
    Assert(!(instr->GetDst() && instr->GetDst()->IsEqual(src)));
    const Js::OpCode loadOpCode = GetLoadOp(src->GetType());
    if(src == instr->GetSrc2())
    {
        instr->HoistSrc2(loadOpCode);
        return;
    }
    // If src2 is the same operand as src1, rewire it to the hoisted reg so
    // both read the single load.
    const bool equalsSrc2 = instr->GetSrc2() && src->IsEqual(instr->GetSrc2());
    IR::Instr * hoistInstr = instr->HoistSrc1(loadOpCode);
    if(equalsSrc2)
    {
        instr->ReplaceSrc2(hoistInstr->GetDst());
    }
    // The inlinee-entry marker travels with the load, since that is where the
    // constant now lives.
    hoistInstr->isInlineeEntryInstr = instr->isInlineeEntryInstr;
    instr->isInlineeEntryInstr = false;
}
// Explicit instantiations of the legalizer templates.
// <false>: perform legalization, rewriting illegal operands.
// <true>: verify-only variants (assert instead of rewriting), compiled only in
// DBG builds.
template void LowererMD::Legalize<false>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<false>(IR::Instr *const instr, const LegalForms dstForms, const LegalForms src1Forms, LegalForms src2Forms);
template void LowererMD::LegalizeDst<false>(IR::Instr *const instr, const LegalForms forms);
template void LowererMD::LegalizeSrc<false>(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms);
template void LowererMD::MakeDstEquSrc1<false>(IR::Instr *const instr);
#if DBG
template void LowererMD::Legalize<true>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<true>(IR::Instr *const instr, const LegalForms dstForms, const LegalForms src1Forms, LegalForms src2Forms);
template void LowererMD::LegalizeDst<true>(IR::Instr *const instr, const LegalForms forms);
template void LowererMD::LegalizeSrc<true>(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms);
template void LowererMD::MakeDstEquSrc1<true>(IR::Instr *const instr);
#endif
  1911. IR::Instr *
  1912. LowererMD::LoadFunctionObjectOpnd(IR::Instr *instr, IR::Opnd *&functionObjOpnd)
  1913. {
  1914. IR::Opnd * src1 = instr->GetSrc1();
  1915. IR::Instr * instrPrev = instr->m_prev;
  1916. if (src1 == nullptr)
  1917. {
  1918. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  1919. StackSym *paramSym = StackSym::New(TyMachPtr, m_func);
  1920. IR::SymOpnd *paramOpnd = IR::SymOpnd::New(paramSym, TyMachPtr, m_func);
  1921. this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
  1922. IR::Instr * mov1 = IR::Instr::New(Js::OpCode::MOV, regOpnd, paramOpnd, m_func);
  1923. instr->InsertBefore(mov1);
  1924. functionObjOpnd = mov1->GetDst()->AsRegOpnd();
  1925. instrPrev = mov1;
  1926. instr->m_func->SetHasImplicitParamLoad();
  1927. }
  1928. else
  1929. {
  1930. // Inlinee, use the function object opnd on the instruction
  1931. functionObjOpnd = instr->UnlinkSrc1();
  1932. if (!functionObjOpnd->IsRegOpnd())
  1933. {
  1934. Assert(functionObjOpnd->IsAddrOpnd());
  1935. }
  1936. }
  1937. return instrPrev;
  1938. }
// Emits an inline fast path for Div_A when src2 is a tagged-int power of two.
// The fast path handles two cases without calling the generic helper:
// operands exactly divisible by src2, and operands divisible by src2/2 (odd
// quotient finished by Op_FinishOddDivByPow2). Anything else falls to $helper,
// which the caller fills in with the normal helper call sequence.
void
LowererMD::GenerateFastDivByPow2(IR::Instr *instr)
{
    //
    // Given:
    // dst = Div_A src1, src2
    // where src2 == power of 2
    //
    // Generate:
    //       MOV  s1, src1
    //       AND  s1, 0xFFFF000000000000 | (src2Value-1)   ----- test for tagged int and divisibility by src2Value  [int32]
    //       AND  s1, 0x00000001 | ((src2Value-1)<<1)                                                               [int31]
    //       CMP  s1, AtomTag_IntPtr
    //       JNE  $divbyhalf
    //       MOV  s1, src1
    //       SAR  s1, log2(src2Value)                      ----- perform the divide
    //       OR   s1, 1
    //       MOV  dst, s1
    //       JMP  $done
    // $divbyhalf:
    //       AND  s1, 0xFFFF000000000000 | (src2Value-1>>1) ----- test for tagged int and divisibility by src2Value /2 [int32]
    //       AND  s1, 0x00000001 | ((src2Value-1))                                                                     [int31]
    //       CMP  s1, AtomTag_IntPtr
    //       JNE  $helper
    //       MOV  s1, src1
    //       SAR  s1, log2(src2Value)                                                                                  [int32]
    //       SAR  s1, log2(src2Value) + 1                  ------ removes the tag and divides                          [int31]
    //       PUSH s1
    //       PUSH 0xXXXXXXXX (ScriptContext)
    //       CALL Op_FinishOddDivByPow2
    //       MOV  dst, eax
    //       JMP  $done
    // $helper:
    //       ...
    // $done:
    //

    // If src1 is known not to be an int, the tagged-int fast path can never
    // succeed; emit nothing.
    if (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->IsNotInt())
        return;

    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
    IR::LabelInstr *divbyhalf = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyVar, m_func);

    AnalysisAssert(src2);
    Assert(src2->IsVar() && Js::TaggedInt::Is(src2->m_address) && (Math::IsPow2(Js::TaggedInt::ToInt32(src2->m_address))));
    int32 src2Value = Js::TaggedInt::ToInt32(src2->m_address);

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

#if INT32VAR
    // dontEncode as src2 is a power of 2.
    // Mask checks the tag bits and the low (src2Value-1) bits in one AND.
    IR::Opnd *constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | (src2Value - 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    IR::Opnd *constant = IR::IntConstOpnd::New((0x00000001 | ((src2Value - 1) << 1)), TyInt32, m_func);
#endif

    // AND s1, constant
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // CMP s1, AtomTag_IntPtr -- equal only if s1 was a tagged int divisible by src2Value
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE $divbyhalf
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, divbyhalf, m_func));

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

    s1 = s1->UseWithNewType(TyInt32, m_func)->AsRegOpnd();

    // SAR s1, log2(src2Value) -- the arithmetic shift is the divide
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func), m_func));

    if(s1->GetSize() != MachPtr)
    {
        s1 = s1->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
    }

#if INT32VAR
    GenerateInt32ToVarConversion(s1, instr);
#else
    // OR s1, 1 -- re-apply the tagged-int tag bit
    instr->InsertBefore(IR::Instr::New(Js::OpCode::OR, s1, s1, IR::IntConstOpnd::New(1, TyInt32, m_func), m_func));
#endif

    // MOV dst, s1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, s1, m_func));

    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // $divbyhalf:
    instr->InsertBefore(divbyhalf);

#if INT32VAR
    constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | ((src2Value-1) >> 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    constant = IR::IntConstOpnd::New((0x00000001 | (src2Value-1)), TyInt32, m_func);
#endif

    // AND s1, constant -- same tag/divisibility test but against src2Value/2
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // CMP s1, AtomTag_IntPtr
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE $helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, helper, m_func));

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

    s1 = s1->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();

#if INT32VAR
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func);
#else
    // +1 also shifts out the tag bit on x86 tagged ints.
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value) + 1, TyInt32, m_func);
#endif

    // SAR s1, shiftOpnd
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, shiftOpnd, m_func));

    // PUSH s1
    // PUSH ScriptContext
    // CALL Op_FinishOddDivByPow2 -- produces the final (possibly non-int) result
    {
        IR::JnHelperMethod helperMethod;

        if (instr->dstIsTempNumber)
        {
            // Use the in-place variant so the helper writes into the dst's
            // stack temp number instead of allocating.
            IR::Opnd *tempOpnd;
            helperMethod = IR::HelperOp_FinishOddDivByPow2InPlace;
            Assert(dst->IsRegOpnd());
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);

            IR::Instr *load = this->m_lowerer->InsertLoadStackAddress(tempNumberSym, instr);
            tempOpnd = load->GetDst();
            this->lowererMDArch.LoadHelperArgument(instr, tempOpnd);
        }
        else
        {
            helperMethod = IR::HelperOp_FinishOddDivByPow2;
        }

        m_lowerer->LoadScriptContext(instr);

        lowererMDArch.LoadHelperArgument(instr, s1);

        IR::Instr *call = IR::Instr::New(Js::OpCode::Call, dst, IR::HelperCallOpnd::New(helperMethod, m_func), m_func);
        instr->InsertBefore(call);
        lowererMDArch.LowerCall(call, 0);
    }

    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // $helper:
    instr->InsertBefore(helper);

    // $done:
    instr->InsertAfter(done);
}
  2095. ///----------------------------------------------------------------------------
  2096. ///
  2097. /// LowererMD::GenerateFastCmSrEqConst
  2098. ///
  2099. ///----------------------------------------------------------------------------
// Emits an inline strict-equality compare when src2 is a constant-valued reg
// (e.g. null/true/false): a pointer compare against the constant decides the
// result directly. The original instr is reused in place as the final
// "MOV dst, true". Always returns true (the fast path fully replaces the op).
bool
LowererMD::GenerateFastCmSrEqConst(IR::Instr *instr)
{
    //
    // Given:
    // s1 = CmSrEq_A s2, s3
    // where either s2 or s3 is 'null', 'true' or 'false'
    //
    // Generate:
    //
    //     CMP s2, s3
    //     JEQ $mov_true
    //     MOV s1, Library.GetFalse()
    //     JMP $done
    // $mov_true:
    //     MOV s1, Library.GetTrue()
    // $done:
    //

    Assert(m_lowerer->IsConstRegOpnd(instr->GetSrc2()->AsRegOpnd()));

    IR::Opnd *opnd = instr->GetSrc1();
    IR::RegOpnd *opndReg = instr->GetSrc2()->AsRegOpnd();
    IR::LabelInstr *labelMovTrue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // src1 must be in a register for the CMP below.
    if (!opnd->IsRegOpnd())
    {
        IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
        IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, lhsReg, opnd, m_func);
        instr->InsertBefore(mov);

        opnd = lhsReg;
    }

    Assert(opnd->IsRegOpnd());

    // CMP s2, s3
    // JEQ $mov_true
    // Compare against the constant value backing src2's sym.
    this->m_lowerer->InsertCompareBranch(opnd, opndReg->m_sym->GetConstOpnd(), Js::OpCode::BrEq_A, labelMovTrue, instr);

    // MOV s1, 'false'
    IR::Instr *instrMov = IR::Instr::New(Js::OpCode::MOV,
        instr->GetDst(),
        m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
        m_func);
    instr->InsertBefore(instrMov);

    // JMP $done
    IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    instr->InsertBefore(jmp);

    // $mov_true:
    instr->InsertBefore(labelMovTrue);

    // MOV s1, 'true'
    // Reuse the original instruction as the true-case MOV.
    instr->m_opcode = Js::OpCode::MOV;
    instr->UnlinkSrc1();
    instr->UnlinkSrc2();
    instr->SetSrc1(m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue));
    instr->ClearBailOutInfo();
    Legalize(instr);

    // $done:
    instr->InsertAfter(labelDone);

    return true;
}
  2156. ///----------------------------------------------------------------------------
  2157. ///
  2158. /// LowererMD::GenerateFastCmXxTaggedInt
  2159. ///
  2160. ///----------------------------------------------------------------------------
// Emits a branch-free inline compare for Cmxx_A when both sources may be
// tagged ints. Returns true when both srcs are statically known tagged ints
// (the original instr is removed entirely); returns false when a runtime
// tagged-int check plus a $helper label was emitted and the caller must still
// generate the normal helper call before $fallthru.
bool LowererMD::GenerateFastCmXxTaggedInt(IR::Instr *instr, bool isInHelper /* = false */)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints (i.e., are vars with the low bit set).
    //
    // Given:
    //
    //      Cmxx_A dst, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    //      MOV r1, src1
    //      if (==, !=, !== or ===)
    //          SUB r1, src2
    //          NEG r1                                  // Sets CF if r1 != 0
    //          SBB r1, r1                              // CF == 1 ? r1 = -1 : r1 = 0
    //      else
    //          MOV r2, 0
    //          CMP r1, src2
    //          SETcc r2
    //          DEC r2
    //          set r1 to r2
    //      AND r1, (notEqualResult - equalResult)
    //      ADD r1, equalResult
    //      MOV dst, r1
    //      JMP $fallthru
    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:

    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Opnd * dst = instr->GetDst();
    IR::RegOpnd * r1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func, isInHelper);

    Assert(src1 && src2 && dst);

    // Not tagged ints? The fast path cannot apply; bail out before emitting.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }

    bool isNeqOp = instr->m_opcode == Js::OpCode::CmSrNeq_A || instr->m_opcode == Js::OpCode::CmNeq_A;
    // The two possible Var results; the AND/ADD sequence below selects between
    // them without branching.
    intptr_t notEqualResult = isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();
    intptr_t equalResult = !isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();

    // Tagged ints?
    bool isTaggedInts = false;
    if (src1->IsTaggedInt())
    {
        if (src2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        // Runtime check that both are tagged ints, else jump to $helper.
        this->GenerateSmIntPairTest(instr, src1, src2, helper);
    }

    // MOV r1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, m_func));

    Js::OpCode setCC_Opcode = Js::OpCode::Nop;

    switch(instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmEq_A:
        break;

    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmNeq_A:
        break;

    case Js::OpCode::CmGe_A:
        setCC_Opcode = Js::OpCode::SETGE;
        break;

    case Js::OpCode::CmGt_A:
        setCC_Opcode = Js::OpCode::SETG;
        break;

    case Js::OpCode::CmLe_A:
        setCC_Opcode = Js::OpCode::SETLE;
        break;

    case Js::OpCode::CmLt_A:
        setCC_Opcode = Js::OpCode::SETL;
        break;

    default:
        Assume(UNREACHED);
    }

    if (setCC_Opcode == Js::OpCode::Nop)
    {
        // (In)equality: turn "r1 == src2" into r1 = 0 / -1 via SUB/NEG/SBB.
        // SUB r1, src2
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, r1, r1, src2, m_func);
        instr->InsertBefore(subInstr);
        Legalize(subInstr);     // src2 may need legalizing

        // NEG r1
        instr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, r1, r1, m_func));

        // SBB r1, r1
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SBB, r1, r1, r1, m_func));
    }
    else
    {
        // Relational: compute r2 = SETcc - 1 (0 when condition holds, -1 otherwise).
        IR::Instr *instrNew;
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyMachPtr, this->m_func);

        // MOV r2, 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r2, IR::IntConstOpnd::New(0, TyMachReg, this->m_func), m_func));

        // CMP r1, src2 -- 32-bit compare of the tagged values
        IR::Opnd *r1_32 = r1->UseWithNewType(TyInt32, this->m_func);
        IR::Opnd *src2_32 = src2->UseWithNewType(TyInt32, this->m_func);
        instrNew = IR::Instr::New(Js::OpCode::CMP, m_func);
        instrNew->SetSrc1(r1_32);
        instrNew->SetSrc2(src2_32);
        instr->InsertBefore(instrNew);

        // SETcc r2
        IR::RegOpnd *r2_i8 = (IR::RegOpnd*) r2->UseWithNewType(TyInt8, this->m_func);
        instrNew = IR::Instr::New(setCC_Opcode, r2_i8, r2_i8, m_func);
        instr->InsertBefore(instrNew);

        // DEC r2
        instr->InsertBefore(IR::Instr::New(Js::OpCode::DEC, r2, r2, m_func));

        // r1 <- r2
        r1 = r2;
    }

    // r1 is now 0 (condition true) or -1 (condition false); use it as a mask
    // to select between the two result Vars.
    // AND r1, (notEqualResult - equalResult)
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, r1, r1, m_func);
        andInstr->SetSrc2(IR::AddrOpnd::New((void*)((size_t)notEqualResult - (size_t)equalResult), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func));
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // ADD r1, equalResult
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::ADD, r1, r1, m_func);
        add->SetSrc2(IR::AddrOpnd::New(equalResult, IR::AddrOpndKind::AddrOpndKindDynamicVar, this->m_func));
        instr->InsertBefore(add);
        Legalize(add);
    }

    // MOV dst, r1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, r1, m_func));

    if (isTaggedInts)
    {
        // Fast path is complete and unconditional; the original op goes away.
        instr->Remove();
        return true;
    }

    // JMP $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, fallthru, m_func));

    instr->InsertBefore(helper);
    instr->InsertAfter(fallthru);

    return false;
}
// Float64-specialized compare: shares the generic inline compare fast path.
void LowererMD::GenerateFastCmXxR8(IR::Instr *instr)
{
    GenerateFastCmXx(instr);
}
// Int32-specialized compare: shares the generic inline compare fast path.
void LowererMD::GenerateFastCmXxI4(IR::Instr *instr)
{
    GenerateFastCmXx(instr);
}
// Lowers a type-specialized compare (int, int64, or float sources; int or Var
// dst) into a CMP/COMIS* followed by SETcc (int dst) or CMOVcc (Var dst).
// For float sources, a JP over the SETcc/CMOVcc leaves the pre-assigned
// default result in place when either operand is NaN. On x86 (#ifndef
// _M_X64), int64 sources are compared as a high/low register pair.
// The original 'instr' is removed at the end.
void LowererMD::GenerateFastCmXx(IR::Instr *instr)
{
    // For float src:
    // dst = MOV 0/1
    // (U)COMISD src1, src2
    // JP $done
    // dst.i8 = SetCC dst.i8
    // $done:

    // for int src:
    // CMP src1, src2
    // dst = MOV 0 / false
    // dst.i8 = SetCC dst.i8 / CMOVcc true

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();
    // 'tmp' receives the compare result; it only differs from dst when dst
    // aliases a source (see below).
    IR::Opnd * tmp = dst;
    bool isIntDst = dst->AsRegOpnd()->m_sym->IsInt32();
    bool isFloatSrc = src1->IsFloat();
    bool isInt64Src = src1->IsInt64();
    Assert(!isFloatSrc || src2->IsFloat());
    Assert(!isInt64Src || src2->IsInt64());
    Assert(!isFloatSrc || AutoSystemInfo::Data.SSE2Available());
    IR::Opnd *opnd;
    IR::Instr *newInstr;

    Assert(src1->IsRegOpnd());

#if LOWER_SPLIT_INT64
    // On 32-bit builds int64 values live as lo/hi register pairs; compare the
    // high halves first, the low halves later (see the !_M_X64 block below).
    Int64RegPair src1Pair, src2Pair;
    if (isInt64Src)
    {
        src1Pair = this->m_func->FindOrCreateInt64Pair(src1);
        src2Pair = this->m_func->FindOrCreateInt64Pair(src2);
        src1 = src1Pair.high;
        src2 = src2Pair.high;
    }
#endif

    // 'done' is the insertion point for the generated sequence: a label for
    // float (so JP can skip the SETcc/CMOVcc), otherwise the original instr.
    IR::Instr * done;
    if (isFloatSrc)
    {
        done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        instr->InsertBefore(done);
    }
    else
    {
        done = instr;
    }

    bool isNegOpt = instr->m_opcode == Js::OpCode::CmNeq_A || instr->m_opcode == Js::OpCode::CmSrNeq_A;
    bool initDstToFalse = true;
    if (isIntDst)
    {
        // Fast path for int src with destination type specialized to int
        // reg = MOV 0 will get peeped to XOR reg, reg which sets the flags.
        // Put the MOV before the CMP, but use a tmp if dst == src1/src2
        if (dst->IsEqual(src1) || dst->IsEqual(src2))
        {
            tmp = IR::RegOpnd::New(dst->GetType(), this->m_func);
        }
        // dst = MOV 0
        // For float != the NaN default must be 1 (NaN compares are "not equal").
        if (isFloatSrc && isNegOpt)
        {
            opnd = IR::IntConstOpnd::New(1, TyInt32, this->m_func);
        }
        else
        {
            opnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        }
        m_lowerer->InsertMove(tmp, opnd, done);
    }
    else if (isFloatSrc)
    {
        // Fast path for float src when destination is a var
        // Assign default value for destination in case either src is NaN
        Assert(dst->IsVar());
        if (isNegOpt)
        {
            opnd = this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue);
        }
        else
        {
            opnd = this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
            // False is already in dst; skip the post-CMP false init below.
            initDstToFalse = false;
        }
        Lowerer::InsertMove(tmp, opnd, done);
    }

    Js::OpCode cmpOp;
    if (isFloatSrc)
    {
        // Equality uses the unordered compare; relational uses the ordered one.
        if (instr->m_opcode == Js::OpCode::CmEq_A || instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        }
        else
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
        }
    }
    else
    {
        cmpOp = Js::OpCode::CMP;
    }
    // CMP src1, src2
    newInstr = IR::Instr::New(cmpOp, this->m_func);
    newInstr->SetSrc1(src1);
    newInstr->SetSrc2(src2);
    done->InsertBefore(newInstr);
    LowererMD::Legalize(newInstr);

    if (isFloatSrc)
    {
        // PF set => unordered (NaN): skip the SETcc/CMOVcc, keep the default.
        newInstr = IR::BranchInstr::New(Js::OpCode::JP, done->AsLabelInstr(), this->m_func);
        done->InsertBefore(newInstr);
    }

    // For all cases where the operator is a comparison, we do not want to emit False value
    // since it has already been generated in the if block before.
    if (!isIntDst && initDstToFalse)
    {
        opnd = this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
        Lowerer::InsertMove(tmp, opnd, done);
    }

    Js::OpCode useCC;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmSrEq_A:
        useCC = isIntDst ? Js::OpCode::SETE : Js::OpCode::CMOVE;
        break;

    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrNeq_A:
        useCC = isIntDst ? Js::OpCode::SETNE : Js::OpCode::CMOVNE;
        break;

    case Js::OpCode::CmGe_I4:
        useCC = isIntDst ? Js::OpCode::SETGE : Js::OpCode::CMOVGE;
        break;

    case Js::OpCode::CmGt_I4:
        useCC = isIntDst ? Js::OpCode::SETG : Js::OpCode::CMOVG;
        break;

    case Js::OpCode::CmLe_I4:
        useCC = isIntDst ? Js::OpCode::SETLE : Js::OpCode::CMOVLE;
        break;

    case Js::OpCode::CmLt_I4:
        useCC = isIntDst ? Js::OpCode::SETL : Js::OpCode::CMOVL;
        break;

    case Js::OpCode::CmUnGe_I4:
    case Js::OpCode::CmGe_A:
        useCC = isIntDst ? Js::OpCode::SETAE : Js::OpCode::CMOVAE;
        break;

    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmGt_A:
        useCC = isIntDst ? Js::OpCode::SETA : Js::OpCode::CMOVA;
        break;

    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmLe_A:
        useCC = isIntDst ? Js::OpCode::SETBE : Js::OpCode::CMOVBE;
        break;

    case Js::OpCode::CmUnLt_I4:
    case Js::OpCode::CmLt_A:
        useCC = isIntDst ? Js::OpCode::SETB : Js::OpCode::CMOVB;
        break;

    default:
        useCC = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    if (isIntDst)
    {
        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(useCC, tmp_i8, tmp_i8, this->m_func);
    }
    else
    {
        // regTrue = MOV true
        IR::Opnd *regTrue = IR::RegOpnd::New(TyMachPtr, this->m_func);
        Lowerer::InsertMove(regTrue, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), done);

        // tmp = CMOVcc tmp, regTrue
        newInstr = IR::Instr::New(useCC, tmp, tmp, regTrue, this->m_func);
    }
    done->InsertBefore(newInstr);

#ifndef _M_X64
    if (isInt64Src)
    {
        // High halves were equal only if ZF is set; otherwise the SETcc above
        // already decided the result and the low compare is skipped.
        IR::LabelInstr* skipLow = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        newInstr = IR::BranchInstr::New(Js::OpCode::JNE, skipLow, this->m_func);
        done->InsertBefore(newInstr);

        newInstr = IR::Instr::New(cmpOp, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src2Pair.low);
        done->InsertBefore(newInstr);

        Js::OpCode lowUseCC = useCC;
        // Need to do an unsigned compare for the lower part
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmGe_I4: lowUseCC = Js::OpCode::SETAE; break;
        case Js::OpCode::CmGt_I4: lowUseCC = Js::OpCode::SETA; break;
        case Js::OpCode::CmLe_I4: lowUseCC = Js::OpCode::SETBE; break;
        case Js::OpCode::CmLt_I4: lowUseCC = Js::OpCode::SETB; break;
        }

        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(lowUseCC, tmp_i8, tmp_i8, this->m_func);
        done->InsertBefore(newInstr);
        done->InsertBefore(skipLow);
    }
#endif

    if (tmp != dst)
    {
        // dst aliased a src; now that the srcs have been read, copy the result over.
        newInstr = IR::Instr::New(Js::OpCode::MOV, dst, tmp, this->m_func);
        instr->InsertBefore(newInstr);
    }

    instr->Remove();
}
  2527. IR::Instr * LowererMD::GenerateConvBool(IR::Instr *instr)
  2528. {
  2529. // TEST src1, src1
  2530. // dst = MOV true
  2531. // rf = MOV false
  2532. // dst = CMOV dst, rf
  2533. IR::Instr *instrNew, *instrFirst;
  2534. IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
  2535. IR::RegOpnd *regFalse;
  2536. // TEST src1, src2
  2537. instrFirst = instrNew = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  2538. instrNew->SetSrc1(instr->GetSrc1());
  2539. instrNew->SetSrc2(instr->GetSrc1());
  2540. instr->InsertBefore(instrNew);
  2541. // dst = MOV true
  2542. Lowerer::InsertMove(dst, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
  2543. // rf = MOV false
  2544. regFalse = IR::RegOpnd::New(TyMachPtr, this->m_func);
  2545. Lowerer::InsertMove(regFalse, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
  2546. // Add dst as src1 of CMOV to create a pseudo use of dst. Otherwise, the register allocator
  2547. // won't know the previous dst is needed. and needed in the same register as the dst of the CMOV.
  2548. // dst = CMOV dst, rf
  2549. instrNew = IR::Instr::New(Js::OpCode::CMOVE, dst, dst, regFalse, this->m_func);
  2550. instr->InsertBefore(instrNew);
  2551. instr->Remove();
  2552. return instrFirst;
  2553. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastAdd
///
/// Emits the tagged-int fast path for an Add and returns true; returns false
/// when profile/value-type info says a fast path is not worthwhile, in which
/// case the caller lowers straight to the helper call.
///
/// NOTE: We assume that only the sum of two Int31's will have 0x2 set. This
/// is only true until we have a var type with tag == 0x2.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastAdd(IR::Instr * instrAdd)
{
    // Given:
    //
    // dst = Add src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1          -- Get rid of one of the tag [Int31 only]
    // s1 = ADD s1, src2    -- try an inline add
    //      JO $helper      -- bail if the add overflowed
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *     instr;
    IR::LabelInstr *    labelHelper;
    IR::LabelInstr *    labelFallThru;
    IR::Opnd *      opndReg;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndSrc2;

    opndSrc1 = instrAdd->GetSrc1();
    opndSrc2 = instrAdd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");

    // Generate fastpath for Incr_A anyway -
    // Incrementing strings representing integers can be inter-mixed with integers e.g. "1"++ -> converts 1 to an int and thereafter, integer increment is expected.
    if (opndSrc1->IsRegOpnd() && (opndSrc1->AsRegOpnd()->IsNotInt() || opndSrc1->GetValueType().IsString()
        || (instrAdd->m_opcode != Js::OpCode::Incr_A && opndSrc1->GetValueType().IsLikelyString())))
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && (opndSrc2->AsRegOpnd()->IsNotInt() ||
        opndSrc2->GetValueType().IsLikelyString()))
    {
        return false;
    }

    // Tagged ints? If both sources are statically known tagged ints, the runtime
    // type test below can be skipped entirely.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrAdd, opndSrc1, opndSrc2, labelHelper);
    }

    if (opndSrc1->IsAddrOpnd())
    {
        // If opnd1 is a constant, just swap them.
        IR::Opnd *opndTmp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = opndTmp;
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrAdd->InsertBefore(instr);

#if !INT32VAR
    // x86 tagging scheme: the var carries the tag in its low bit, so adding two
    // tagged values double-counts the tag. Fold the correction (a DEC by 1) into
    // the constant when src2 is a constant; otherwise DEC the copied register.
    // Do the DEC in place
    if (opndSrc2->IsAddrOpnd())
    {
        Assert(opndSrc2->AsAddrOpnd()->GetAddrOpndKind() == IR::AddrOpndKindConstantVar);
        opndSrc2 = IR::IntConstOpnd::New(*((int *)&(opndSrc2->AsAddrOpnd()->m_address)) - 1, TyInt32, this->m_func, opndSrc2->AsAddrOpnd()->m_dontEncode);
        opndSrc2 = opndSrc2->Use(this->m_func);
    }
    else if (opndSrc2->IsIntConstOpnd())
    {
        Assert(opndSrc2->GetType() == TyInt32);
        opndSrc2 = opndSrc2->Use(this->m_func);
        opndSrc2->AsIntConstOpnd()->DecrValue(1);
    }
    else
    {
        // s1 = DEC s1
        opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::DEC, opndReg, opndReg, this->m_func);
        instrAdd->InsertBefore(instr);
    }

    instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
#else
    // x64 tagging scheme: the int payload lives in the low 32 bits, so the add is
    // done on the truncated 32-bit values and the tag is re-applied afterwards.
    if (opndSrc2->IsAddrOpnd())
    {
        // truncate to untag
        int value = ::Math::PointerCastToIntegralTruncate<int>(opndSrc2->AsAddrOpnd()->m_address);
        if (value == 1)
        {
            // Adding 1 is common enough to use the shorter INC encoding.
            instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
        }
        else
        {
            opndSrc2 = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
            instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
        }
    }
    else
    {
        instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2->UseWithNewType(TyInt32, this->m_func), this->m_func);
    }
#endif

    // s1 = ADD s1, src2
    instrAdd->InsertBefore(instr);
    Legalize(instr);

    // JO $helper -- signed overflow means the result doesn't fit an Int31/Int32 var
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrAdd->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrAdd);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrAdd->GetDst(), opndReg, this->m_func);
    instrAdd->InsertBefore(instr);

    // JMP $fallthru -- skip the helper call the caller appends after $helper
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrAdd->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrAdd->InsertBefore(labelHelper);
    instrAdd->InsertAfter(labelFallThru);

    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastSub
///
/// Emits the tagged-int fast path for a Sub and returns true; returns false
/// when either source is known not to be an int, in which case the caller
/// lowers straight to the helper call.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastSub(IR::Instr * instrSub)
{
    // Given:
    //
    // dst = Sub src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = SUB s1, src2    -- try an inline sub
    //      JO $helper      -- bail if the subtract overflowed
    //      JNE $helper
    // s1 = INC s1          -- restore the var tag on the result [Int31 only]
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *     instr;
    IR::LabelInstr *    labelHelper;
    IR::LabelInstr *    labelFallThru;
    IR::Opnd *      opndReg;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndSrc2;

    opndSrc1 = instrSub->GetSrc1();
    opndSrc2 = instrSub->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Sub instruction");

    // Not tagged ints? No point emitting the fast path when a source is known non-int.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }

    // Tagged ints? If both are statically tagged ints, skip the runtime type test.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrSub, opndSrc1, opndSrc2, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrSub->InsertBefore(instr);

    // s1 = SUB s1, src2
    // Subtracting two tagged values cancels the tags, leaving an untagged difference.
    instr = IR::Instr::New(Js::OpCode::SUB, opndReg, opndReg, opndSrc2, this->m_func);
    instrSub->InsertBefore(instr);

    // JO $helper -- signed overflow means the result doesn't fit an Int31/Int32 var
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrSub->InsertBefore(instr);

#if !INT32VAR
    // s1 = INC s1 -- put the (cancelled) low-bit tag back on the result
    instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
    instrSub->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrSub);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrSub->GetDst(), opndReg, this->m_func);
    instrSub->InsertBefore(instr);

    // JMP $fallthru -- skip the helper call the caller appends after $helper
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrSub->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrSub->InsertBefore(labelHelper);
    instrSub->InsertAfter(labelFallThru);

    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastMul
///
/// Emits the tagged-int fast path for a Mul, including the -0 check required
/// by JS semantics (negative * 0 must produce -0, which is not representable
/// as a tagged int). Returns true when the fast path was emitted; the caller
/// still appends the helper call after $helper.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastMul(IR::Instr * instrMul)
{
    // Given:
    //
    // dst = Mul src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1          -- clear the var tag from the value to be multiplied [Int31 only]
    // s2 = MOV src2
    // s2 = SAR s2, Js::VarTag_Shift -- extract the real src2 amount from the var [Int31 only]
    // s1 = IMUL s1, s2     -- do the signed mul
    //      JO $helper      -- bail if the result overflowed
    // s3 = MOV s1
    //      TEST s3, s3     -- Check result is 0. might be -0. Result is -0 when a negative number is multiplied with 0.
    //      JEQ $zero
    //      JMP $nonzero
    // $zero:               -- result of mul was 0. try to check for -0
    // s2 = ADD s2, src1    -- Add src1 to s2
    //      JGT $nonzero    -- positive 0. [Int31 only]
    //      JGE $nonzero    -- positive 0. [Int32 only]
    // dst = ToVar(-0.0)    -- load negative 0
    //      JMP $fallthru
    // $nonzero:
    // s3 = INC s3          -- restore the var tag on the result [Int31 only]
    // s3 = OR s3, AtomTag_IntPtr [Int32 only]
    // dst= MOV s3
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::LabelInstr *    labelHelper;
    IR::LabelInstr *    labelFallThru;
    IR::LabelInstr *    labelNonZero;
    IR::Instr *     instr;
    IR::RegOpnd *   opndReg1;
    IR::RegOpnd *   opndReg2;
    IR::RegOpnd *   s3;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndSrc2;

    opndSrc1 = instrMul->GetSrc1();
    opndSrc2 = instrMul->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on mul instruction");

    // NOTE(review): unlike GenerateFastAdd/Sub (which return false here), a known
    // non-int source returns true without emitting anything -- presumably the
    // caller's contract differs for Mul; confirm against the call site.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // (If not 2 Int31's, jump to $helper.)
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    labelNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    this->GenerateSmIntPairTest(instrMul, opndSrc1, opndSrc2, labelHelper);

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    // Keep any immediate in the src2 slot so src1 can feed the IMUL register copy.
    if (opndSrc1->IsImmediateOpnd())
    {
        IR::Opnd * temp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = temp;
    }

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);

#if !INT32VAR
    // s1 = DEC s1 -- clear the low-bit tag so the product isn't skewed
    instr = IR::Instr::New(Js::OpCode::DEC, opndReg1, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    if (opndSrc2->IsImmediateOpnd())
    {
        Assert(opndSrc2->IsAddrOpnd() && opndSrc2->AsAddrOpnd()->IsVar());

        // Untag the constant at compile time instead of emitting a SAR.
        IR::Opnd *opnd2 = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address), TyInt32, this->m_func);

        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opnd2, this->m_func);
        instrMul->InsertBefore(instr);
    }
    else
    {
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, this->m_func);
        instrMul->InsertBefore(instr);

#if !INT32VAR
        // s2 = SAR s2, Js::VarTag_Shift -- recover the untagged integer value
        instr = IR::Instr::New(
            Js::OpCode::SAR, opndReg2, opndReg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrMul->InsertBefore(instr);
#endif
    }

    // s1 = IMUL s1, s2
    instr = IR::Instr::New(Js::OpCode::IMUL2, opndReg1, opndReg1, opndReg2, this->m_func);
    instrMul->InsertBefore(instr);

    // JO $helper -- product overflowed 32 bits
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrMul->InsertBefore(instr);

    // MOV s3, s1
    s3 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, s3, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);

    // TEST s3, s3 -- a zero product may really be -0 (negative * 0)
    instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(s3);
    instr->SetSrc2(s3);
    instrMul->InsertBefore(instr);

    // JEQ $zero
    IR::LabelInstr *labelZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelZero, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $nonzero
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // $zero:
    instrMul->InsertBefore(labelZero);

    // s2 = ADD s2, src1
    // Product is 0, so at least one operand is 0; the sum's sign tells whether
    // the other operand was negative (i.e. whether the result must be -0).
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg2, opndReg2, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);
    Legalize(instr);

    // JGT $nonzero (JGE for the INT32VAR scheme, where an untagged 0 sum is also "positive 0")
#if INT32VAR
    Js::OpCode greaterOpCode = Js::OpCode::JGE;
#else
    Js::OpCode greaterOpCode = Js::OpCode::JGT;
#endif
    instr = IR::BranchInstr::New(greaterOpCode, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // dst = ToVar(-0.0)    -- load negative 0
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), m_lowerer->LoadLibraryValueOpnd(instrMul, LibraryValue::ValueNegativeZero), this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $nonzero:
    instrMul->InsertBefore(labelNonZero);

#if !INT32VAR
    // s3 = INC s3 -- restore the low-bit var tag on the result
    instr = IR::Instr::New(Js::OpCode::INC, s3, s3, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    // Cast is fine. We know ChangeType returns IR::Opnd * but it
    // preserves the Type.
    //
    if(TyMachReg != s3->GetType())
    {
        s3 = static_cast<IR::RegOpnd *>(s3->UseWithNewType(TyMachPtr, this->m_func));
    }

#if INT32VAR
    // s3 = OR s3, AtomTag_IntPtr
    GenerateInt32ToVarConversion(s3, instrMul);
#endif

    // dst = MOV s3
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), s3, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrMul->InsertBefore(labelHelper);
    instrMul->InsertAfter(labelFallThru);

    return true;
}
// Emits the tagged-int fast path for a Neg. Folds the negation at compile time
// when the source is a known integer constant (returning false so the caller
// skips the helper); otherwise emits an inline NEG guarded by int and zero
// checks (0 must go to the helper to produce -0) and returns true.
bool
LowererMD::GenerateFastNeg(IR::Instr * instrNeg)
{
    // Given:
    //
    // dst = Not src
    //
    // Generate:
    //
    //       if not int, jump $helper
    //       if src == 0     -- test for zero (must be handled by the runtime to preserve
    //       JEQ $helper        difference btw +0 and -0)
    // dst = MOV src
    // dst = NEG dst         -- do an inline NEG
    // dst = ADD dst, 2      -- restore the var tag on the result [int31 only]
    //       JO $helper
    // dst = OR dst, AtomTag_Ptr [int32 only]
    //       JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *     instr;
    IR::LabelInstr *    labelHelper = nullptr;
    IR::LabelInstr *    labelFallThru = nullptr;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndDst;
    bool usingNewDst = false;
    opndSrc1 = instrNeg->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Neg instruction");

    // If dst aliases src, negate into a temp so the original value stays intact
    // for the helper path.
    if(opndSrc1->IsEqual(instrNeg->GetDst()))
    {
        usingNewDst = true;
        opndDst = IR::RegOpnd::New(TyInt32, this->m_func);
    }
    else
    {
        opndDst = instrNeg->GetDst()->UseWithNewType(TyInt32, this->m_func);
    }

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        // Constant-fold: replace the Neg with a plain assign of the negated value.
        IR::Opnd *newOpnd;
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();

        if (value == 0)
        {
            // If the negate operand is zero, the result is -0.0, which is a Number rather than an Int31.
            newOpnd = m_lowerer->LoadLibraryValueOpnd(instrNeg, LibraryValue::ValueNegativeZero);
        }
        else
        {
            // negation below can overflow because max negative int32 value > max positive value by 1.
            newOpnd = IR::AddrOpnd::NewFromNumber(-(int64)value, m_func);
        }

        instrNeg->ClearBailOutInfo();
        instrNeg->FreeSrc1();
        instrNeg->SetSrc1(newOpnd);
        instrNeg = this->ChangeToAssign(instrNeg);

        // Skip lowering call to helper
        return false;
    }

    bool isInt = (opndSrc1->IsTaggedInt());

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isInt)
    {
        GenerateSmIntTest(opndSrc1, instrNeg, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // A tagged zero must go to the helper: -0 is a Number, not an Int31.
    GenerateTaggedZeroTest(opndSrc1, instrNeg, labelHelper);

    // dst = MOV src
    instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, this->m_func);
    instrNeg->InsertBefore(instr);

    // dst = NEG dst
    instr = IR::Instr::New(Js::OpCode::NEG, opndDst, opndDst, this->m_func);
    instrNeg->InsertBefore(instr);

#if !INT32VAR
    // dst = ADD dst, 2
    // NEG of a tagged value turns tag +1 into -1; adding 2 restores the +1 tag.
    instr = IR::Instr::New(Js::OpCode::ADD, opndDst, opndDst, IR::IntConstOpnd::New(2, TyInt32, this->m_func), this->m_func);
    instrNeg->InsertBefore(instr);
#endif

    // JO $helper -- negation (or tag restore) overflowed
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrNeg->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndDst->GetType())
    {
        opndDst = opndDst->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    GenerateInt32ToVarConversion(opndDst, instrNeg);
#endif

    if(usingNewDst)
    {
        instr = IR::Instr::New(Js::OpCode::MOV, instrNeg->GetDst(), opndDst, this->m_func);
        instrNeg->InsertBefore(instr);
    }

    // JMP $fallthru -- skip the helper call the caller appends after $helper
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrNeg->InsertBefore(instr);

    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNeg->InsertBefore(labelHelper);
    instrNeg->InsertAfter(labelFallThru);

    return true;
}
  3112. void
  3113. LowererMD::GenerateFastBrS(IR::BranchInstr *brInstr)
  3114. {
  3115. IR::Opnd *src1 = brInstr->UnlinkSrc1();
  3116. Assert(src1->IsIntConstOpnd() || src1->IsAddrOpnd() || src1->IsRegOpnd());
  3117. IR::Instr *cmpInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  3118. cmpInstr->SetSrc1(m_lowerer->LoadOptimizationOverridesValueOpnd(brInstr, OptimizationOverridesValue::OptimizationOverridesSideEffects));
  3119. cmpInstr->SetSrc2(src1);
  3120. brInstr->InsertBefore(cmpInstr);
  3121. Legalize(cmpInstr);
  3122. Js::OpCode opcode = Js::OpCode::InvalidOpCode;
  3123. switch(brInstr->m_opcode)
  3124. {
  3125. case Js::OpCode::BrHasSideEffects:
  3126. opcode = Js::OpCode::JNE;
  3127. break;
  3128. case Js::OpCode::BrNotHasSideEffects:
  3129. opcode = Js::OpCode::JEQ;
  3130. break;
  3131. default:
  3132. Assert(UNREACHED);
  3133. __assume(false);
  3134. }
  3135. brInstr->m_opcode = opcode;
  3136. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateSmIntPairTest
///
/// Generate code to test whether the given operands are both Int31 vars
/// and branch to the given label if not.
///
/// Two implementations follow: the !INT32VAR (x86, low-bit tag) version and
/// the INT32VAR (x64, upper-32-bit tag) version. Both return the instruction
/// preceding the emitted sequence.
///
///----------------------------------------------------------------------------
#if !INT32VAR
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    IR::Opnd *           opndReg;
    IR::Instr *          instrPrev = instrInsert->m_prev;
    IR::Instr *          instr;

    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);

    // Normalize: if one operand is a statically known tagged int, make it src2
    // so only src1 needs a runtime check.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }

    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically tagged: nothing to test.
            return instrPrev;
        }

        // Only src1 needs a runtime tag-bit check:
        //      TEST src1, AtomTag
        //      JEQ $fail
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc1);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
        instrInsert->InsertBefore(instr);
    }
    else
    {
        // Both need checking; AND the tag bits together so one TEST covers both:
        //      s1 = MOV src1
        //      s1 = AND s1, 1
        //           TEST s1, src2
        //           JEQ $fail

        // s1 = MOV src1
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = AND s1, AtomTag
        instr = IR::Instr::New(
            Js::OpCode::AND, opndReg, opndReg, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // TEST s1, src2 -- zero iff either operand lacked the tag bit
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(opndSrc2);
        instrInsert->InsertBefore(instr);
    }

    // JEQ $fail
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
#else
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    IR::Opnd *           opndReg;
    IR::Instr *          instrPrev = instrInsert->m_prev;
    IR::Instr *          instr;

    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);

    // Normalize: if one operand is a statically known tagged int, make it src2
    // so only src1 needs a runtime check.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }

    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically tagged: nothing to test.
            return instrPrev;
        }

        // Only src1 needs a runtime check.
        GenerateSmIntTest(opndSrc1, instrInsert, labelFail);
        return instrPrev;
    }
    else
    {
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);

        IR::Opnd * opndReg1;

        // Combine both operands' upper-32-bit tags into one 32-bit compare:
        //
        // s1 = MOV src1
        // s1 = SHR s1, VarTag_Shift
        // s2 = MOV src2
        // s2 = SHR s2, 32
        // s1 = OR s1, s2           ------ move both tags to the lower 32 bits
        //      CMP s1, AtomTag_Pair ------ compare the tags together to the expected tag pair
        //      JNE $fail

        // s1 = MOV src1
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = SHR s1, VarTag_Shift
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // s2 = MOV src2
        opndReg1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc2, this->m_func);
        instrInsert->InsertBefore(instr);

        // s2 = SHR s2, 32
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg1, opndReg1, IR::IntConstOpnd::New(32, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = OR s1, s2
        instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndReg1, this->m_func);
        instrInsert->InsertBefore(instr);

        opndReg = opndReg->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();

        // CMP s1, AtomTag_Pair
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag_Pair, TyInt32, this->m_func, true));
        instrInsert->InsertBefore(instr);
    }

    // JNE $fail
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
#endif
  3270. void
  3271. LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndTaggedType)
  3272. {
  3273. // Generate
  3274. //
  3275. // MOV taggedType, type
  3276. // OR taggedType, InlineCacheAuxSlotTypeTag
  3277. // MOV taggedType, type
  3278. {
  3279. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, opndTaggedType, opndType, instrLdSt->m_func);
  3280. instrLdSt->InsertBefore(instrMov);
  3281. }
  3282. // OR taggedType, InlineCacheAuxSlotTypeTag
  3283. {
  3284. IR::IntConstOpnd * opndAuxSlotTag = IR::IntConstOpnd::New(InlineCacheAuxSlotTypeTag, TyMachPtr, instrLdSt->m_func);
  3285. IR::Instr * instrAnd = IR::Instr::New(Js::OpCode::OR, opndTaggedType, opndTaggedType, opndAuxSlotTag, instrLdSt->m_func);
  3286. instrLdSt->InsertBefore(instrAnd);
  3287. }
  3288. }
  3289. ///----------------------------------------------------------------------------
  3290. ///
  3291. /// LowererMD::GenerateFastLdMethodFromFlags
  3292. ///
  3293. /// Make use of the helper to cache the type and slot index used to do a LdFld
  3294. /// and do an inline load from the appropriate slot if the type hasn't changed
  3295. /// since the last time this LdFld was executed.
  3296. ///
  3297. ///----------------------------------------------------------------------------
  3298. bool
  3299. LowererMD::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
  3300. {
  3301. IR::LabelInstr * labelFallThru;
  3302. IR::LabelInstr * bailOutLabel;
  3303. IR::Opnd * opndSrc;
  3304. IR::Opnd * opndDst;
  3305. IR::RegOpnd * opndBase;
  3306. IR::RegOpnd * opndType;
  3307. IR::RegOpnd * opndInlineCache;
  3308. opndSrc = instrLdFld->GetSrc1();
  3309. AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
  3310. "Expected property sym operand as src of LdFldFlags");
  3311. IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
  3312. Assert(!instrLdFld->DoStackArgsOpt());
  3313. if (propertySymOpnd->IsTypeCheckSeqCandidate())
  3314. {
  3315. AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
  3316. StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
  3317. opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
  3318. }
  3319. else
  3320. {
  3321. opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3322. }
  3323. opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  3324. opndDst = instrLdFld->GetDst();
  3325. opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  3326. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3327. // Label to jump to (or fall through to) when bailing out
  3328. bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);
  3329. instrLdFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), this->m_func));
  3330. IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3331. // Check the flag cache with the untagged type
  3332. this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
  3333. // Blindly do the check for getter flag first and then do the type check
  3334. // We avoid repeated check for getter flag when the function object may be in either
  3335. // inline slots or auxiliary slots
  3336. this->m_lowerer->GenerateFlagInlineCacheCheckForGetterSetter(instrLdFld, opndInlineCache, bailOutLabel);
  3337. this->m_lowerer->GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
  3338. this->m_lowerer->GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
  3339. // Check the flag cache with the tagged type
  3340. instrLdFld->InsertBefore(labelFlagAux);
  3341. IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3342. GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
  3343. this->m_lowerer->GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
  3344. this->m_lowerer->GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
  3345. instrLdFld->InsertBefore(bailOutLabel);
  3346. instrLdFld->InsertAfter(labelFallThru);
  3347. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  3348. // ordering instructions anymore.
  3349. instrLdFld->UnlinkSrc1();
  3350. this->m_lowerer->GenerateBailOut(instrLdFld);
  3351. return true;
  3352. }
  3353. void
  3354. LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
  3355. {
  3356. // Generate
  3357. //
  3358. // MOV r1, type
  3359. // SHR r1, PolymorphicInlineCacheShift
  3360. // AND r1, (size - 1)
  3361. // SHL r1, log2(sizeof(Js::InlineCache))
  3362. // LEA inlineCache, [inlineCache + r1]
  3363. // MOV r1, type
  3364. IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, instrLdSt->m_func);
  3365. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndOffset, opndType, instrLdSt->m_func);
  3366. instrLdSt->InsertBefore(instr);
  3367. IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
  3368. IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
  3369. // instead of generating
  3370. // SHR r1, PolymorphicInlineCacheShift
  3371. // AND r1, (size - 1)
  3372. // SHL r1, log2(sizeof(Js::InlineCache))
  3373. //
  3374. // we can generate:
  3375. // SHR r1, (PolymorphicInlineCacheShift - log2(sizeof(Js::InlineCache))
  3376. // AND r1, (size - 1) << log2(sizeof(Js::InlineCache))
  3377. Assert(rightShiftAmount > leftShiftAmount);
  3378. instr = IR::Instr::New(Js::OpCode::SHR, opndOffset, opndOffset, IR::IntConstOpnd::New(rightShiftAmount - leftShiftAmount, TyUint8, instrLdSt->m_func, true), instrLdSt->m_func);
  3379. instrLdSt->InsertBefore(instr);
  3380. instr = IR::Instr::New(Js::OpCode::AND, opndOffset, opndOffset, IR::IntConstOpnd::New(((__int64)(polymorphicInlineCacheSize - 1) << leftShiftAmount), TyMachReg, instrLdSt->m_func, true), instrLdSt->m_func);
  3381. instrLdSt->InsertBefore(instr);
  3382. // LEA inlineCache, [inlineCache + r1]
  3383. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(opndInlineCache, opndOffset, TyMachPtr, instrLdSt->m_func);
  3384. instr = IR::Instr::New(Js::OpCode::LEA, opndInlineCache, indirOpnd, instrLdSt->m_func);
  3385. instrLdSt->InsertBefore(instr);
  3386. }
IR::Instr *
LowererMD::ChangeToWriteBarrierAssign(IR::Instr * assignInstr, const Func* func)
{
    // Lower 'assignInstr' to a plain machine assign, then append a software
    // write barrier when the destination may be recycler memory and the source
    // may hold a recycler pointer. Returns the instruction the caller should
    // continue lowering from.
#ifdef RECYCLER_WRITE_BARRIER_JIT
    IR::Opnd* dest = assignInstr->GetDst();
    auto threadContextInfo = func->GetTopFunc()->GetThreadContextInfo();
    void* destAddr = nullptr;
    bool isPossibleBarrieredDest = false;

    // Only pointer-sized stores can install a recycler pointer.
    if (TySize[dest->GetType()] == sizeof(void*))
    {
        if (dest->IsIndirOpnd())
        {
            Assert(!dest->AsIndirOpnd()->HasAddrKind());
            isPossibleBarrieredDest = true;
        }
        else if (dest->IsMemRefOpnd())
        {
            // looks all thread context field access are from MemRefOpnd
            destAddr = (void*)dest->AsMemRefOpnd()->GetMemLoc();

            // Require a non-null, pointer-aligned address and exclude the known
            // thread-context fields, which are not recycler-allocated.
            isPossibleBarrieredDest = destAddr != nullptr
                && ((intptr_t)destAddr % sizeof(void*)) == 0
                && destAddr != (void*)threadContextInfo->GetImplicitCallFlagsAddr()
                && destAddr != (void*)threadContextInfo->GetDisableImplicitFlagsAddr()
                && destAddr != (void*)threadContextInfo->GetBailOutRegisterSaveSpaceAddr();

            if (isPossibleBarrieredDest)
            {
                Assert(Recycler::WBCheckIsRecyclerAddress((char*)destAddr));
            }
        }
    }
#endif
    IR::Instr * instr = ChangeToAssignNoBarrierCheck(assignInstr);

    // Now insert write barrier if necessary
#ifdef RECYCLER_WRITE_BARRIER_JIT
    if (isPossibleBarrieredDest
        && assignInstr->m_opcode == Js::OpCode::MOV // ignore SSE instructions like MOVSD
        && assignInstr->GetSrc1()->IsWriteBarrierTriggerableValue())
    {
        instr = LowererMD::GenerateWriteBarrier(assignInstr);
    }
#endif
    return instr;
}
  3430. void
  3431. LowererMD::GenerateWriteBarrierAssign(IR::MemRefOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3432. {
  3433. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3434. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3435. if (opndSrc->IsWriteBarrierTriggerableValue())
  3436. {
  3437. void * address = (void *)opndDst->AsMemRefOpnd()->GetMemLoc();
  3438. #ifdef RECYCLER_WRITE_BARRIER_BYTE
  3439. // WriteBarrier-TODO: need to pass card table address through RPC
  3440. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3441. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyInt8, insertBeforeInstr->m_func);
  3442. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntry, IR::IntConstOpnd::New(1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr->m_func);
  3443. insertBeforeInstr->InsertBefore(movInstr);
  3444. #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
  3445. if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(RecyclerVerifyMark))
  3446. {
  3447. this->LoadHelperArgument(insertBeforeInstr, opndDst);
  3448. IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
  3449. insertBeforeInstr->InsertBefore(instrCall);
  3450. this->ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
  3451. }
  3452. #endif
  3453. #else
  3454. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3455. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyMachPtr, assignInstr->m_func);
  3456. IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntry,
  3457. IR::IntConstOpnd::New(1 << ((uint)address >> 7), TyInt32, assignInstr->m_func), assignInstr->m_func);
  3458. assignInstr->InsertBefore(orInstr);
  3459. #endif
  3460. }
  3461. #endif
  3462. }
  3463. void
  3464. LowererMD::GenerateWriteBarrierAssign(IR::IndirOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3465. {
  3466. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3467. if (opndSrc->IsWriteBarrierTriggerableValue())
  3468. {
  3469. IR::RegOpnd * writeBarrierAddrRegOpnd = IR::RegOpnd::New(TyMachPtr, insertBeforeInstr->m_func);
  3470. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::LEA, writeBarrierAddrRegOpnd, opndDst, insertBeforeInstr->m_func));
  3471. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV,
  3472. IR::IndirOpnd::New(writeBarrierAddrRegOpnd, 0, TyMachReg, insertBeforeInstr->m_func), opndSrc, insertBeforeInstr->m_func);
  3473. insertBeforeInstr->InsertBefore(movInstr);
  3474. GenerateWriteBarrier(movInstr);
  3475. // The mov happens above, and it's slightly faster doing it that way since we've already calculated the address we're writing to
  3476. return;
  3477. }
  3478. #endif
  3479. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3480. return;
  3481. }
  3482. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3483. IR::Instr*
  3484. LowererMD::GenerateWriteBarrier(IR::Instr * assignInstr)
  3485. {
  3486. #if defined(RECYCLER_WRITE_BARRIER_BYTE)
  3487. PHASE_PRINT_TRACE(Js::JitWriteBarrierPhase, assignInstr->m_func, _u("Generating write barrier\n"));
  3488. IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachPtr, assignInstr->m_func);
  3489. IR::Instr * loadIndexInstr = IR::Instr::New(Js::OpCode::LEA, indexOpnd, assignInstr->GetDst(), assignInstr->m_func);
  3490. assignInstr->InsertBefore(loadIndexInstr);
  3491. IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
  3492. IR::IntConstOpnd::New(12 /* 1 << 12 = 4096 */, TyInt8, assignInstr->m_func), assignInstr->m_func);
  3493. assignInstr->InsertAfter(shiftBitInstr);
  3494. // The cardtable address is likely 64 bits already so we have to load it to a register
  3495. // That is, we have to do the following:
  3496. // LEA reg1, targetOfWrite
  3497. // SHR reg1, 12
  3498. // MOV reg2, cardTableAddress
  3499. // MOV [reg1 + reg2], 1
  3500. //
  3501. // Instead of doing this:
  3502. // LEA reg1, targetOfWrite
  3503. // SHR reg1, 12
  3504. // MOV [cardTableAddress + reg2], 1
  3505. //
  3506. //TODO: (leish)(swb) hoist RecyclerWriteBarrierManager::GetAddressOfCardTable()
  3507. IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
  3508. IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
  3509. IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindWriteBarrierCardTable, assignInstr->m_func),
  3510. assignInstr->m_func);
  3511. shiftBitInstr->InsertAfter(cardTableAddrInstr);
  3512. IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd,
  3513. TyInt8, assignInstr->m_func);
  3514. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntryOpnd, IR::IntConstOpnd::New(1, TyInt8, assignInstr->m_func), assignInstr->m_func);
  3515. cardTableAddrInstr->InsertAfter(movInstr);
  3516. return loadIndexInstr;
  3517. #else
  3518. Assert(writeBarrierAddrRegOpnd->IsRegOpnd());
  3519. IR::RegOpnd * shiftBitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
  3520. shiftBitOpnd->SetReg(LowererMDArch::GetRegShiftCount());
  3521. IR::Instr * moveShiftBitOpnd = IR::Instr::New(Js::OpCode::MOV, shiftBitOpnd, writeBarrierAddrRegOpnd, assignInstr->m_func);
  3522. assignInstr->InsertBefore(moveShiftBitOpnd);
  3523. IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, shiftBitOpnd, shiftBitOpnd,
  3524. IR::IntConstOpnd::New(7 /* 1 << 7 = 128 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
  3525. assignInstr->InsertBefore(shiftBitInstr);
  3526. IR::RegOpnd * bitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
  3527. IR::Instr * mov1Instr = IR::Instr::New(Js::OpCode::MOV, bitOpnd,
  3528. IR::IntConstOpnd::New(1, TyInt32, assignInstr->m_func), assignInstr->m_func);
  3529. assignInstr->InsertBefore(mov1Instr);
  3530. IR::Instr * bitInstr = IR::Instr::New(Js::OpCode::SHL, bitOpnd, bitOpnd, shiftBitOpnd, assignInstr->m_func);
  3531. assignInstr->InsertBefore(bitInstr);
  3532. IR::RegOpnd * indexOpnd = shiftBitOpnd;
  3533. IR::Instr * indexInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
  3534. IR::IntConstOpnd::New(5 /* 1 << 5 = 32 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
  3535. assignInstr->InsertBefore(indexInstr);
  3536. IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
  3537. IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
  3538. IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindDynamicMisc, assignInstr->m_func),
  3539. assignInstr->m_func);
  3540. assignInstr->InsertBefore(cardTableAddrInstr);
  3541. IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd, LowererMDArch::GetDefaultIndirScale(),
  3542. TyInt32, assignInstr->m_func);
  3543. IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntryOpnd, cardTableEntryOpnd,
  3544. bitOpnd, assignInstr->m_func);
  3545. assignInstr->InsertBefore(orInstr);
  3546. #endif
  3547. }
  3548. #endif
  3549. void
  3550. LowererMD::GenerateStFldFromLocalInlineCache(
  3551. IR::Instr * instrStFld,
  3552. IR::RegOpnd * opndBase,
  3553. IR::Opnd * opndSrc,
  3554. IR::RegOpnd * inlineCache,
  3555. IR::LabelInstr * labelFallThru,
  3556. bool isInlineSlot)
  3557. {
  3558. IR::Instr * instr;
  3559. IR::Opnd* slotIndexOpnd;
  3560. IR::RegOpnd * opndIndirBase = opndBase;
  3561. if (!isInlineSlot)
  3562. {
  3563. // slotArray = MOV base->slots -- load the slot array
  3564. IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3565. IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrStFld->m_func);
  3566. instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrStFld->m_func);
  3567. instrStFld->InsertBefore(instr);
  3568. opndIndirBase = opndSlotArray;
  3569. }
  3570. // slotIndex = MOV [&inlineCache->u.local.inlineSlotOffsetOrAuxSlotIndex] -- load the cached slot offset or index
  3571. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3572. slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrStFld->m_func);
  3573. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrStFld->m_func);
  3574. instrStFld->InsertBefore(instr);
  3575. // [base + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  3576. // [slotArray + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  3577. IR::IndirOpnd * storeLocIndirOpnd = IR::IndirOpnd::New(opndIndirBase, opndSlotIndex,
  3578. LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  3579. GenerateWriteBarrierAssign(storeLocIndirOpnd, opndSrc, instrStFld);
  3580. // JMP $fallthru
  3581. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrStFld->m_func);
  3582. instrStFld->InsertBefore(instr);
  3583. }
  3584. //----------------------------------------------------------------------------
  3585. //
  3586. // LowererMD::GenerateFastScopedLdFld
  3587. //
  3588. // Make use of the helper to cache the type and slot index used to do a ScopedLdFld
  3589. // when the scope is an array of length 1.
  3590. // Extract the only element from array and do an inline load from the appropriate slot
  3591. // if the type hasn't changed since the last time this ScopedLdFld was executed.
  3592. //
  3593. //----------------------------------------------------------------------------
  3594. IR::Instr *
  3595. LowererMD::GenerateFastScopedLdFld(IR::Instr * instrLdScopedFld)
  3596. {
  3597. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  3598. // JNE $helper
  3599. // MOV r1, [base + offset(scopes)] -- load the first scope
  3600. // MOV r2, r1->type
  3601. // CMP r2, [&(inlineCache->u.local.type)] -- check type
  3602. // JNE $helper
  3603. // MOV r1, r1->slots -- load the slots array
  3604. // MOV r2 , [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  3605. // MOV dst, [r1+r2] -- load the value from the slot
  3606. // JMP $fallthru
  3607. // $helper:
  3608. // dst = CALL PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
  3609. // $fallthru:
  3610. IR::RegOpnd * opndBase;
  3611. IR::Instr * instr;
  3612. IR::IndirOpnd * indirOpnd;
  3613. IR::LabelInstr * labelHelper;
  3614. IR::Opnd * opndDst;
  3615. IR::RegOpnd * inlineCache;
  3616. IR::RegOpnd *r1;
  3617. IR::LabelInstr * labelFallThru;
  3618. IR::Opnd *propertySrc = instrLdScopedFld->GetSrc1();
  3619. AssertMsg(propertySrc->IsSymOpnd() && propertySrc->AsSymOpnd()->IsPropertySymOpnd() && propertySrc->AsSymOpnd()->m_sym->IsPropertySym(),
  3620. "Expected property sym operand as src of LdScoped");
  3621. IR::PropertySymOpnd * propertySymOpnd = propertySrc->AsPropertySymOpnd();
  3622. opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  3623. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  3624. AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");
  3625. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  3626. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
  3627. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  3628. instr->SetSrc1(indirOpnd);
  3629. instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
  3630. instrLdScopedFld->InsertBefore(instr);
  3631. // JNE $helper
  3632. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  3633. instrLdScopedFld->InsertBefore(instr);
  3634. // MOV r1, [base + offset(scopes)] -- load the first scope
  3635. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
  3636. r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  3637. instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
  3638. instrLdScopedFld->InsertBefore(instr);
  3639. //first load the inlineCache type
  3640. inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  3641. Assert(inlineCache != nullptr);
  3642. IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3643. opndDst = instrLdScopedFld->GetDst();
  3644. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3645. r1->m_sym->m_isNotNumber = true;
  3646. // Load the type
  3647. this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdScopedFld, r1, opndType, labelHelper);
  3648. // Check the local cache with the tagged type
  3649. IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3650. GenerateLoadTaggedType(instrLdScopedFld, opndType, opndTaggedType);
  3651. instrLdScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdScopedFld, propertySymOpnd), this->m_func));
  3652. Lowerer::GenerateLocalInlineCacheCheck(instrLdScopedFld, opndTaggedType, inlineCache, labelHelper);
  3653. Lowerer::GenerateLdFldFromLocalInlineCache(instrLdScopedFld, r1, opndDst, inlineCache, labelFallThru, false);
  3654. // $helper:
  3655. // dst = CALL PatchGetPropertyScoped(inlineCache, opndBase, propertyId, srcBase, scriptContext)
  3656. // $fallthru:
  3657. instrLdScopedFld->InsertBefore(labelHelper);
  3658. instrLdScopedFld->InsertAfter(labelFallThru);
  3659. return instrLdScopedFld->m_prev;
  3660. }
  3661. //----------------------------------------------------------------------------
  3662. //
  3663. // LowererMD::GenerateFastScopedStFld
  3664. //
  3665. // Make use of the helper to cache the type and slot index used to do a ScopedStFld
  3666. // when the scope is an array of length 1.
  3667. // Extract the only element from array and do an inline load from the appropriate slot
  3668. // if the type hasn't changed since the last time this ScopedStFld was executed.
  3669. //
  3670. //----------------------------------------------------------------------------
  3671. IR::Instr *
  3672. LowererMD::GenerateFastScopedStFld(IR::Instr * instrStScopedFld)
  3673. {
  3674. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  3675. // JNE $helper
  3676. // MOV r1, [base + offset(scopes)] -- load the first scope
  3677. // MOV r2, r1->type
  3678. // CMP r2, [&(inlineCache->u.local.type)] -- check type
  3679. // JNE $helper
  3680. // MOV r1, r1->slots -- load the slots array
  3681. // MOV r2, [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  3682. // [r1 + r2*4] = MOV value -- store the value directly to the slot
  3683. // JMP $fallthru
  3684. // $helper:
  3685. // CALL PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
  3686. // $fallthru:
  3687. IR::RegOpnd * opndBase;
  3688. IR::Instr * instr;
  3689. IR::IndirOpnd * indirOpnd;
  3690. IR::LabelInstr * labelHelper;
  3691. IR::Opnd * opndDst;
  3692. IR::RegOpnd * inlineCache;
  3693. IR::RegOpnd *r1;
  3694. IR::LabelInstr * labelFallThru;
  3695. IR::Opnd *newValue = instrStScopedFld->GetSrc1();
  3696. // IR::Opnd *defaultInstance = instrStScopedFld->UnlinkSrc2();
  3697. opndDst = instrStScopedFld->GetDst();
  3698. AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(),
  3699. "Expected property sym operand as dst of StScoped");
  3700. IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
  3701. opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  3702. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  3703. AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");
  3704. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  3705. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
  3706. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  3707. instr->SetSrc1(indirOpnd);
  3708. instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
  3709. instrStScopedFld->InsertBefore(instr);
  3710. // JNE $helper
  3711. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  3712. instrStScopedFld->InsertBefore(instr);
  3713. // MOV r1, [base + offset(scopes)] -- load the first scope
  3714. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
  3715. r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  3716. instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
  3717. instrStScopedFld->InsertBefore(instr);
  3718. //first load the inlineCache type
  3719. inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  3720. Assert(inlineCache != nullptr);
  3721. IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3722. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3723. r1->m_sym->m_isNotNumber = true;
  3724. // Load the type
  3725. this->m_lowerer->GenerateObjectTestAndTypeLoad(instrStScopedFld, r1, opndType, labelHelper);
  3726. // Check the local cache with the tagged type
  3727. IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3728. GenerateLoadTaggedType(instrStScopedFld, opndType, opndTaggedType);
  3729. instrStScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrStScopedFld, propertySymOpnd), this->m_func));
  3730. Lowerer::GenerateLocalInlineCacheCheck(instrStScopedFld, opndTaggedType, inlineCache, labelHelper);
  3731. GenerateStFldFromLocalInlineCache(instrStScopedFld, r1, newValue, inlineCache, labelFallThru, false);
  3732. // $helper:
  3733. // CALL PatchSetPropertyScoped(inlineCache, opndBase, propertyId, newValue, defaultInstance, scriptContext)
  3734. // $fallthru:
  3735. instrStScopedFld->InsertBefore(labelHelper);
  3736. instrStScopedFld->InsertAfter(labelFallThru);
  3737. return instrStScopedFld->m_prev;
  3738. }
  3739. IR::Opnd *
  3740. LowererMD::CreateStackArgumentsSlotOpnd()
  3741. {
  3742. StackSym *sym = StackSym::New(TyMachReg, this->m_func);
  3743. sym->m_offset = -MachArgsSlotOffset;
  3744. sym->m_allocated = true;
  3745. return IR::SymOpnd::New(sym, TyMachReg, this->m_func);
  3746. }
IR::RegOpnd *
LowererMD::GenerateUntagVar(IR::RegOpnd * src, IR::LabelInstr * labelFail, IR::Instr * assignInstr, bool generateTagCheck)
{
    // Extract the int32 payload from a tagged-int Var held in 'src', inserting
    // the code before 'assignInstr'. When 'generateTagCheck' is set, branch to
    // 'labelFail' if 'src' turns out not to be a tagged int. Returns the int32
    // register holding the untagged value.
    Assert(src->IsVar());

    // MOV valueOpnd, index
    IR::RegOpnd *valueOpnd = IR::RegOpnd::New(TyInt32, this->m_func);

    //
    // Convert Index to 32 bits.
    //
    IR::Opnd * opnd = src->UseWithNewType(TyMachReg, this->m_func);

#if INT32VAR
    // AMD64 tagging scheme: the int payload sits in the low 32 bits.
    if (generateTagCheck)
    {
        Assert(!opnd->IsTaggedInt());
        this->GenerateSmIntTest(opnd, assignInstr, labelFail);
    }

    // Moving into r2 clears the tag bits on AMD64.
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, valueOpnd, opnd, this->m_func);
    assignInstr->InsertBefore(instr);
#else
    // x86 tagging scheme: the value is shifted left with a low tag bit, so
    // shift it back out.
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, valueOpnd, opnd, this->m_func);
    assignInstr->InsertBefore(instr);

    // SAR valueOpnd, Js::VarTag_Shift
    instr = IR::Instr::New(Js::OpCode::SAR, valueOpnd, valueOpnd,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    assignInstr->InsertBefore(instr);

    if (generateTagCheck)
    {
        Assert(!opnd->IsTaggedInt());

        // SAR set the carry flag (CF) to 1 if the lower bit is 1
        // JAE will jmp if CF = 0
        instr = IR::BranchInstr::New(Js::OpCode::JAE, labelFail, this->m_func);
        assignInstr->InsertBefore(instr);
    }
#endif
    return valueOpnd;
}
  3784. IR::RegOpnd *LowererMD::LoadNonnegativeIndex(
  3785. IR::RegOpnd *indexOpnd,
  3786. const bool skipNegativeCheck,
  3787. IR::LabelInstr *const notTaggedIntLabel,
  3788. IR::LabelInstr *const negativeLabel,
  3789. IR::Instr *const insertBeforeInstr)
  3790. {
  3791. Assert(indexOpnd);
  3792. Assert(indexOpnd->IsVar() || indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
  3793. Assert(indexOpnd->GetType() != TyUint32 || skipNegativeCheck);
  3794. Assert(!indexOpnd->IsVar() || notTaggedIntLabel);
  3795. Assert(skipNegativeCheck || negativeLabel);
  3796. Assert(insertBeforeInstr);
  3797. if(indexOpnd->IsVar())
  3798. {
  3799. if (indexOpnd->GetValueType().IsLikelyFloat()
  3800. #ifdef _M_IX86
  3801. && AutoSystemInfo::Data.SSE2Available()
  3802. #endif
  3803. )
  3804. {
  3805. return m_lowerer->LoadIndexFromLikelyFloat(indexOpnd, skipNegativeCheck, notTaggedIntLabel, negativeLabel, insertBeforeInstr);
  3806. }
  3807. // mov intIndex, index
  3808. // sar intIndex, 1
  3809. // jae $notTaggedIntOrNegative
  3810. indexOpnd = m_lowerer->GenerateUntagVar(indexOpnd, notTaggedIntLabel, insertBeforeInstr, !indexOpnd->IsTaggedInt());
  3811. }
  3812. if(!skipNegativeCheck)
  3813. {
  3814. // test index, index
  3815. // js $notTaggedIntOrNegative
  3816. Lowerer::InsertTestBranch(indexOpnd, indexOpnd, Js::OpCode::JSB, negativeLabel, insertBeforeInstr);
  3817. }
  3818. return indexOpnd;
  3819. }
// Inlines fast-path for int Mul/Add or int Mul/Sub. If not int, call MulAdd/MulSub helper
//
// Pattern-matches an Add_A/Sub_A whose immediately preceding real instruction
// is the Mul_A producing one of its sources (i.e. a*b + c or a*b - c), emits
// int31 fast paths for both ops, and lowers the slow path to a single fused
// MulAdd/MulSub helper call. Returns false (leaving both instructions
// untouched) if the pattern doesn't apply; on success sets *pInstrPrev to the
// instruction before the Mul so the caller's lowering loop resumes correctly.
bool LowererMD::TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev)
{
    IR::Instr *instrMul = instrAdd->GetPrevRealInstrOrLabel();
    IR::Opnd *addSrc;
    IR::RegOpnd *addCommonSrcOpnd;

    Assert(instrAdd->m_opcode == Js::OpCode::Add_A || instrAdd->m_opcode == Js::OpCode::Sub_A);

    bool isSub = (instrAdd->m_opcode == Js::OpCode::Sub_A) ? true : false;

    // Mul needs to be a single def reg
    if (instrMul->m_opcode != Js::OpCode::Mul_A || instrMul->GetDst()->IsRegOpnd() == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    if (instrMul->HasBailOutInfo())
    {
        // Bailout will be generated for the Add, but not the Mul.
        // We could handle this, but this path isn't used that much anymore.
        return false;
    }

    IR::RegOpnd *regMulDst = instrMul->GetDst()->AsRegOpnd();

    if (regMulDst->m_sym->m_isSingleDef == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    // Only handle a * b + c, so dst of Mul needs to match left source of Add
    if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc1()))
    {
        addCommonSrcOpnd = instrAdd->GetSrc1()->AsRegOpnd();
        addSrc = instrAdd->GetSrc2();
    }
    else if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc2()))
    {
        addSrc = instrAdd->GetSrc1();
        addCommonSrcOpnd = instrAdd->GetSrc2()->AsRegOpnd();
    }
    else
    {
        return false;
    }

    // Only handle a * b + c where c != a * b
    if (instrAdd->GetSrc1()->IsEqual(instrAdd->GetSrc2()))
    {
        return false;
    }

    // The Mul result must die at the Add, or eliminating the Mul would lose a
    // live value.
    if (addCommonSrcOpnd->m_isTempLastUse == false)
    {
        return false;
    }

    IR::Opnd *mulSrc1 = instrMul->GetSrc1();
    IR::Opnd *mulSrc2 = instrMul->GetSrc2();

    // Both Mul sources provably tagged ints means other (non-fused) lowering
    // handles it better; bail out of the fusion.
    if (mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsTaggedInt()
        && mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsTaggedInt())
    {
        return false;
    }

    // Save prevInstr for the main lower loop
    *pInstrPrev = instrMul->m_prev;

    // Generate int31 fast-path for Mul, go to MulAdd helper if it fails, or one of the source is marked notInt
    if (!(addSrc->IsRegOpnd() && addSrc->AsRegOpnd()->IsNotInt())
        && !(mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsNotInt())
        && !(mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsNotInt()))
    {
        this->GenerateFastMul(instrMul);

        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::Instr *instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        instrMul->InsertBefore(instr);

        // Generate int31 fast-path for Add
        bool success;
        if (isSub)
        {
            success = this->GenerateFastSub(instrAdd);
        }
        else
        {
            success = this->GenerateFastAdd(instrAdd);
        }

        if (!success)
        {
            labelHelper->isOpHelper = false;
        }
        // Generate MulAdd helper call
        instrAdd->InsertBefore(labelHelper);
    }

    // Helper arguments are pushed in reverse: temp/dst hint, scriptContext,
    // add operand, mul src2, mul src1.
    if (instrAdd->dstIsTempNumber)
    {
        m_lowerer->LoadHelperTemp(instrAdd, instrAdd);
    }
    else
    {
        IR::Opnd *tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        this->LoadHelperArgument(instrAdd, tempOpnd);
    }
    this->m_lowerer->LoadScriptContext(instrAdd);

    IR::JnHelperMethod helper;

    // Pick the Left/Right helper variant according to which side of the Add
    // held the Mul result.
    if (addSrc == instrAdd->GetSrc2())
    {
        instrAdd->FreeSrc1();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc2();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubRight : IR::HelperOp_MulAddRight;
    }
    else
    {
        instrAdd->FreeSrc2();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc1();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubLeft : IR::HelperOp_MulAddLeft;
    }

    IR::Opnd *src2 = instrMul->UnlinkSrc2();
    this->LoadHelperArgument(instrAdd, src2);
    IR::Opnd *src1 = instrMul->UnlinkSrc1();
    this->LoadHelperArgument(instrAdd, src1);

    this->ChangeToHelperCall(instrAdd, helper);

    // The fused helper performs the multiply, so the original Mul goes away.
    instrMul->Remove();

    return true;
}
void
LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // Emits an inline fast path for Math.abs on a var operand.
    // Integer path (tagged int), shown here in x86 shape:
    //      TEST src1, AtomTag
    //      JEQ $float
    //      MOV EAX, src
    //      SAR EAX, AtomTag_Int32
    //      CDQ
    //      XOR EAX, EDX
    //      SUB EAX, EDX            ; (x ^ (x>>31)) - (x>>31) == abs(x)
    //      SHL EAX, AtomTag_Int32
    //      JO $labelHelper         ; re-tagging overflowed -> helper
    //      INC EAX
    //      MOV dst, EAX
    //      JMP $done
    // Float path:
    // $float
    //      CMP [src], JavascriptNumber.vtable
    //      JNE $helper
    //      MOVSD r1, [src + offsetof(value)]
    //      ANDPD r1, absDoubleCst  ; clear sign bit
    //      dst = DoubleToVar(r1)
    // Control falls through to insertInstr on success; labelHelper/doneLabel
    // are provided by the caller.
    IR::Instr *instr = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    bool isInt = false;
    bool isNotInt = false;

    // Use the operand's static type info (if any) to prune paths:
    // a known tagged int skips the float path entirely; a known non-int
    // skips the integer path.
    if (src->IsRegOpnd())
    {
        if (src->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
        else if (src->AsRegOpnd()->IsNotInt())
        {
            isNotInt = true;
        }
    }
    else if (src->IsAddrOpnd())
    {
        // Constant tagged-int source: fold abs() at JIT time when the result
        // still fits in a tagged int, and emit a single MOV.
        IR::AddrOpnd *varOpnd = src->AsAddrOpnd();
        Assert(varOpnd->IsVar() && Js::TaggedInt::Is(varOpnd->m_address));
#ifdef _M_X64
        __int64 absValue = ::_abs64(Js::TaggedInt::ToInt32(varOpnd->m_address));
#else
        __int32 absValue = ::abs(Js::TaggedInt::ToInt32(varOpnd->m_address));
#endif
        if (!Js::TaggedInt::IsOverflow(absValue))
        {
            varOpnd->SetAddress(Js::TaggedInt::ToVarUnchecked((__int32)absValue), IR::AddrOpndKindConstantVar);
            instr = IR::Instr::New(Js::OpCode::MOV, dst, varOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
            return;
        }
    }

    // The code below indexes through src, so make sure it lives in a register.
    if (src->IsRegOpnd() == false)
    {
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, this->m_func);
        insertInstr->InsertBefore(instr);
        src = regOpnd;
    }

#ifdef _M_IX86
    // On x86 the float path needs SSE2 (MOVSD/ANDPS); without it we fall back
    // to the helper for non-int inputs.
    bool emitFloatAbs = !isInt && AutoSystemInfo::Data.SSE2Available();
#else
    bool emitFloatAbs = !isInt;
#endif

    if (!isNotInt)
    {
        // Integer fast path.
        if (!isInt)
        {
            // Not statically known to be a tagged int: test the tag and jump
            // to the float path (or straight to the helper if no float path).
            IR::LabelInstr *label = labelHelper;
            if (emitFloatAbs)
            {
                label = labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            }

            GenerateSmIntTest(src, insertInstr, label);
        }

        // MOV EAX, src
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_IX86
        // SAR EAX, AtomTag_Int32 -- untag (x86 tags ints in the low bit).
        instr = IR::Instr::New(Js::OpCode::SAR, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
#endif

        IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());

        // CDQ -- sign-extend EAX into EDX (EDX = x < 0 ? -1 : 0).
        // Note: put EDX on dst to give a def to the EDX lifetime
        instr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

        // XOR EAX, EDX
        instr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

        // SUB EAX, EDX -- together with the XOR this computes abs() branch-free.
        instr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_X64
        // abs(INT_MIN) overflows a 32 bit integer.
        // JO $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
#endif

#ifdef _M_IX86
        // SHL EAX, AtomTag_Int32 -- re-tag; overflow means the result doesn't
        // fit in a tagged int (covers abs(INT_MIN) too).
        instr = IR::Instr::New(Js::OpCode::SHL, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        // JO $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // INC EAX -- set the tag bit.
        instr = IR::Instr::New(Js::OpCode::INC, regEAX, regEAX, this->m_func);
        insertInstr->InsertBefore(instr);
#endif

        // MOV dst, EAX
        instr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_X64
        GenerateInt32ToVarConversion(dst, insertInstr);
#endif
    }

    if (labelFloat)
    {
        // JMP $done -- integer path succeeded, skip the float path.
        instr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
        insertInstr->InsertBefore(instr);

        // $float
        insertInstr->InsertBefore(labelFloat);
    }

    if (emitFloatAbs)
    {
#if defined(_M_IX86)
        // Boxed-double path: verify the object is a JavascriptNumber by vtable,
        // then clear the sign bit of its value and box the result.
        // CMP [src], JavascriptNumber.vtable
        IR::Opnd *opnd = IR::IndirOpnd::New(src->AsRegOpnd(), (int32)0, TyMachPtr, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opnd);
        instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
        insertInstr->InsertBefore(instr);

        // JNE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // MOVSD r1, [src + offsetof(value)]
        opnd = IR::IndirOpnd::New(src->AsRegOpnd(), Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyMachDouble, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regOpnd, opnd, this->m_func);
        insertInstr->InsertBefore(instr);

        this->GenerateFloatAbs(regOpnd, insertInstr);

        // dst = DoubleToVar(r1)
        SaveDoubleToVar(callInstr->GetDst()->AsRegOpnd(), regOpnd, callInstr, insertInstr);
#elif defined(_M_X64)
        // NaN-boxed double path (FLOATVAR): doubles live in the var itself.
        // if (typeof(src) == double)
        IR::RegOpnd *src64 = src->AsRegOpnd();
        GenerateFloatTest(src64, insertInstr, labelHelper);

        // dst64 = MOV src64
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, src64, this->m_func));

        // Unconditionally set the sign bit. This will get XORd away when we remove the tag.
        // dst64 = OR 0x8000000000000000
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::OR, dst, dst, IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func), this->m_func));
#endif
    }
    else if(!isInt)
    {
        // The source is not known to be a tagged int, so either it's definitely not an int (isNotInt), or the int version of
        // abs failed the tag check and jumped here. We can't emit the float version of abs (!emitFloatAbs) due to SSE2 not
        // being available, so jump straight to the helper.
        // JMP $helper
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
  4109. IR::Instr * LowererMD::GenerateFloatAbs(IR::RegOpnd * regOpnd, IR::Instr * insertInstr)
  4110. {
  4111. // ANDPS reg, absDoubleCst
  4112. IR::Opnd * opnd;
  4113. if (regOpnd->IsFloat64())
  4114. {
  4115. opnd = m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueAbsDoubleCst);
  4116. }
  4117. else
  4118. {
  4119. Assert(regOpnd->IsFloat32());
  4120. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsFloatCstAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  4121. }
  4122. // ANDPS has smaller encoding then ANDPD
  4123. IR::Instr * instr = IR::Instr::New(Js::OpCode::ANDPS, regOpnd, regOpnd, opnd, this->m_func);
  4124. insertInstr->InsertBefore(instr);
  4125. Legalize(instr);
  4126. return instr;
  4127. }
  4128. IR::RegOpnd* LowererMD::MaterializeDoubleConstFromInt(intptr_t constAddr, IR::Instr* instr)
  4129. {
  4130. IR::Opnd* constVal = IR::MemRefOpnd::New(constAddr, IRType::TyFloat64, this->m_func);
  4131. IR::RegOpnd * xmmReg = IR::RegOpnd::New(TyFloat64, m_func);
  4132. this->m_lowerer->InsertMove(xmmReg, constVal, instr);
  4133. return xmmReg;
  4134. }
  4135. IR::RegOpnd* LowererMD::MaterializeConstFromBits(int bits, IRType type, IR::Instr* instr)
  4136. {
  4137. IR::Opnd * regBits = IR::RegOpnd::New(TyInt32, m_func);
  4138. this->m_lowerer->InsertMove(regBits, IR::IntConstOpnd::New(bits, TyInt32, m_func), instr);
  4139. IR::RegOpnd * regConst = IR::RegOpnd::New(type, m_func);
  4140. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, regConst, regBits, m_func));
  4141. return regConst;
  4142. }
  4143. IR::Opnd* LowererMD::Subtract2To31(IR::Opnd* src1, IR::Opnd* intMinFP, IRType type, IR::Instr* instr)
  4144. {
  4145. Js::OpCode op = (type == TyFloat32) ? Js::OpCode::SUBSS : Js::OpCode::SUBSD;
  4146. IR::Opnd* adjSrc = IR::RegOpnd::New(type, m_func);
  4147. IR::Instr* sub = IR::Instr::New(op, adjSrc, src1, intMinFP, m_func);
  4148. instr->InsertBefore(sub);
  4149. Legalize(sub);
  4150. return adjSrc;
  4151. }
template <bool Saturate>
IR::Opnd*
LowererMD::GenerateTruncChecks(_In_ IR::Instr* instr, _In_opt_ IR::LabelInstr* doneLabel)
{
    // Emits the range checks for a float->int truncation ahead of instr.
    // Returns the source widened to float64 (src itself if already float64).
    // On the in-range path, control reaches the $conversion label emitted at
    // the end; out-of-range/NaN inputs either saturate (Saturate == true,
    // jumping to doneLabel afterwards) or throw VBSERR_Overflow.
    AnalysisAssert(!Saturate || doneLabel);
    IR::Opnd* dst = instr->GetDst();
    Assert(dst->IsInt32() || dst->IsUInt32());

    // For signed saturation NaN must become 0, distinct from the too-small
    // result (INT32_MIN), so it gets its own label. For unsigned both cases
    // produce 0 and share tooSmallLabel.
    IR::LabelInstr * nanLabel = (Saturate && dst->IsSigned()) ? IR::LabelInstr::New(Js::OpCode::Label, m_func, true) : nullptr;
    IR::LabelInstr * conversion = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * tooSmallLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd * src64 = nullptr;
    if (src1->IsFloat32())
    {
        // Widen float32 sources so all comparisons are done in double.
        src64 = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(src64, src1, instr);
    }
    else
    {
        src64 = src1;
    }

    // Lower bound: src <= (MIN - 1) is too small. For uint32 the bound is -1;
    // for int32 it is INT_MIN - 1 (both exactly representable as doubles).
    IR::RegOpnd* limitReg = MaterializeDoubleConstFromInt(dst->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleNegOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMinMinusOneAddr(), instr);

    m_lowerer->InsertCompareBranch(src64, limitReg, Js::OpCode::BrLe_A, tooSmallLabel, instr);

    // Upper bound: src < (MAX + 1) is in range and falls through to the
    // conversion label. A NaN fails this compare too (handled below).
    limitReg = MaterializeDoubleConstFromInt(dst->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleUintMaxPlusOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMaxPlusOneAddr(), instr);

    m_lowerer->InsertCompareBranch(limitReg, src64, Js::OpCode::BrGt_A, conversion, instr, true /*no NaN check*/);

    if (Saturate)
    {
        // Insert a label to mark this as the start of a helper block, so layout knows to move it
        m_lowerer->InsertLabel(true, instr);
        // JP consumes the parity flag of the preceding compare: set only when
        // the comparison was unordered, i.e. the source is NaN.
        // NaN case is same as too small case for unsigned, so combine them
        instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, dst->IsSigned() ? nanLabel : tooSmallLabel, m_func));

        // Overflow case: clamp to the type's maximum.
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(dst->IsUnsigned() ? UINT32_MAX : INT32_MAX, dst->GetType(), m_func), instr);
        m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        // Underflow case: clamp to the type's minimum.
        instr->InsertBefore(tooSmallLabel);
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(dst->IsUnsigned() ? 0 : INT32_MIN, dst->GetType(), m_func), instr);
        m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        if (dst->IsSigned())
        {
            // Signed NaN case: result is 0.
            instr->InsertBefore(nanLabel);
            m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, dst->GetType(), m_func), instr);
            m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);
        }
    }
    else
    {
        // Non-saturating: any out-of-range (or NaN) input throws.
        instr->InsertBefore(tooSmallLabel);
        m_lowerer->GenerateThrow(IR::IntConstOpnd::New(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), instr);
        //no jump here we aren't coming back
    }

    instr->InsertBefore(conversion);
    return src64;
}
template <bool Saturate>
void
LowererMD::GenerateTruncWithCheck(_In_ IR::Instr * instr)
{
    // Lowers a checked float->int32/uint32 truncation. GenerateTruncChecks
    // emits range/NaN handling; this emits the actual conversion and then
    // retires the original instruction.
    Assert(AutoSystemInfo::Data.SSE2Available());

    IR::LabelInstr * doneLabel = Saturate ? IR::LabelInstr::New(Js::OpCode::Label, m_func) : nullptr;
    IR::Opnd* src64 = GenerateTruncChecks<Saturate>(instr, doneLabel); //converts src to double and checks if MIN <= src <= MAX

    IR::Opnd* dst = instr->GetDst();

    if (dst->IsUnsigned())
    {
        // CVTTSD2SI only produces a signed int32, so values >= 2^31 are
        // handled by subtracting 2^31 first and adding it back into dst:
        //   dst = (src >= 2^31) ? 0x80000000 : 0
        //   dst += CVTTSD2SI(src - (src >= 2^31 ? 2^31 : 0))
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, m_func), instr);
        IR::LabelInstr * skipUnsignedPart = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::Opnd* twoTo31 = MaterializeDoubleConstFromInt(m_func->GetThreadContextInfo()->GetDoubleTwoTo31Addr(), instr);
        m_lowerer->InsertCompareBranch(src64, twoTo31, Js::OpCode::BrLt_A, skipUnsignedPart, instr);
        // NOTE(review): SUBPD mutates src64 in place (only the low lane is
        // consumed by CVTTSD2SI); src64 may alias the original src1 when it
        // was already float64 — presumably src1 is dead after this point.
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SUBPD, src64, src64, twoTo31, m_func));
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0x80000000 /*2^31*/, TyUint32, m_func), instr);
        instr->InsertBefore(skipUnsignedPart);
        IR::Opnd* tmp = IR::RegOpnd::New(TyInt32, m_func);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, tmp, src64, m_func));
        instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, tmp, m_func));
    }
    else
    {
        // Signed: truncate directly (range was already validated).
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, dst, src64, m_func));
    }

    if (Saturate)
    {
        // Saturating out-of-range paths branch here after writing dst.
        instr->InsertBefore(doneLabel);
    }

    // The original instruction has been fully expanded; drop it.
    instr->UnlinkSrc1();
    instr->UnlinkDst();
    instr->Remove();
}

template void LowererMD::GenerateTruncWithCheck<false>(_In_ IR::Instr * instr);
template void LowererMD::GenerateTruncWithCheck<true>(_In_ IR::Instr * instr);
  4244. void
  4245. LowererMD::GenerateCtz(IR::Instr * instr)
  4246. {
  4247. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  4248. Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
  4249. #ifdef _M_IX86
  4250. if (instr->GetSrc1()->IsInt64())
  4251. {
  4252. lowererMDArch.EmitInt64Instr(instr);
  4253. return;
  4254. }
  4255. #endif
  4256. if (AutoSystemInfo::Data.TZCntAvailable())
  4257. {
  4258. instr->m_opcode = Js::OpCode::TZCNT;
  4259. Legalize(instr);
  4260. }
  4261. else
  4262. {
  4263. // dst = BSF src
  4264. // dst = CMOVE dst, 32 // dst is src1 to help reg alloc
  4265. int instrSize = instr->GetSrc1()->GetSize();
  4266. IRType type = instrSize == 8 ? TyInt64 : TyInt32;
  4267. instr->m_opcode = Js::OpCode::BSF;
  4268. Legalize(instr);
  4269. IR::IntConstOpnd * const32 = IR::IntConstOpnd::New(instrSize * 8, type, m_func);
  4270. IR::Instr* cmove = IR::Instr::New(Js::OpCode::CMOVE, instr->GetDst(), instr->GetDst(), const32, this->m_func);
  4271. instr->InsertAfter(cmove);
  4272. Legalize(cmove);
  4273. }
  4274. }
  4275. void
  4276. LowererMD::GeneratePopCnt(IR::Instr * instr)
  4277. {
  4278. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  4279. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
  4280. #ifdef _M_IX86
  4281. if (instr->GetSrc1()->IsInt64())
  4282. {
  4283. lowererMDArch.EmitInt64Instr(instr);
  4284. return;
  4285. }
  4286. #endif
  4287. if (AutoSystemInfo::Data.PopCntAvailable())
  4288. {
  4289. instr->m_opcode = Js::OpCode::POPCNT;
  4290. Legalize(instr);
  4291. }
  4292. else
  4293. {
  4294. int instrSize = instr->GetSrc1()->GetSize();
  4295. LoadHelperArgument(instr, instr->GetSrc1());
  4296. instr->UnlinkSrc1();
  4297. this->ChangeToHelperCall(instr, instrSize == 8 ? IR::HelperPopCnt64 : IR::HelperPopCnt32);
  4298. }
  4299. }
  4300. void
  4301. LowererMD::GenerateClz(IR::Instr * instr)
  4302. {
  4303. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  4304. Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
  4305. #ifdef _M_IX86
  4306. if (instr->GetSrc1()->IsInt64())
  4307. {
  4308. lowererMDArch.EmitInt64Instr(instr);
  4309. return;
  4310. }
  4311. #endif
  4312. if (AutoSystemInfo::Data.LZCntAvailable())
  4313. {
  4314. instr->m_opcode = Js::OpCode::LZCNT;
  4315. Legalize(instr);
  4316. }
  4317. else
  4318. {
  4319. // tmp = BSR src
  4320. // JE $label32
  4321. // dst = SUB 31, tmp
  4322. // dst = SUB 63, tmp; for int64
  4323. // JMP $done
  4324. // label32:
  4325. // dst = mov 32;
  4326. // dst = mov 64; for int64
  4327. // $done
  4328. int instrSize = instr->GetSrc1()->GetSize();
  4329. IRType type = instrSize == 8 ? TyInt64 : TyInt32;
  4330. IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr->m_next);
  4331. IR::Opnd * dst = instr->UnlinkDst();
  4332. IR::Opnd * tmpOpnd = IR::RegOpnd::New(type, m_func);
  4333. instr->SetDst(tmpOpnd);
  4334. instr->m_opcode = Js::OpCode::BSR;
  4335. Legalize(instr);
  4336. IR::LabelInstr * label32 = Lowerer::InsertLabel(false, doneLabel);
  4337. instr = IR::BranchInstr::New(Js::OpCode::JEQ, label32, m_func);
  4338. label32->InsertBefore(instr);
  4339. Lowerer::InsertSub(false, dst, IR::IntConstOpnd::New(instrSize == 8 ? 63 : 31, type, m_func), tmpOpnd, label32);
  4340. Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, label32);
  4341. Lowerer::InsertMove(dst, IR::IntConstOpnd::New(instrSize == 8 ? 64 : 32, type, m_func), doneLabel);
  4342. }
  4343. }
  4344. #if !FLOATVAR
void
LowererMD::GenerateNumberAllocation(IR::RegOpnd * opndDst, IR::Instr * instrInsert, bool isHelper)
{
    // Emits an inline bump-pointer allocation of a JavascriptNumber from the
    // script context's recycler number allocator, falling back to the
    // HelperAllocUninitializedNumber call when the current block is exhausted.
    // opndDst receives the (uninitialized) number pointer. isHelper controls
    // whether the join label is marked as helper code for layout.
    size_t alignedAllocSize = Js::RecyclerJavascriptNumberAllocator::GetAlignedAllocSize(
        m_func->GetScriptContextInfo()->IsRecyclerVerifyEnabled(),
        m_func->GetScriptContextInfo()->GetRecyclerVerifyPad());

    IR::Opnd * endAddressOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorEndAddress);
    IR::Opnd * freeObjectListOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorFreeObjectList);

    // MOV dst, allocator->freeObjectList
    IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::MOV, opndDst, freeObjectListOpnd, this->m_func);
    instrInsert->InsertBefore(loadMemBlockInstr);

    // LEA nextMemBlock, [dst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd,
        IR::IndirOpnd::New(opndDst, alignedAllocSize, TyMachPtr, this->m_func), this->m_func);
    instrInsert->InsertBefore(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    instrInsert->InsertBefore(checkInstr);

    // JA $helper -- bump would run past the block's end address.
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JA, helperLabel, this->m_func);
    instrInsert->InsertBefore(branchInstr);

    // MOV allocator->freeObjectList, nextMemBlock -- commit the bump.
    IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::MOV, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
    instrInsert->InsertBefore(setFreeObjectListInstr);

    // JMP $done
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    IR::BranchInstr * branchToDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
    instrInsert->InsertBefore(branchToDoneInstr);

    // $helper:
    instrInsert->InsertBefore(helperLabel);

    // PUSH allocator -- the helper takes the number allocator as its argument.
    this->LoadHelperArgument(instrInsert, m_lowerer->LoadScriptContextValueOpnd(instrInsert, ScriptContextValue::ScriptContextNumberAllocator));

    // dst = Call AllocUninitializedNumber
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::CALL, opndDst,
        IR::HelperCallOpnd::New(IR::HelperAllocUninitializedNumber, this->m_func), this->m_func);
    instrInsert->InsertBefore(instrCall);
    this->lowererMDArch.LowerCall(instrCall, 0);

    // $done:
    instrInsert->InsertBefore(doneLabel);
}
  4389. #endif
  4390. #ifdef _CONTROL_FLOW_GUARD
void
LowererMD::GenerateCFGCheck(IR::Opnd * entryPointOpnd, IR::Instr * insertBeforeInstr)
{
    // Emits a Control Flow Guard validation of an indirect call target before
    // insertBeforeInstr. When JIT thunks are enabled, targets inside the JIT
    // thunk segment are validated locally with a cheap range check (and
    // aligned down to the thunk boundary), skipping the OS CFG call; all
    // other targets go through __guard_check_icall_fptr.
    bool useJITTrampoline = CONFIG_FLAG(UseJITTrampoline);
    IR::LabelInstr * callLabelInstr = nullptr;
    uintptr_t jitThunkStartAddress = NULL;
    if (useJITTrampoline)
    {
#if ENABLE_OOP_NATIVE_CODEGEN
        if (m_func->IsOOPJIT())
        {
            OOPJITThunkEmitter * jitThunkEmitter = m_func->GetOOPThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        else
#endif
        {
            InProcJITThunkEmitter * jitThunkEmitter = m_func->GetInProcThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        if (jitThunkStartAddress)
        {
            uintptr_t endAddressOfSegment = jitThunkStartAddress + InProcJITThunkEmitter::TotalThunkSize;
            Assert(endAddressOfSegment > jitThunkStartAddress);
            // Generate instructions for local Pre-Reserved Segment Range check
            IR::AddrOpnd * endAddressOfSegmentConstOpnd = IR::AddrOpnd::New(endAddressOfSegment, IR::AddrOpndKindDynamicMisc, m_func);
            IR::RegOpnd *resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            callLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            IR::LabelInstr * cfgLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            // The unsigned compare of (end - entryPoint) against TotalThunkSize
            // covers both "below start" (wraps to a huge value) and "at/above
            // end" in a single branch:
            // resultOpnd = SUB endAddressOfSegmentConstOpnd, entryPointOpnd
            // CMP resultOpnd, TotalThunkSize
            // JAE $cfgLabel
            // AND entryPointOpnd, ~(ThunkSize-1)
            // JMP $callLabel
            m_lowerer->InsertSub(false, resultOpnd, endAddressOfSegmentConstOpnd, entryPointOpnd, insertBeforeInstr);
            m_lowerer->InsertCompareBranch(resultOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::TotalThunkSize, TyMachReg, m_func, true), Js::OpCode::BrGe_A, true, cfgLabelInstr, insertBeforeInstr);
            m_lowerer->InsertAnd(entryPointOpnd, entryPointOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::ThunkAlignmentMask, TyMachReg, m_func, true), insertBeforeInstr);
            m_lowerer->InsertBranch(Js::OpCode::Br, callLabelInstr, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(cfgLabelInstr);
        }
    }
    //MOV ecx, entryPoint -- the CFG check routine takes the target in ECX/RCX.
    IR::RegOpnd * entryPointRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
#if _M_IX86
    entryPointRegOpnd->SetReg(RegECX);
#elif _M_X64
    entryPointRegOpnd->SetReg(RegRCX);
#endif
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr* movInstrEntryPointToRegister = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstrEntryPointToRegister);

    //Generate CheckCFG CALL here
    IR::HelperCallOpnd *cfgCallOpnd = IR::HelperCallOpnd::New(IR::HelperGuardCheckCall, this->m_func);
    IR::Instr* cfgCallInstr = IR::Instr::New(Js::OpCode::CALL, this->m_func);
    this->m_func->SetHasCallsOnSelfAndParents();

#if _M_IX86
    //call[__guard_check_icall_fptr]
    cfgCallInstr->SetSrc1(cfgCallOpnd);
#elif _M_X64
    //mov rax, __guard_check_icall_fptr
    IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, m_func), RegRAX, TyMachPtr, this->m_func);
    IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, cfgCallOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstr);

    //call rax
    cfgCallInstr->SetSrc1(targetOpnd);
#endif

    //CALL cfg(rax) -- faults if the target is not a valid CFG call target.
    insertBeforeInstr->InsertBefore(cfgCallInstr);

    if (jitThunkStartAddress)
    {
        Assert(callLabelInstr);
        if (CONFIG_FLAG(ForceJITCFGCheck))
        {
            // Always generate CFG check to make sure that the address is still valid
            // (label is placed before the MOV so the thunk fast path also
            // falls through into the OS check).
            movInstrEntryPointToRegister->InsertBefore(callLabelInstr);
        }
        else
        {
            insertBeforeInstr->InsertBefore(callLabelInstr);
        }
    }
}
  4473. #endif
void
LowererMD::GenerateFastRecyclerAlloc(size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, IR::LabelInstr* allocHelperLabel, IR::LabelInstr* allocDoneLabel)
{
    // Emits an inline bump-pointer allocation of allocSize bytes from the
    // recycler's per-size heap block allocator, before insertionPointInstr.
    // On success newObjDst holds the new object and control jumps to
    // allocDoneLabel; when the block is exhausted control jumps to the
    // caller-provided allocHelperLabel (the caller emits the slow path).
    IR::Opnd * endAddressOpnd;
    IR::Opnd * freeListOpnd;

    ScriptContextInfo* scriptContext = this->m_func->GetScriptContextInfo();
    void* allocatorAddress;
    uint32 endAddressOffset;
    uint32 freeListOffset;
    size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);

    bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
    // Resolve the address of the size-bucket allocator plus the offsets of
    // its endAddress/freeObjectList fields (handles both in-proc and OOP JIT).
    Recycler::GetNormalHeapBlockAllocatorInfoForNativeAllocation((void*)scriptContext->GetRecyclerAddr(), alignedSize,
        allocatorAddress, endAddressOffset, freeListOffset,
        allowNativeCodeBumpAllocation, this->m_func->IsOOPJIT());

    endAddressOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + endAddressOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorEndAddressRef);
    freeListOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + freeListOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorFreeListRef);
    const IR::AutoReuseOpnd autoReuseTempOpnd(freeListOpnd, m_func);

    // MOV newObjDst, allocator->freeObjectList
    Lowerer::InsertMove(newObjDst, freeListOpnd, insertionPointInstr);

    // LEA nextMemBlock, [newObjDst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd* nextMemBlockSrc = IR::IndirOpnd::New(newObjDst, (int32)alignedSize, TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd, nextMemBlockSrc, this->m_func);
    insertionPointInstr->InsertBefore(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    insertionPointInstr->InsertBefore(checkInstr);
    Legalize(checkInstr);

    // JA $allocHelper -- bump would run past the block's end address.
    IR::BranchInstr * branchToAllocHelperInstr = IR::BranchInstr::New(Js::OpCode::JA, allocHelperLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocHelperInstr);

    // MOV allocator->freeObjectList, nextMemBlock -- commit the bump.
    Lowerer::InsertMove(freeListOpnd, nextMemBlockOpnd, insertionPointInstr, false);

    // JMP $allocDone
    IR::BranchInstr * branchToAllocDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, allocDoneLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocDoneInstr);
}
  4513. #ifdef ENABLE_WASM
  4514. void
  4515. LowererMD::GenerateCopysign(IR::Instr * instr)
  4516. {
  4517. #if defined(_M_IX86)
  4518. // We should only generate this if sse2 is available
  4519. Assert(AutoSystemInfo::Data.SSE2Available());
  4520. #endif
  4521. // ANDPS reg0, absDoubleCst
  4522. // ANDPS reg1, sgnBitDoubleCst
  4523. // ORPS reg0, reg1
  4524. // Copy sign from src2 to src1
  4525. IR::Opnd* src1 = instr->GetSrc1();
  4526. IR::Opnd* src2 = instr->GetSrc2();
  4527. Assert(src1->IsFloat32() || src1->IsFloat64());
  4528. GenerateFloatAbs(src1->AsRegOpnd(), instr);
  4529. IR::MemRefOpnd *memRef = IR::MemRefOpnd::New(src2->IsFloat32() ? this->m_func->GetThreadContextInfo()->GetSgnFloatBitCst() : this->m_func->GetThreadContextInfo()->GetSgnDoubleBitCst(),
  4530. src2->GetType(), this->m_func, src2->IsFloat32() ? IR::AddrOpndKindDynamicFloatRef : IR::AddrOpndKindDynamicDoubleRef);
  4531. IR::Instr* t2 = IR::Instr::New(Js::OpCode::ANDPS, instr->GetSrc2(), instr->GetSrc2(), memRef, m_func);
  4532. instr->InsertBefore(t2);
  4533. Legalize(t2);
  4534. instr->m_opcode = Js::OpCode::ORPS;
  4535. Legalize(instr);
  4536. };
  4537. #endif //ENABLE_WASM
  4538. void
// Boxes a TyFloat64 value into a Var in dstOpnd, inserting the machine
// instructions before instrInsert.
//
//   dstOpnd    - receives the resulting Var (a JavascriptNumber* on !FLOATVAR
//                builds, a NaN-tagged 64-bit value on FLOATVAR builds).
//   opndFloat  - the TyFloat64 source register.
//   instrOrig  - the original (var-producing) instruction; consulted for the
//                temp-number marking on its dst.
//   instrInsert- insertion point; new instructions go before it.
//   isHelper   - whether the emission site is on a helper (cold) path.
LowererMD::SaveDoubleToVar(IR::RegOpnd * dstOpnd, IR::RegOpnd *opndFloat, IR::Instr *instrOrig, IR::Instr *instrInsert, bool isHelper)
{
    Assert(opndFloat->GetType() == TyFloat64);

    // Call JSNumber::ToVar to save the float operand to the result of the original (var) instruction
#if !FLOATVAR
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());

    IR::Opnd * symVTableDst;
    IR::Opnd * symDblDst;
    IR::Opnd * symTypeDst;
    IR::Instr * newInstr;
    IR::Instr * numberInitInsertInstr = nullptr;
    if (instrOrig->dstIsTempNumber)
    {
        // The result can live in a stack-allocated temp JavascriptNumber
        // instead of a heap allocation.
        // Use the original dst to get the temp number sym
        StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrOrig->GetDst(), instrOrig->dstIsTempNumberTransferred);

        // LEA dst, &tempSym
        IR::SymOpnd * symTempSrc = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        IR::Instr * loadTempNumberInstr = IR::Instr::New(Js::OpCode::LEA, dstOpnd, symTempSrc, this->m_func);
        instrInsert->InsertBefore(loadTempNumberInstr);

        // Field destinations within the stack temp: vtable (offset 0),
        // double value, and type pointer.
        symVTableDst = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        symDblDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        if (this->m_lowerer->outerMostLoopLabel == nullptr)
        {
            // If we are not in loop, just insert in place
            numberInitInsertInstr = instrInsert;
        }
        else
        {
            // Otherwise, initialize in the outer most loop top if we haven't initialized it yet.
            // TestAndSet returns whether the sym was already initialized;
            // nullptr here suppresses re-emitting the vtable/type stores.
            numberInitInsertInstr = this->m_lowerer->initializedTempSym->TestAndSet(tempNumberSym->m_id) ?
                nullptr : this->m_lowerer->outerMostLoopLabel;
        }
    }
    else
    {
        // Not a temp: allocate a real JavascriptNumber and write its fields
        // through dstOpnd.
        this->GenerateNumberAllocation(dstOpnd, instrInsert, isHelper);
        symVTableDst = IR::IndirOpnd::New(dstOpnd, 0, TyMachPtr, this->m_func);
        symDblDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        numberInitInsertInstr = instrInsert;
    }

    if (numberInitInsertInstr)
    {
        // Inline the case where the dst is marked as temp.
        IR::Opnd *jsNumberVTable = m_lowerer->LoadVTableValueOpnd(numberInitInsertInstr, VTableValue::VtableJavascriptNumber);

        // MOV dst->vtable, JavascriptNumber::vtable
        newInstr = IR::Instr::New(Js::OpCode::MOV, symVTableDst, jsNumberVTable, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);

        // MOV dst->type, JavascriptNumber_type
        IR::Opnd *typeOpnd = m_lowerer->LoadLibraryValueOpnd(numberInitInsertInstr, LibraryValue::ValueNumberTypeStatic);
        newInstr = IR::Instr::New(Js::OpCode::MOV, symTypeDst, typeOpnd, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
    }

    // MOVSD dst->value, opndFloat ; copy the float result to the temp JavascriptNumber
    newInstr = IR::Instr::New(Js::OpCode::MOVSD, symDblDst, opndFloat, this->m_func);
    instrInsert->InsertBefore(newInstr);
#else
    // FLOATVAR: the double is NaN-boxed directly into the Var bits.
    // s1 = MOVD opndFloat
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::Instr *movd = IR::Instr::New(Js::OpCode::MOVD, s1, opndFloat, m_func);
    instrInsert->InsertBefore(movd);

    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        // Canonicalize NaNs so arbitrary NaN payloads can't collide with the
        // float tag encoding. Layout emitted:
        // s1 = MOVD src
        // tmp = NOT s1
        // tmp = AND tmp, 0x7FF0000000000000ull  // exponent bits all set <=> tmp == 0
        // test tmp, tmp
        // je helper
        // jmp done
        // helper:
        // tmp2 = AND s1, 0x000FFFFFFFFFFFFFull  // mantissa bits; nonzero <=> NaN
        // test tmp2, tmp2
        // je done
        // s1 = JavascriptNumber::k_Nan          // replace with canonical NaN
        // done:
        IR::RegOpnd *tmp = IR::RegOpnd::New(TyMachReg, m_func);
        IR::Instr * newInstr = IR::Instr::New(Js::OpCode::NOT, tmp, s1, m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::MakeDstEquSrc1(newInstr);
        newInstr = IR::Instr::New(Js::OpCode::AND, tmp, tmp, IR::AddrOpnd::New((Js::Var)0x7FF0000000000000, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        IR::LabelInstr* helper = Lowerer::InsertLabel(true, instrInsert);
        Lowerer::InsertTestBranch(tmp, tmp, Js::OpCode::BrEq_A, helper, helper);
        IR::LabelInstr* done = Lowerer::InsertLabel(isHelper, instrInsert);
        Lowerer::InsertBranch(Js::OpCode::Br, done, helper);
        IR::RegOpnd *tmp2 = IR::RegOpnd::New(TyMachReg, m_func);
        newInstr = IR::Instr::New(Js::OpCode::AND, tmp2, s1, IR::AddrOpnd::New((Js::Var)0x000FFFFFFFFFFFFFull, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        done->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        Lowerer::InsertTestBranch(tmp2, tmp2, Js::OpCode::BrEq_A, done, done);
        IR::Opnd * opndNaN = IR::AddrOpnd::New((Js::Var)Js::JavascriptNumber::k_Nan, IR::AddrOpndKindConstantVar, m_func, true);
        Lowerer::InsertMove(s1, opndNaN, done);
    }

    // Apply the float tag:
    // s1 = XOR s1, FloatTag_Value
    // dst = s1
    IR::Instr *setTag = IR::Instr::New(Js::OpCode::XOR,
        s1,
        s1,
        IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value,
            IR::AddrOpndKindConstantVar,
            this->m_func,
            /* dontEncode = */ true),
        this->m_func);
    IR::Instr *movDst = IR::Instr::New(Js::OpCode::MOV, dstOpnd, s1, this->m_func);
    instrInsert->InsertBefore(setTag);
    instrInsert->InsertBefore(movDst);
    LowererMD::Legalize(setTag);
#endif
}
// Lowers a FromVar that loads a float from a Var (expected to be a number),
// including the bailout path taken when the Var is not convertible inline.
//
//   dst         - float destination operand.
//   src         - TyVar source operand.
//   insertInstr - the FromVar instruction being lowered; it is consumed
//                 (removed or converted to a bailout) by this function.
void
LowererMD::EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, insertInstr->HasBailOutInfo());
    if (labelDone == nullptr)
    {
        // Constant source was fully handled inline by EmitLoadFloatCommon.
        // We're done
        insertInstr->Remove();
        return;
    }

    // $Done note: insertAfter
    insertInstr->InsertAfter(labelDone);

    if (!insertInstr->HasBailOutInfo())
    {
        // No bailout: the helper label was not requested, nothing left to emit.
        // $Done
        insertInstr->Remove();
        return;
    }

    Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());

    IR::LabelInstr *labelNoBailOut = nullptr;
    IR::SymOpnd *tempSymOpnd = nullptr;
    if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
    {
        // Try converting via helper before bailing out; only bail out if the
        // helper reports the value was not a convertible primitive.
        if (!this->m_func->tempSymDouble)
        {
            // Lazily create the shared stack slot that receives the helper's
            // double result.
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
        }
        // LEA r3, tempSymDouble
        IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        tempSymOpnd = IR::SymOpnd::New(this->m_func->tempSymDouble, TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);

        // regBoolResult = to_number_fromPrimitive(value, &dst, allowUndef, scriptContext);
        this->m_lowerer->LoadScriptContext(insertInstr);
        IR::IntConstOpnd *allowUndefOpnd;
        // NOTE(review): the enclosing `if` already guarantees
        // BailOutPrimitiveButString here, so the else branch below looks
        // unreachable — confirm against upstream before simplifying.
        if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
        {
            allowUndefOpnd = IR::IntConstOpnd::New(true, TyInt32, this->m_func);
        }
        else
        {
            Assert(insertInstr->GetBailOutKind() == IR::BailOutNumberOnly);
            allowUndefOpnd = IR::IntConstOpnd::New(false, TyInt32, this->m_func);
        }
        this->LoadHelperArgument(insertInstr, allowUndefOpnd);
        this->LoadHelperArgument(insertInstr, reg3Opnd);
        this->LoadHelperArgument(insertInstr, src);
        IR::RegOpnd *regBoolResult = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CALL, regBoolResult, IR::HelperCallOpnd::New(IR::HelperOp_ConvNumber_FromPrimitive, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
        this->lowererMDArch.LowerCall(instr, 0);

        // TEST regBoolResult, regBoolResult
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(regBoolResult);
        instr->SetSrc2(regBoolResult);
        insertInstr->InsertBefore(instr);

        // JNE $noBailOut
        labelNoBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelNoBailOut, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // Bailout code: strip the FromVar down and turn it into the bailout.
    Assert(insertInstr->m_opcode == Js::OpCode::FromVar);
    insertInstr->UnlinkDst();
    insertInstr->FreeSrc1();
    IR::Instr *bailoutInstr = insertInstr;
    insertInstr = bailoutInstr->m_next;
    this->m_lowerer->GenerateBailOut(bailoutInstr);

    // $noBailOut
    if (labelNoBailOut)
    {
        insertInstr->InsertBefore(labelNoBailOut);
        Assert(dst->IsRegOpnd());
        // MOVSD dst, [pResult].f64 -- load the helper's result from the stack slot
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
// Shared helper for loading a float (f32 or f64) from a TyVar source.
//
// Handles three shapes:
//   1. float-const reg sym  -> load the double from its materialized memory ref;
//   2. immediate/float-const opnd -> load via LoadFloatFromNonReg;
//   3. general reg          -> inline checked load with $Store/$Helper/$Done labels.
//
// Returns nullptr when the value was fully handled inline (cases 1/2);
// otherwise returns the $Done label, which the CALLER must insert, with the
// not-yet-inserted $Helper path falling through before it (when
// needHelperLabel is true, $Helper has already been inserted here).
IR::LabelInstr*
LowererMD::EmitLoadFloatCommon(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, bool needHelperLabel)
{
    IR::Instr *instr;

    Assert(src->GetType() == TyVar);
    Assert(dst->IsFloat());
    bool isFloatConst = false;
    IR::RegOpnd *regFloatOpnd = nullptr;

    if (src->IsRegOpnd() && src->AsRegOpnd()->m_sym->m_isFltConst)
    {
        IR::RegOpnd *regOpnd = src->AsRegOpnd();
        Assert(regOpnd->m_sym->m_isSingleDef);
        Js::Var value = regOpnd->m_sym->GetFloatConstValueAsVar_PostGlobOpt();
#if FLOATVAR
        // FLOATVAR: materialize the double in native code data and reference
        // it directly (in-proc) or via the code-data base register (OOP JIT).
        void *pDouble = (double*)NativeCodeDataNewNoFixup(this->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_EmitLoadFloatCommon_Double>, Js::JavascriptNumber::GetValue(value));
        IR::Opnd * doubleRef;
        if (!m_func->IsOOPJIT())
        {
            doubleRef = IR::MemRefOpnd::New(pDouble, TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        else
        {
            int offset = NativeCodeData::GetDataTotalOffset(pDouble);
            doubleRef = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), offset, TyMachDouble,
#if DBG
                NativeCodeData::GetDataDescription(pDouble, m_func->m_alloc),
#endif
                m_func, true);
            // Keep the code-data base sym alive across loop back edges.
            GetLowerer()->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
#else
        // !FLOATVAR: the Var is a heap JavascriptNumber; read its value field.
        IR::MemRefOpnd *doubleRef = IR::MemRefOpnd::New((BYTE*)value + Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func,
            IR::AddrOpndKindDynamicDoubleRef);
#endif
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regFloatOpnd, doubleRef, this->m_func);
        insertInstr->InsertBefore(instr);
        Legalize(instr);
        isFloatConst = true;
    }
    // Src is constant?
    if (src->IsImmediateOpnd() || src->IsFloatConstOpnd())
    {
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        m_lowerer->LoadFloatFromNonReg(src, regFloatOpnd, insertInstr);
        isFloatConst = true;
    }
    if (isFloatConst)
    {
        // Constant path: just convert/move into dst and report "done inline".
        if (dst->GetType() == TyFloat32)
        {
            // CVTSD2SS regOpnd32.f32, regOpnd.f64    -- Convert regOpnd from f64 to f32
            IR::RegOpnd *regOpnd32 = regFloatOpnd->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
            instr = IR::Instr::New(Js::OpCode::CVTSD2SS, regOpnd32, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);

            // MOVSS dst, regOpnd32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, regOpnd32, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, regOpnd
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        return nullptr;
    }
    Assert(src->IsRegOpnd());

    IR::LabelInstr *labelStore = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    if (needHelperLabel)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    else
    {
        // Without a helper path, failures jump straight to $Done.
        labelHelper = labelDone;
    }
    bool const isFloat32 = dst->GetType() == TyFloat32;
    // Need an intermediate f64 register when dst is f32 or not a register.
    IR::RegOpnd *reg2 = ((isFloat32 || !dst->IsRegOpnd()) ? IR::RegOpnd::New(TyMachDouble, this->m_func) : dst->AsRegOpnd());

    // Load the float value in reg2
    this->lowererMDArch.LoadCheckedFloat(src->AsRegOpnd(), reg2, labelStore, labelHelper, insertInstr, needHelperLabel);

    // $Store
    insertInstr->InsertBefore(labelStore);
    if (isFloat32)
    {
        IR::RegOpnd *reg2_32 = reg2->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
        // CVTSD2SS r2_32.f32, r2.f64    -- Convert regOpnd from f64 to f32
        instr = IR::Instr::New(Js::OpCode::CVTSD2SS, reg2_32, reg2, this->m_func);
        insertInstr->InsertBefore(instr);

        // MOVSS dst, r2_32
        instr = IR::Instr::New(Js::OpCode::MOVSS, dst, reg2_32, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    else if (reg2 != dst)
    {
        // MOVSD dst, r2
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // JMP $Done
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    insertInstr->InsertBefore(instr);
    if (needHelperLabel)
    {
        // $Helper
        insertInstr->InsertBefore(labelHelper);
    }
    return labelDone;
}
// Loads a float (f32 or f64) from a TyVar source, emitting a helper call
// (Op_ConvFloat/Op_ConvNumber) on the slow path. Unlike
// EmitLoadFloatFromNumber, insertInstr is only an insertion point and is not
// removed.
//
//   instrBailOut / labelBailOut - optional bailout context: when the bailout
//   kind includes BailOutOnArrayAccessHelperCall, the helper call is replaced
//   by a direct branch to labelBailOut.
void
LowererMD::EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;

    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, true);
    if (labelDone == nullptr)
    {
        // Constant source was fully handled inline.
        // We're done
        return;
    }

    IR::BailOutKind bailOutKind = instrBailOut && instrBailOut->HasBailOutInfo() ? instrBailOut->GetBailOutKind() : IR::BailOutInvalid;

    if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
    {
        // Bail out instead of making the helper call.
        Assert(labelBailOut);
        m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, insertInstr);
        insertInstr->InsertBefore(labelDone);
        return;
    }

    // The helper writes its result through a pointer, so a register dst needs
    // a stack slot to receive it.
    IR::Opnd *memAddress = dst;
    if (dst->IsRegOpnd())
    {
        // Create an f64 stack location to store the result of the helper.
        IR::SymOpnd *symOpnd = IR::SymOpnd::New(StackSym::New(dst->GetType(), this->m_func), dst->GetType(), this->m_func);
        this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(double));
        memAddress = symOpnd;
    }

    // LEA r3, dst
    IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, memAddress, this->m_func);
    insertInstr->InsertBefore(instr);

    // to_number_full(value, &dst, scriptContext);
    // Create dummy binary op to convert into helper
    instr = IR::Instr::New(Js::OpCode::Add_A, this->m_func);
    instr->SetSrc1(src);
    instr->SetSrc2(reg3Opnd);
    insertInstr->InsertBefore(instr);

    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        // The helper may trigger implicit calls; propagate the bailout info
        // onto the helper call, sharing the bailout target if needed.
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    IR::JnHelperMethod helper;
    if (dst->GetType() == TyFloat32)
    {
        helper = IR::HelperOp_ConvFloat_Helper;
    }
    else
    {
        helper = IR::HelperOp_ConvNumber_Helper;
    }

    this->m_lowerer->LowerBinaryHelperMem(instr, helper);

    if (dst->IsRegOpnd())
    {
        // Copy the helper's result from the stack slot back into the register.
        if (dst->GetType() == TyFloat32)
        {
            // MOVSS dst, r32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, [pResult].f64
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }

    // $Done
    insertInstr->InsertBefore(labelDone);
}
  4919. void
  4920. LowererMD::LowerInt4NegWithBailOut(
  4921. IR::Instr *const instr,
  4922. const IR::BailOutKind bailOutKind,
  4923. IR::LabelInstr *const bailOutLabel,
  4924. IR::LabelInstr *const skipBailOutLabel)
  4925. {
  4926. Assert(instr);
  4927. Assert(instr->m_opcode == Js::OpCode::Neg_I4);
  4928. Assert(!instr->HasBailOutInfo());
  4929. Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  4930. Assert(bailOutLabel);
  4931. Assert(instr->m_next == bailOutLabel);
  4932. Assert(skipBailOutLabel);
  4933. instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
  4934. instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
  4935. // Lower the instruction
  4936. instr->m_opcode = Js::OpCode::NEG;
  4937. Legalize(instr);
  4938. if(bailOutKind & IR::BailOutOnOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
  4939. {
  4940. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
  4941. }
  4942. if(bailOutKind & IR::BailOutOnNegativeZero)
  4943. {
  4944. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, bailOutLabel, instr->m_func));
  4945. }
  4946. // Skip bailout
  4947. bailOutLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
  4948. }
// Lowers Add_I4 with a bailout on overflow. The tricky part is the bailout
// path: when the ADD has already clobbered a source (dst aliases src1 and/or
// src2), the original source value must be reconstructed before bailing out
// so the interpreter resumes with correct state.
void
LowererMD::LowerInt4AddWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Add_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    // Narrow all operands to 32-bit for the machine op.
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 & dstEquSrc2)
    {
        // We have:
        //     s1 += s1
        // Which is equivalent to:
        //     s1 <<= 1
        //
        // These overflow a signed 32-bit integer when for the initial s1:
        //     s1 > 0 && (s1 & 0x40000000)  - result is negative after overflow
        //     s1 < 0 && !(s1 & 0x40000000) - result is nonnegative after overflow
        //
        // To restore s1 to its value before the operation, we first do an arithmetic right-shift by one bit to undo the
        // left-shift and preserve the sign of the result after overflow. Since the result after overflow always has the
        // opposite sign from the operands (hence the overflow), we just need to invert the sign of the result. The following
        // restores s1 to its value before the instruction:
        //     s1 = (s1 >> 1) ^ 0x80000000
        //
        // Generate:
        //     sar s1, 1
        //     xor s1, 0x80000000
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::SAR,
                dst,
                dst,
                IR::IntConstOpnd::New(1, TyInt8, instr->m_func),
                instr->m_func)
            );
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::XOR,
                dst,
                dst,
                IR::IntConstOpnd::New(INT32_MIN, TyInt32, instr->m_func, true /* dontEncode */),
                instr->m_func)
            );
    }
    else if(dstEquSrc1 | dstEquSrc2)
    {
        // We have:
        //     s1 += s2
        // Or:
        //     s1 = s2 + s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 -= s2
        //
        // Generate:
        //     sub s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        // Note: inserted right AFTER the bailout label, so it runs only on the
        // bailout path.
        bailOutLabel->InsertAfter(IR::Instr::New(Js::OpCode::SUB, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }

    // Lower the instruction
    ChangeToAdd(instr, true /* needFlags */);
    Legalize(instr);

    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));

    // Fall through to bailOutLabel
}
// Lowers Sub_I4 with a bailout on overflow. As with Add_I4, a source that was
// clobbered by the SUB (dst aliasing exactly one source) is reconstructed on
// the bailout path. When dst aliases BOTH sources (s1 -= s1), the result is 0
// and cannot overflow, so no restore code is needed (hence the XOR test).
void
LowererMD::LowerInt4SubWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Sub_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    // Narrow all operands to 32-bit for the machine op.
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 ^ dstEquSrc2)
    {
        // We have:
        //     s1 -= s2
        // Or:
        //     s1 = s2 - s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 += s2
        // Or:
        //     s1 = s2 - s1
        //
        // Generate:
        //     neg s1 - only for second case
        //     add s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        if(dstEquSrc2)
        {
            // s1 currently holds (s2 - s1); negate first so the ADD below
            // yields the original s1 = s2 - (s2 - s1).
            startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func));
        }
        startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }

    // Lower the instruction
    ChangeToSub(instr, true /* needFlags */);
    Legalize(instr);

    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));

    // Fall through to bailOutLabel
}
  5101. bool
  5102. LowererMD::GenerateSimplifiedInt4Mul(
  5103. IR::Instr *const mulInstr,
  5104. const IR::BailOutKind bailOutKind,
  5105. IR::LabelInstr *const bailOutLabel)
  5106. {
  5107. if (AutoSystemInfo::Data.IsAtomPlatform())
  5108. {
  5109. // On Atom, always optimize unless phase is off
  5110. if (PHASE_OFF(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) ||
  5111. PHASE_OFF(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5112. return false;
  5113. }
  5114. else
  5115. {
  5116. // On other platforms, don't optimize unless phase is forced
  5117. if (!PHASE_FORCE(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) &&
  5118. !PHASE_FORCE(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5119. return false;
  5120. }
  5121. Assert(mulInstr);
  5122. Assert(mulInstr->m_opcode == Js::OpCode::Mul_I4);
  5123. IR::Instr *instr = mulInstr, *nextInstr;
  5124. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5125. if (!src1->IsIntConstOpnd() && !src2->IsIntConstOpnd())
  5126. return false;
  5127. // if two const operands, GlobOpt would have folded the computation
  5128. Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd()));
  5129. Assert(dst->IsRegOpnd());
  5130. const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
  5131. const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
  5132. const auto constSrcValue = constSrc->AsIntConstOpnd()->AsInt32();
  5133. auto nonConstSrcCopy = nonConstSrc;
  5134. Assert(nonConstSrc->IsRegOpnd());
  5135. bool doOVF = bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
  5136. // don't simplify mul by large numbers with OF check
  5137. if (doOVF && (constSrcValue > 3 || constSrcValue < -3))
  5138. return false;
  5139. switch(constSrcValue)
  5140. {
  5141. case -3:
  5142. case 3:
  5143. // if dst = src, we need to have a copy of the src for the ADD/SUB
  5144. if (dst->IsEqual(nonConstSrc))
  5145. {
  5146. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5147. // MOV
  5148. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5149. }
  5150. instr->UnlinkSrc1();
  5151. instr->UnlinkSrc2();
  5152. // SHL
  5153. instr->m_opcode = Js::OpCode::SHL;
  5154. instr->SetSrc1(nonConstSrc);
  5155. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5156. constSrc->Free(instr->m_func);
  5157. Legalize(instr);
  5158. // JO
  5159. if (doOVF)
  5160. {
  5161. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5162. instr->InsertAfter(nextInstr);
  5163. instr = nextInstr;
  5164. }
  5165. // ADD
  5166. nextInstr = IR::Instr::New(Js::OpCode::ADD, dst, dst, nonConstSrcCopy, instr->m_func);
  5167. instr->InsertAfter(nextInstr);
  5168. instr = nextInstr;
  5169. Legalize(instr);
  5170. if (constSrcValue == -3)
  5171. {
  5172. // JO
  5173. if (doOVF)
  5174. {
  5175. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5176. instr->InsertAfter(nextInstr);
  5177. instr = nextInstr;
  5178. }
  5179. // NEG
  5180. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5181. instr->InsertAfter(nextInstr);
  5182. instr = nextInstr;
  5183. Legalize(instr);
  5184. }
  5185. // last JO inserted by caller
  5186. return true;
  5187. case -2:
  5188. case 2:
  5189. instr->UnlinkSrc1();
  5190. instr->UnlinkSrc2();
  5191. // SHL
  5192. instr->m_opcode = Js::OpCode::SHL;
  5193. instr->SetSrc1(nonConstSrc);
  5194. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5195. constSrc->Free(instr->m_func);
  5196. Legalize(instr);
  5197. if (constSrcValue == -2)
  5198. {
  5199. // JO
  5200. if (doOVF)
  5201. {
  5202. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5203. instr->InsertAfter(nextInstr);
  5204. instr = nextInstr;
  5205. }
  5206. // NEG
  5207. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5208. instr->InsertAfter(nextInstr);
  5209. instr = nextInstr;
  5210. Legalize(instr);
  5211. }
  5212. // last JO inserted by caller
  5213. return true;
  5214. case -1:
  5215. instr->UnlinkSrc1();
  5216. instr->UnlinkSrc2();
  5217. // NEG
  5218. instr->m_opcode = Js::OpCode::NEG;
  5219. instr->SetSrc1(nonConstSrc);
  5220. constSrc->Free(instr->m_func);
  5221. Legalize(instr);
  5222. // JO inserted by caller
  5223. return true;
  5224. case 0:
  5225. instr->FreeSrc1();
  5226. instr->FreeSrc2();
  5227. // MOV
  5228. instr->m_opcode = Js::OpCode::MOV;
  5229. instr->SetSrc1(IR::IntConstOpnd::New((IntConstType) 0, TyInt32, instr->m_func));
  5230. Legalize(instr);
  5231. // JO inserted by caller are removed in later phases
  5232. return true;
  5233. case 1:
  5234. instr->UnlinkSrc1();
  5235. instr->UnlinkSrc2();
  5236. // MOV
  5237. instr->m_opcode = Js::OpCode::MOV;
  5238. instr->SetSrc1(nonConstSrc);
  5239. constSrc->Free(instr->m_func);
  5240. Legalize(instr);
  5241. // JO inserted by caller are removed in later phases
  5242. return true;
  5243. default:
  5244. // large numbers with no OF check
  5245. Assert(!doOVF);
  5246. // 2^i
  5247. // -2^i
  5248. if (Math::IsPow2(constSrcValue) || Math::IsPow2(-constSrcValue))
  5249. {
  5250. uint32 shamt = constSrcValue > 0 ? Math::Log2(constSrcValue) : Math::Log2(-constSrcValue);
  5251. instr->UnlinkSrc1();
  5252. instr->UnlinkSrc2();
  5253. // SHL
  5254. instr->m_opcode = Js::OpCode::SHL;
  5255. instr->SetSrc1(nonConstSrc);
  5256. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5257. constSrc->Free(instr->m_func);
  5258. Legalize(instr);
  5259. if (constSrcValue < 0)
  5260. {
  5261. // NEG
  5262. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5263. instr->InsertAfter(nextInstr);
  5264. Legalize(instr);
  5265. }
  5266. return true;
  5267. }
  5268. // 2^i + 1
  5269. // 2^i - 1
  5270. if (Math::IsPow2(constSrcValue - 1) || Math::IsPow2(constSrcValue + 1))
  5271. {
  5272. bool plusOne = Math::IsPow2(constSrcValue - 1);
  5273. uint32 shamt = plusOne ? Math::Log2(constSrcValue - 1) : Math::Log2(constSrcValue + 1);
  5274. if (dst->IsEqual(nonConstSrc))
  5275. {
  5276. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5277. // MOV
  5278. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5279. }
  5280. instr->UnlinkSrc1();
  5281. instr->UnlinkSrc2();
  5282. // SHL
  5283. instr->m_opcode = Js::OpCode::SHL;
  5284. instr->SetSrc1(nonConstSrc);
  5285. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5286. constSrc->Free(instr->m_func);
  5287. Legalize(instr);
  5288. // ADD/SUB
  5289. nextInstr = IR::Instr::New(plusOne ? Js::OpCode::ADD : Js::OpCode::SUB, dst, dst, nonConstSrcCopy, instr->m_func);
  5290. instr->InsertAfter(nextInstr);
  5291. instr = nextInstr;
  5292. Legalize(instr);
  5293. return true;
  5294. }
  5295. return false;
  5296. }
  5297. }
  5298. void
  5299. LowererMD::LowerInt4MulWithBailOut(
  5300. IR::Instr *const instr,
  5301. const IR::BailOutKind bailOutKind,
  5302. IR::LabelInstr *const bailOutLabel,
  5303. IR::LabelInstr *const skipBailOutLabel)
  5304. {
  5305. Assert(instr);
  5306. Assert(instr->m_opcode == Js::OpCode::Mul_I4);
  5307. Assert(!instr->HasBailOutInfo());
  5308. Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  5309. Assert(bailOutLabel);
  5310. Assert(instr->m_next == bailOutLabel);
  5311. Assert(skipBailOutLabel);
  5312. instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
  5313. instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
  5314. instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
  5315. IR::LabelInstr *checkForNegativeZeroLabel = nullptr;
  5316. if(bailOutKind & IR::BailOutOnNegativeZero)
  5317. {
  5318. // We have:
  5319. // s3 = s1 * s2
  5320. //
  5321. // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
  5322. // bailOut = (s1 < 0 || s2 < 0) (either s1 or s2 has to be zero for the result to be zero, so we don't emit zero checks)
  5323. //
  5324. // Note, however, that if in future we decide to ignore mul overflow in some cases, and overflow occurs with one of the operands as negative,
  5325. // this can lead to bailout. Will handle that case if ever we decide to ignore mul overflow.
  5326. //
  5327. // Generate:
  5328. // $checkForNegativeZeroLabel:
  5329. // test s1, s1
  5330. // js $bailOutLabel
  5331. // test s2, s2
  5332. // jns $skipBailOutLabel
  5333. // (fall through to bail out)
  5334. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5335. Assert(dst->IsRegOpnd());
  5336. Assert(!src1->IsEqual(src2)); // cannot result in -0 if both operands are the same; GlobOpt should have figured that out
  5337. checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
  5338. bailOutLabel->InsertBefore(checkForNegativeZeroLabel);
  5339. if(src1->IsIntConstOpnd() || src2->IsIntConstOpnd())
  5340. {
  5341. Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd())); // if this results in -0, GlobOpt should have avoided type specialization
  5342. const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
  5343. const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
  5344. Assert(nonConstSrc->IsRegOpnd());
  5345. const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5346. newInstr->SetSrc1(nonConstSrc);
  5347. newInstr->SetSrc2(nonConstSrc);
  5348. bailOutLabel->InsertBefore(newInstr);
  5349. const auto constSrcValue = constSrc->AsIntConstOpnd()->GetValue();
  5350. if(constSrcValue == 0)
  5351. {
  5352. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
  5353. }
  5354. else
  5355. {
  5356. Assert(constSrcValue < 0); // cannot result in -0 if one operand is positive; GlobOpt should have figured that out
  5357. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));
  5358. }
  5359. }
  5360. else
  5361. {
  5362. auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5363. newInstr->SetSrc1(src1);
  5364. newInstr->SetSrc2(src1);
  5365. bailOutLabel->InsertBefore(newInstr);
  5366. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JSB, bailOutLabel, instr->m_func));
  5367. newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5368. newInstr->SetSrc1(src2);
  5369. newInstr->SetSrc2(src2);
  5370. bailOutLabel->InsertBefore(newInstr);
  5371. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
  5372. }
  5373. // Fall through to bailOutLabel
  5374. }
  5375. const bool needsOverflowCheck =
  5376. bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
  5377. AssertMsg(!instr->ShouldCheckForNon32BitOverflow() || (needsOverflowCheck && instr->ShouldCheckForNon32BitOverflow()), "Non 32-bit overflow check required without bailout info");
  5378. bool simplifiedMul = LowererMD::GenerateSimplifiedInt4Mul(instr, bailOutKind, bailOutLabel);
  5379. // Lower the instruction
  5380. if (!simplifiedMul)
  5381. {
  5382. LowererMD::ChangeToIMul(instr, needsOverflowCheck);
  5383. }
  5384. const auto insertBeforeInstr = checkForNegativeZeroLabel ? checkForNegativeZeroLabel : bailOutLabel;
  5385. if(needsOverflowCheck)
  5386. {
  5387. // do we care about int32 or non-int32 overflow ?
  5388. if (!simplifiedMul && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
  5389. LowererMD::EmitNon32BitOvfCheck(instr, insertBeforeInstr, bailOutLabel);
  5390. else
  5391. insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
  5392. }
  5393. if(bailOutKind & IR::BailOutOnNegativeZero)
  5394. {
  5395. // On zero, branch to determine whether the result would be -0
  5396. Assert(checkForNegativeZeroLabel);
  5397. const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5398. const auto dst = instr->GetDst();
  5399. newInstr->SetSrc1(dst);
  5400. newInstr->SetSrc2(dst);
  5401. insertBeforeInstr->InsertBefore(newInstr);
  5402. insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, checkForNegativeZeroLabel, instr->m_func));
  5403. }
  5404. // Skip bailout
  5405. insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
  5406. }
// Lowers a Rem_I4 that carries a negative-zero bailout. The caller has already
// placed bailOutLabel immediately after the instruction and provided a
// skipBailOutLabel; this emits the -0 detection sequence in front of the
// bailout label, then lowers the remainder itself.
void
LowererMD::LowerInt4RemWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnNegativeZero);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    // Force int32 views on all operands; Rem_I4 operates on 32-bit values.
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    // Attempt the simplified-rem fast path. When taken, it jumps to
    // skipBailOutLabel having already verified s1 >= 0 (see below).
    bool fastPath = m_lowerer->GenerateSimplifiedInt4Rem(instr, skipBailOutLabel);

    // We have:
    //     s3 = s1 % s2
    //
    // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
    //     bailOut = (s3 == 0 && s1 < 0)
    //
    // Generate:
    //     $checkForNegativeZeroLabel:
    //         test s3, s3
    //         jne $skipBailOutLabel
    //         test s1, s1
    //         jns $skipBailOutLabel
    //         (fall through to bail out)
    IR::Opnd *dst = instr->GetDst(), *src1 = instr->GetSrc1();
    Assert(dst->IsRegOpnd());

    IR::Instr * newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
    newInstr->SetSrc1(dst);
    newInstr->SetSrc2(dst);
    bailOutLabel->InsertBefore(newInstr);
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));

    // Fast path already checks if s1 >= 0
    if (!fastPath)
    {
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        bailOutLabel->InsertBefore(newInstr);
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
    }
    // Fall through to bailOutLabel

    // Lower the instruction
    LowererMDArch::EmitInt4Instr(instr);
}
  5458. IR::Instr *
  5459. LowererMD::LoadFloatZero(IR::Opnd * opndDst, IR::Instr * instrInsert)
  5460. {
  5461. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOVSD_ZERO, opndDst, instrInsert->m_func);
  5462. instrInsert->InsertBefore(instr);
  5463. return instr;
  5464. }
// Loads the floating-point constant 'value' into opndDst, returning the final
// move instruction. +0.0 is materialized with a register xor (LoadFloatZero);
// any other constant is stored in the function's native code data and loaded
// from memory. T may be float or double; the value is cast to opndDst's width.
template <typename T>
IR::Instr *
LowererMD::LoadFloatValue(IR::Opnd * opndDst, T value, IR::Instr * instrInsert)
{
    if (value == 0.0 && !Js::JavascriptNumber::IsNegZero(value))
    {
        // zero can be loaded with "XORPS xmm, xmm" rather than needing memory load
        return LoadFloatZero(opndDst, instrInsert);
    }
    IR::Opnd * opnd;
    void* pValue = nullptr;
    const bool isFloat64 = opndDst->IsFloat64();
    IRType irtype = isFloat64 ? TyMachDouble : TyFloat32;
    // Cast the value to the matching opndDst's type because T might not match
    if (isFloat64)
    {
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_LoadFloatValue_Double>, (double)value);
    }
    else
    {
        Assert(opndDst->IsFloat32());
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), FloatType<DataDesc_LowererMD_LoadFloatValue_Float>, (float)value);
    }
    if (!instrInsert->m_func->IsOOPJIT())
    {
        // In-proc JIT: the constant's address can be referenced directly.
        opnd = IR::MemRefOpnd::New((void*)pValue, irtype,
            instrInsert->m_func, isFloat64 ? IR::AddrOpndKindDynamicDoubleRef : IR::AddrOpndKindDynamicFloatRef);
    }
    else // OOP JIT
    {
        // OOP JIT: first load the native-code-data base address, then access
        // the constant at its offset within that block.
        int offset = NativeCodeData::GetDataTotalOffset(pValue);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
        Lowerer::InsertMove(
            addressRegOpnd,
            IR::MemRefOpnd::New(instrInsert->m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, instrInsert->m_func, IR::AddrOpndKindDynamicNativeCodeDataRef),
            instrInsert);
        opnd = IR::IndirOpnd::New(addressRegOpnd, offset, irtype,
#if DBG
            NativeCodeData::GetDataDescription(pValue, instrInsert->m_func->m_alloc),
#endif
            instrInsert->m_func, true);
    }
    // movsd xmm, [reg+offset]
    IR::Instr * instr = IR::Instr::New(LowererMDArch::GetAssignOp(opndDst->GetType()), opndDst, opnd, instrInsert->m_func);
    instrInsert->InsertBefore(instr);
    Legalize(instr);
    return instr;
}
// Explicit instantiations: float and double are the only constant types loaded.
template IR::Instr * LowererMD::LoadFloatValue<float>(IR::Opnd * opndDst, float value, IR::Instr * instrInsert);
template IR::Instr * LowererMD::LoadFloatValue<double>(IR::Opnd * opndDst, double value, IR::Instr * instrInsert);
// Walks the arg chain backwards from instrArg (each arg's src2 links to the
// previous arg's sym, terminating in the reg defined by StartCall), sinking
// each definition next to its consumer, then relocates the StartCall itself.
// Returns the instruction preceding the relocated StartCall.
IR::Instr *
LowererMD::EnsureAdjacentArgs(IR::Instr * instrArg)
{
    // Ensure that the arg instructions for a given call site are adjacent.
    // This isn't normally desirable for CQ, but it's required by, for instance, the cloner,
    // which must clone a complete call sequence.
    IR::Opnd * opnd = instrArg->GetSrc2();
    IR::Instr * instrNextArg;
    StackSym * sym;

    AssertMsg(opnd, "opnd");
    while (opnd->IsSymOpnd())
    {
        // Sink the linked arg's defining instruction immediately before its consumer.
        sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
        instrNextArg = sym->m_instrDef;
        Assert(instrNextArg);
        instrNextArg->SinkInstrBefore(instrArg);
        instrArg = instrNextArg;
        opnd = instrArg->GetSrc2();
    }
    // Chain ends in a reg opnd whose def must be the StartCall.
    sym = opnd->AsRegOpnd()->m_sym;
    instrNextArg = sym->m_instrDef;
    Assert(instrNextArg && instrNextArg->m_opcode == Js::OpCode::StartCall);

    // The StartCall can be trivially moved down.
    if (instrNextArg->m_next != instrArg)
    {
        instrNextArg->UnlinkStartCallFromBailOutInfo(instrArg);
        instrNextArg->Unlink();
        instrArg->InsertBefore(instrNextArg);
    }

    return instrNextArg->m_prev;
}
  5546. #if INT32VAR
  5547. //
  5548. // Convert an int32 to Var representation.
  5549. //
  5550. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  5551. {
  5552. AssertMsg(TySize[opndSrc->GetType()] == MachPtr, "For this to work it should be a 64-bit register");
  5553. IR::Instr* instr = IR::Instr::New(Js::OpCode::BTS, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  5554. insertInstr->InsertBefore(instr);
  5555. }
  5556. //
  5557. // jump to $labelHelper, based on the result of CMP
  5558. //
  5559. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
  5560. {
  5561. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  5562. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  5563. // s1 = MOV src1 - Move to a temporary
  5564. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  5565. insertInstr->InsertBefore(instr);
  5566. if (instrFirst)
  5567. {
  5568. *instrFirst = instr;
  5569. }
  5570. // s1 = SHR s1, VarTag_Shift
  5571. instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  5572. insertInstr->InsertBefore(instr);
  5573. // CMP s1, AtomTag
  5574. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  5575. instr->SetSrc1(opndReg);
  5576. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func, /* dontEncode = */ true));
  5577. insertInstr->InsertBefore(instr);
  5578. if(fContinueLabel)
  5579. {
  5580. // JEQ $labelHelper
  5581. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5582. }
  5583. else
  5584. {
  5585. // JNE $labelHelper
  5586. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  5587. }
  5588. insertInstr->InsertBefore(instr);
  5589. }
  5590. //
  5591. // If lower 32-bits are zero (value is zero), jump to $helper.
  5592. //
  5593. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  5594. {
  5595. // Cast the var to 32 bit integer.
  5596. if(opndSrc->GetSize() != 4)
  5597. {
  5598. opndSrc = opndSrc->UseWithNewType(TyUint32, this->m_func);
  5599. }
  5600. AssertMsg(TySize[opndSrc->GetType()] == 4, "This technique works only on the 32-bit version");
  5601. // TEST src1, src1
  5602. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5603. instr->SetSrc1(opndSrc);
  5604. instr->SetSrc2(opndSrc);
  5605. insertInstr->InsertBefore(instr);
  5606. if(labelHelper != nullptr)
  5607. {
  5608. // JZ $labelHelper
  5609. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5610. insertInstr->InsertBefore(instr);
  5611. }
  5612. }
  5613. //
  5614. // If top 16 bits are not zero i.e. it is NOT object, jump to $helper.
  5615. //
  5616. bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  5617. {
  5618. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  5619. if (opndSrc->IsTaggedValue() && fContinueLabel)
  5620. {
  5621. // Insert delete branch opcode to tell the dbChecks not to assert on the helper label we may fall through into
  5622. IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
  5623. insertInstr->InsertBefore(fakeBr);
  5624. return false;
  5625. }
  5626. else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
  5627. {
  5628. return false;
  5629. }
  5630. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  5631. // s1 = MOV src1 - Move to a temporary
  5632. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  5633. insertInstr->InsertBefore(instr);
  5634. // s1 = SHR s1, VarTag_Shift
  5635. instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  5636. insertInstr->InsertBefore(instr);
  5637. if (fContinueLabel)
  5638. {
  5639. // JEQ $labelHelper
  5640. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
  5641. insertInstr->InsertBefore(instr);
  5642. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5643. insertInstr->InsertBefore(labelHelper);
  5644. }
  5645. else
  5646. {
  5647. // JNZ $labelHelper
  5648. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
  5649. insertInstr->InsertBefore(instr);
  5650. }
  5651. return true;
  5652. }
  5653. #else
  5654. //
  5655. // Convert an int32 value to a Var.
  5656. //
  5657. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  5658. {
  5659. // SHL r1, AtomTag
  5660. IR::Instr * instr = IR::Instr::New(Js::OpCode::SHL, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func), this->m_func);
  5661. insertInstr->InsertBefore(instr);
  5662. // INC r1
  5663. instr = IR::Instr::New(Js::OpCode::INC, opndSrc, opndSrc, this->m_func);
  5664. insertInstr->InsertBefore(instr);
  5665. }
  5666. //
  5667. // jump to $labelHelper, based on the result of TEST
  5668. //
  5669. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
  5670. {
  5671. if (opndSrc->IsTaggedInt() && !fContinueLabel)
  5672. {
  5673. return;
  5674. }
  5675. else if (opndSrc->IsNotTaggedValue() && fContinueLabel)
  5676. {
  5677. return;
  5678. }
  5679. // TEST src1, AtomTag
  5680. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5681. instr->SetSrc1(opndSrc);
  5682. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
  5683. insertInstr->InsertBefore(instr);
  5684. if (instrFirst)
  5685. {
  5686. *instrFirst = instr;
  5687. }
  5688. if(fContinueLabel)
  5689. {
  5690. // JNE $labelHelper
  5691. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  5692. }
  5693. else
  5694. {
  5695. // JEQ $labelHelper
  5696. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5697. }
  5698. insertInstr->InsertBefore(instr);
  5699. }
  5700. //
  5701. // If value is zero in tagged int representation, jump to $labelHelper.
  5702. //
  5703. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  5704. {
  5705. if (opndSrc->IsNotTaggedValue())
  5706. {
  5707. return;
  5708. }
  5709. // CMP src1, AtomTag
  5710. IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  5711. instr->SetSrc1(opndSrc);
  5712. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func));
  5713. insertInstr->InsertBefore(instr);
  5714. // JEQ $helper
  5715. if(labelHelper != nullptr)
  5716. {
  5717. // JEQ $labelHelper
  5718. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5719. insertInstr->InsertBefore(instr);
  5720. }
  5721. }
//
// If not object, jump to $labelHelper.
//
// Returns false when the outcome is statically known and no test was emitted;
// true when a runtime test was generated.
bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    if (opndSrc->IsTaggedInt() && fContinueLabel)
    {
        // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);
        return false;
    }
    else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
    {
        return false;
    }

    // TEST src1, AtomTag - a set tag bit means tagged int, i.e. not an object
    IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(opndSrc);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
    insertInstr->InsertBefore(instr);

    if (fContinueLabel)
    {
        // JEQ $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // JNE $labelHelper, then poison the operand on the speculative
        // not-object path (mitigates speculative type confusion).
        IR::BranchInstr* branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(branchInstr);
        InsertObjectPoison(opndSrc, branchInstr, insertInstr, false);
    }
    return true;
}
  5760. #endif
  5761. #if FLOATVAR
  5762. //
  5763. // If any of the top 14 bits are not set, then the var is not a float value and hence, jump to $labelHelper.
  5764. //
  5765. void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
  5766. {
  5767. if (opndSrc->GetValueType().IsFloat())
  5768. {
  5769. return;
  5770. }
  5771. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  5772. // s1 = MOV src1 - Move to a temporary
  5773. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  5774. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  5775. insertInstr->InsertBefore(instr);
  5776. // s1 = SHR s1, 50
  5777. instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(50, TyInt8, this->m_func), this->m_func);
  5778. insertInstr->InsertBefore(instr);
  5779. // JZ $helper
  5780. instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
  5781. insertInstr->InsertBefore(instr);
  5782. }
// Emits a runtime check that opndSrc is a tagged float (skipped if its value
// type already proves it), jumping to labelHelper on failure, then untags the
// value by xor-ing out the float tag and returns a new float64 register
// holding the raw double bits.
IR::RegOpnd* LowererMD::CheckFloatAndUntag(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper)
{
    IR::Opnd* floatTag = IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value, IR::AddrOpndKindConstantVar, this->m_func, /* dontEncode = */ true);
    IR::RegOpnd* regOpndFloatTag = IR::RegOpnd::New(TyUint64, this->m_func);

    // MOV floatTagReg, FloatTag_Value
    IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, regOpndFloatTag, floatTag, this->m_func);
    insertInstr->InsertBefore(instr);

    if (!opndSrc->GetValueType().IsFloat())
    {
        // TEST s1, floatTagReg - zero intersection with the tag means not a float
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc);
        instr->SetSrc2(regOpndFloatTag);
        insertInstr->InsertBefore(instr);

        // JZ $helper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // untaggedFloat = XOR floatTagReg, s1 // where untaggedFloat == floatTagReg; use floatTagReg temporarily for the untagged float
    IR::RegOpnd* untaggedFloat = regOpndFloatTag;
    instr = IR::Instr::New(Js::OpCode::XOR, untaggedFloat, regOpndFloatTag, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);

    // MOVD xmm, untaggedFloat - move the raw bits into a float register
    IR::RegOpnd *floatReg = IR::RegOpnd::New(TyMachDouble, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVD, floatReg, untaggedFloat, this->m_func);
    insertInstr->InsertBefore(instr);
    return floatReg;
}
  5810. #else
// Tagged-pointer (non-FLOATVAR) build: a float value is a JavascriptNumber
// object, so compare the object's vtable against VtableJavascriptNumber and
// jump to labelHelper on mismatch.
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Statically known to be a float: no runtime check needed.
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }

    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");

    if(checkForNullInLoopBody && m_func->IsLoopBody())
    {
        // It's possible that the value was determined dead by the jitted function and was not restored. The jitted loop
        // body may not realize that it's dead and may try to use it. Check for null in loop bodies.
        //     test src1, src1
        //     jz $helper (bail out)
        m_lowerer->InsertCompareBranch(
            opndSrc,
            IR::AddrOpnd::NewNull(m_func),
            Js::OpCode::BrEq_A,
            labelHelper,
            insertInstr);
    }

    // CMP [src1], JavascriptNumber vtable
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, insertInstr->m_func);
    instr->SetSrc1(IR::IndirOpnd::New(opndSrc, 0, TyMachPtr, insertInstr->m_func));
    instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
    insertInstr->InsertBefore(instr);

    // JNZ $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE /* JNZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
  5839. #endif
  5840. #if DBG
  5841. //
  5842. // Helps in debugging of fast paths.
  5843. //
  5844. void LowererMD::GenerateDebugBreak( IR::Instr * insertInstr )
  5845. {
  5846. // int 3
  5847. IR::Instr *int3 = IR::Instr::New(Js::OpCode::INT, insertInstr->m_func);
  5848. int3->SetSrc1(IR::IntConstOpnd::New(3, TyInt32, insertInstr->m_func));
  5849. insertInstr->InsertBefore(int3);
  5850. }
  5851. #endif
// Makes dst and src1 the same operand, as two-address machine forms require.
// With verify == true this only asserts that legalization already made them
// equal. For commutative opcodes where dst aliases src2, the sources are
// swapped; for non-commutative ones src2 is first hoisted into a temp so the
// "dst = src1; dst = dst op src2" rewrite cannot clobber src2.
template <bool verify>
void
LowererMD::MakeDstEquSrc1(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->IsLowered());
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());

    // Already in two-address form; nothing to do.
    if(instr->GetDst()->IsEqual(instr->GetSrc1()))
    {
        return;
    }

    if (verify)
    {
        AssertMsg(false, "dst and src1 should be the same at this point. Missing Legalization");
        return;
    }

    if(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2()))
    {
        // Commutative opcodes: swapping the sources is sufficient.
        switch(instr->m_opcode)
        {
#ifdef _M_IX86
        case Js::OpCode::ADC:
#endif
        case Js::OpCode::Add_I4:
        case Js::OpCode::Mul_I4:
        case Js::OpCode::Or_I4:
        case Js::OpCode::Xor_I4:
        case Js::OpCode::And_I4:
        case Js::OpCode::ADD:
        case Js::OpCode::IMUL2:
        case Js::OpCode::OR:
        case Js::OpCode::XOR:
        case Js::OpCode::AND:
        case Js::OpCode::ADDSD:
        case Js::OpCode::MULSD:
        case Js::OpCode::ADDSS:
        case Js::OpCode::MULSS:
        case Js::OpCode::ADDPS:
            // For (a = b & a), generate (a = a & b)
            instr->SwapOpnds();
            return;
        }

        // For (a = b - a), generate (c = a; a = b - c) and fall through
        ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
    }

    // For (a = b - c), generate (a = b; a = a - c)
    IR::Instr *const mov = IR::Instr::New(Js::OpCode::Ld_A, instr->GetDst(), instr->UnlinkSrc1(), instr->m_func);
    instr->InsertBefore(mov);
    ChangeToAssign(mov);
    instr->SetSrc1(instr->GetDst());
}
// Lowers a 64-bit integer instruction. Only reachable on x86, where it is
// delegated to the arch lowerer; unreachable on other targets.
void
LowererMD::EmitInt64Instr(IR::Instr * instr)
{
#ifdef _M_IX86
    lowererMDArch.EmitInt64Instr(instr);
#else
    Assert(UNREACHED);
#endif
}
// Thin delegation to the arch-specific lowering of int32 instructions.
void
LowererMD::EmitInt4Instr(IR::Instr *instr)
{
    LowererMDArch::EmitInt4Instr(instr);
}
// Delegates EmitLoadVar to the arch-specific lowerer.
void
LowererMD::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    lowererMDArch.EmitLoadVar(instrLoad, isFromUint32, isHelper);
}
// Delegates EmitLoadInt32 to the arch-specific lowerer and forwards its result.
bool
LowererMD::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    return lowererMDArch.EmitLoadInt32(instrLoad, conversionFromObjectAllowed, bailOutOnHelper, labelBailOut);
}
// Delegates int -> float conversion to the arch-specific lowerer.
void
LowererMD::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitIntToFloat(dst, src, instrInsert);
}
// Delegates unsigned int -> float conversion to the arch-specific lowerer.
void
LowererMD::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitUIntToFloat(dst, src, instrInsert);
}
// Delegates int -> long conversion to the arch-specific lowerer.
void
LowererMD::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitIntToLong(dst, src, instrInsert);
}
// Delegates unsigned int -> long conversion to the arch-specific lowerer.
void
LowererMD::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitUIntToLong(dst, src, instrInsert);
}
// Delegates long -> int conversion to the arch-specific lowerer.
void
LowererMD::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitLongToInt(dst, src, instrInsert);
}
// Emits a sign-extending move from src1 into dst. src2's size (not src1's)
// selects the source width: 1 -> MOVSX, 2 -> MOVSXW, 4 -> MOVSXD on x64 or a
// plain assign on x86. On x86, an int64 dst is produced as a register pair:
// sign-extend into EAX, then CDQ replicates the sign into EDX.
void LowererMD::EmitSignExtend(IR::Instr * instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();
    Assert(dst && src1 && src2);

    // Src2 is used to determine what's the from type size
    Assert(src2->GetSize() < dst->GetSize());
    IRType fromType = src2->GetType();
    Js::OpCode op = Js::OpCode::MOVSX;
    switch (src2->GetSize())
    {
    case 1: break; // default
    case 2: op = Js::OpCode::MOVSXW; break;
    case 4:
#if _M_X64
        op = Js::OpCode::MOVSXD;
#else
        // 32-bit source into a 32-bit register needs no widening on x86.
        op = LowererMDArch::GetAssignOp(fromType);
#endif
        break;
    default:
        Assert(UNREACHED);
    }

#if _M_IX86
    // Special handling of int64 on x86
    if (dst->IsInt64())
    {
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
        Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src1);
        IR::RegOpnd * eaxReg = IR::RegOpnd::New(RegEAX, TyInt32, m_func);
        IR::RegOpnd * edxReg = IR::RegOpnd::New(RegEDX, TyInt32, m_func);
        // Sign-extend the low word into EAX, then CDQ fills EDX with the sign.
        instr->InsertBefore(IR::Instr::New(op, eaxReg, srcPair.low->UseWithNewType(fromType, m_func), m_func));
        Legalize(instr->m_prev);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, edxReg, m_func));
        Legalize(instr->m_prev);
        m_lowerer->InsertMove(dstPair.low, eaxReg, instr);
        m_lowerer->InsertMove(dstPair.high, edxReg, instr);
    }
    else
#endif
    {
        instr->InsertBefore(IR::Instr::New(op, dst, src1->UseWithNewType(fromType, m_func), m_func));
        Legalize(instr->m_prev);
    }
}
  5999. void
  6000. LowererMD::EmitFloat32ToFloat64(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  6001. {
  6002. // We should only generate this if sse2 is available
  6003. Assert(AutoSystemInfo::Data.SSE2Available());
  6004. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  6005. Assert(src->IsRegOpnd() && src->GetType() == TyFloat32);
  6006. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSS2SD, dst, src, this->m_func));
  6007. }
// Converts a 64-bit integer src to a floating-point dst.
// x86: int64 values live in register pairs, so the conversion is performed by
// a helper chosen from the (dst type, src type) pair.
// x64: CVTSI2SD handles signed values directly. An unsigned value with the
// MSB set is halved (OR-ing the lost low bit back in to preserve rounding),
// converted, and then doubled with ADDSD. A float32 dst converts via float64.
void
LowererMD::EmitInt64toFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instr)
{
#ifdef _M_IX86
    IR::Opnd *srcOpnd = instr->UnlinkSrc1();
    LoadInt64HelperArgument(instr, srcOpnd);

    IR::Instr* callinstr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instr->InsertBefore(callinstr);
    CompileAssert(sizeof(IRType) == 1);
    // Pack (dst type, src type) into one 16-bit key to select the helper.
    const uint16 fromToType = dst->GetType() | (srcOpnd->GetType() << 8);
    IR::JnHelperMethod method = IR::HelperOp_Throw;
    switch (fromToType)
    {
    case TyFloat32 | (TyInt64 << 8) : method = IR::HelperI64TOF32; break;
    case TyFloat32 | (TyUint64 << 8) : method = IR::HelperUI64TOF32; break;
    case TyFloat64 | (TyInt64 << 8) : method = IR::HelperI64TOF64; break;
    case TyFloat64 | (TyUint64 << 8) : method = IR::HelperUI64TOF64; break;
    default:
        Assert(UNREACHED);
    }
    this->ChangeToHelperCall(callinstr, method);
#else
    IR::Opnd* origDst = nullptr;
    if (dst->IsFloat32())
    {
        // Convert into a float64 temp first; narrow to float32 at the end.
        origDst = dst;
        dst = IR::RegOpnd::New(TyFloat64, this->m_func);
    }
    // Inserts newInstr before the instruction being lowered and legalizes it.
    const auto insertLegalize = [instr](IR::Instr* newInstr)
    {
        instr->InsertBefore(newInstr);
        Legalize(newInstr);
    };
    if (src->IsUnsigned())
    {
        insertLegalize(IR::Instr::New(Js::OpCode::TEST, nullptr, src, src, m_func));
        IR::LabelInstr* msbSetLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JSB, msbSetLabel, m_func));
        // MSB not set, simple case
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, m_func));
        insertLegalize(msbSetLabel);
        // MSB set: the value doesn't fit in a signed int64. Convert
        // ((src >> 1) | (src & 1)) and double the result afterwards.
        IR::RegOpnd* halfOpnd = IR::RegOpnd::New(TyInt64, m_func);
        IR::RegOpnd* lsbOpnd = IR::RegOpnd::New(TyInt64, m_func);
        m_lowerer->InsertMove(halfOpnd, src, instr);
        m_lowerer->InsertMove(lsbOpnd, src, instr);
        insertLegalize(IR::Instr::New(Js::OpCode::SHR, halfOpnd, halfOpnd, IR::IntConstOpnd::New(1, TyInt8, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::AND, lsbOpnd, lsbOpnd, IR::Int64ConstOpnd::New(1, TyInt64, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::OR, halfOpnd, halfOpnd, lsbOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, halfOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::ADDSD, dst, dst, dst, m_func));
        insertLegalize(doneLabel);
    }
    else
    {
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
    }
    if (origDst)
    {
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSD2SS, origDst, dst, m_func));
    }
#endif
}
// Emits an overflow check for an IMUL whose result may use more than 32 bits
// but where only the low ignoreOverflowBitCount bits matter. The product's
// high half is in EDX/RDX; sign-extending it from bit (64 -
// ignoreOverflowBitCount) via SHL/SAR and comparing against the original
// detects whether the discarded bits were anything but sign replication, in
// which case control branches to bailOutLabel.
void
LowererMD::EmitNon32BitOvfCheck(IR::Instr *instr, IR::Instr *insertInstr, IR::LabelInstr* bailOutLabel)
{
    AssertMsg(instr->m_opcode == Js::OpCode::IMUL, "IMUL should be used to check for non-32 bit overflow check on x86.");

    IR::RegOpnd *edxSym = IR::RegOpnd::New(TyInt32, instr->m_func);
#ifdef _M_IX86
    edxSym->SetReg(RegEDX);
#else
    edxSym->SetReg(RegRDX);
#endif

    // dummy def for edx to force RegAlloc to generate a lifetime. This is removed later by the Peeps phase.
    IR::Instr *newInstr = IR::Instr::New(Js::OpCode::NOP, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);

    IR::RegOpnd *temp = IR::RegOpnd::New(TyInt32, instr->m_func);
    Assert(instr->ignoreOverflowBitCount > 32);
    uint8 shamt = 64 - instr->ignoreOverflowBitCount;

    // MOV temp, edx
    newInstr = IR::Instr::New(Js::OpCode::MOV, temp, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // SHL temp, shamt
    newInstr = IR::Instr::New(Js::OpCode::SHL, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // SAR temp, shamt - with the SHL above, sign-extends from the relevant bit
    newInstr = IR::Instr::New(Js::OpCode::SAR, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // CMP temp, edx
    newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
    newInstr->SetSrc1(temp);
    newInstr->SetSrc2(edxSym);
    insertInstr->InsertBefore(newInstr);

    // JNE bailOutLabel - the discarded bits carried information: overflow
    Lowerer::InsertBranch(Js::OpCode::JNE, false, bailOutLabel, insertInstr);
}
void LowererMD::ConvertFloatToInt32(IR::Opnd* intOpnd, IR::Opnd* floatOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone, IR::Instr * instInsert)
{
    // Convert floatOpnd to a signed int32 in intOpnd, jumping to labelDone on success.
    // If the truncating conversion overflows (sentinel 0x80000000 / 0x8000000000000000 on x64),
    // control falls through so the caller can emit a slower helper path.
    // labelHelper is only consumed by the ARM version of this routine.
    UNREFERENCED_PARAMETER(labelHelper); // used on ARM
#if defined(_M_IX86)
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Assert((floatOpnd->IsRegOpnd() && floatOpnd->IsFloat()) || (floatOpnd->IsIndirOpnd() && floatOpnd->GetType() == TyMachDouble));
    Assert(intOpnd->GetType() == TyInt32);
    IR::Instr* instr;
    {
#ifdef _M_X64
        // On x64 convert into a 64-bit register first; int64 overflow is then detected
        // by comparing against the 64-bit sign-bit sentinel below.
        IR::Opnd* dstOpnd = IR::RegOpnd::New(TyInt64, m_func);
#else
        IR::Opnd* dstOpnd = intOpnd;
#endif
        // CVTTSD2SI dst, floatOpnd -- truncating (round-toward-zero) conversion
        instr = IR::Instr::New(floatOpnd->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, dstOpnd, floatOpnd, this->m_func);
        instInsert->InsertBefore(instr);
        // CMP dst, 0x80000000 {0x8000000000000000 on x64} -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(dstOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func, true));
        instInsert->InsertBefore(instr);
        Legalize(instr);
#ifdef _M_X64
        // Truncate to int32 for x64. We still need to go to helper though if we have int64 overflow.
        // MOV_TRUNC intOpnd, tmpOpnd
        instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, intOpnd, dstOpnd, this->m_func);
        instInsert->InsertBefore(instr);
#endif
    }
    // JNE $done -- conversion did not produce the overflow sentinel; result is valid
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
    instInsert->InsertBefore(instr);

    // It does overflow - Let's try using FISTTP which uses 64 bits and is relevant only for x86
    // but requires going to memory and should only be used in overflow scenarios
#ifdef _M_IX86
    if (AutoSystemInfo::Data.SSE3Available())
    {
        IR::Opnd* floatStackOpnd;
        // Lazily create the function's shared 8-byte stack temp for the x87 spill.
        StackSym* tempSymDouble = this->m_func->tempSymDouble;
        if (!tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
            tempSymDouble = this->m_func->tempSymDouble;
        }
        IR::Opnd * float64Opnd;
        if (floatOpnd->IsFloat32())
        {
            // Widen a float32 source to float64 before the x87 sequence.
            float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
            instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, floatOpnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            float64Opnd = floatOpnd;
        }
        if (float64Opnd->IsRegOpnd())
        {
            // Spill the XMM value to the stack temp so FLD can read it from memory.
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
            instr = IR::Instr::New(Js::OpCode::MOVSD, floatStackOpnd, float64Opnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            floatStackOpnd = float64Opnd;
        }
        // FLD [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::FLD, floatStackOpnd, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);
        if (!float64Opnd->IsRegOpnd())
        {
            // The input was loaded directly from memory; FISTTP still writes its 64-bit
            // result into the stack temp, so retarget the operand there.
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
        }
        // FISTTP qword ptr [tmpDouble] -- truncating 64-bit store; pops the x87 stack
        instr = IR::Instr::New(Js::OpCode::FISTTP, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);
        // Alias the low dword of the 64-bit result with an int32 sym at the same offset.
        StackSym *intSym = StackSym::New(TyInt32, m_func);
        intSym->m_offset = tempSymDouble->m_offset;
        intSym->m_allocated = true;
        IR::Opnd* lowerBitsOpnd = IR::SymOpnd::New(intSym, TyInt32, m_func);
        // MOV dst, dword ptr [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::MOV, intOpnd, lowerBitsOpnd, m_func);
        instInsert->InsertBefore(instr);
        // TEST dst, dst -- Check for overflow
        // NOTE(review): a non-zero low dword is taken as the final answer here, which
        // matches ToInt32's mod-2^32 semantics (the caller's fallback is Conv_ToInt32Core).
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(intOpnd);
        instr->SetSrc2(intOpnd);
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);
        // CMP [tmpDouble + 4], 0x80000000 -- low dword was zero; inspect the high dword to
        // distinguish a genuine zero-mod-2^32 result from a true FISTTP overflow
        // (result 0x8000000000000000), which must fall through to the helper.
        StackSym* higherBitsSym = StackSym::New(TyInt32, m_func);
        higherBitsSym->m_offset = tempSymDouble->m_offset + 4;
        higherBitsSym->m_allocated = true;
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::SymOpnd::New(higherBitsSym, TyInt32, m_func));
        instr->SetSrc2(IR::IntConstOpnd::New(0x80000000, TyInt32, this->m_func, true));
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);
    }
#endif
}
  6211. IR::Instr *
  6212. LowererMD::InsertConvertFloat64ToInt32(const RoundMode roundMode, IR::Opnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
  6213. {
  6214. Assert(dst);
  6215. Assert(dst->IsInt32());
  6216. Assert(src);
  6217. Assert(src->IsFloat64());
  6218. Assert(insertBeforeInstr);
  6219. // The caller is expected to check for overflow. To have that work be done automatically, use LowererMD::EmitFloatToInt.
  6220. Func *const func = insertBeforeInstr->m_func;
  6221. IR::AutoReuseOpnd autoReuseSrcPlusHalf;
  6222. IR::Instr *instr = nullptr;
  6223. switch (roundMode)
  6224. {
  6225. case RoundModeTowardInteger:
  6226. {
  6227. // Conversion with rounding towards nearest integer is not supported by the architecture. Add 0.5 and do a
  6228. // round-toward-zero conversion instead.
  6229. IR::RegOpnd *const srcPlusHalf = IR::RegOpnd::New(TyFloat64, func);
  6230. autoReuseSrcPlusHalf.Initialize(srcPlusHalf, func);
  6231. Lowerer::InsertAdd(
  6232. false /* needFlags */,
  6233. srcPlusHalf,
  6234. src,
  6235. IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, func,
  6236. IR::AddrOpndKindDynamicDoubleRef),
  6237. insertBeforeInstr);
  6238. instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeTowardZero), dst, srcPlusHalf, func);
  6239. insertBeforeInstr->InsertBefore(instr);
  6240. LowererMD::Legalize(instr);
  6241. return instr;
  6242. }
  6243. case RoundModeHalfToEven:
  6244. {
  6245. instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeHalfToEven), dst, src, func);
  6246. insertBeforeInstr->InsertBefore(instr);
  6247. LowererMD::Legalize(instr);
  6248. return instr;
  6249. }
  6250. default:
  6251. AssertMsg(0, "RoundMode not supported.");
  6252. return nullptr;
  6253. }
  6254. }
void
LowererMD::EmitFloatToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert, IR::Instr *instrBailOut, IR::LabelInstr * labelBailOut)
{
    // Convert float src to int32 dst: try the inline fast path (ConvertFloatToInt32)
    // and fall back to the Conv_ToInt32Core helper on overflow. instrBailOut and
    // labelBailOut optionally carry bailout information for the helper call.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    if (instrBailOut && instrBailOut->HasBailOutInfo())
    {
        bailOutKind = instrBailOut->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
        {
            // Bail out instead of calling helper. If this is happening unconditionally, the caller should instead throw a rejit exception.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrInsert);
            return;
        }
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *instr;

    // Fast path: jumps to $Done on success, falls through to $Helper on overflow.
    ConvertFloatToInt32(dst, src, labelHelper, labelDone, instrInsert);

    // $Helper
    instrInsert->InsertBefore(labelHelper);

    // The helper takes a double argument; widen a float32 source first.
    IR::Opnd * arg = src;
    if (src->IsFloat32())
    {
        arg = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(arg, src, instrInsert);
    }
    instr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instrInsert->InsertBefore(instr);

    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        // Attach the bailout info to the helper call. If instrBailOut still owns the
        // bailout record, split out a shared bail target so both users can reach it.
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // dst = ToInt32Core(src);
    LoadDoubleHelperArgument(instr, arg);
    this->ChangeToHelperCall(instr, IR::HelperConv_ToInt32Core);

    // $Done
    instrInsert->InsertBefore(labelDone);
}
void
LowererMD::EmitLoadVarNoCheck(IR::RegOpnd * dst, IR::RegOpnd * src, IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Box the int32 (or uint32 when isFromUint32) value in src into a Var in dst,
    // without attempting a tagged-int fast path (the caller has already decided
    // boxing is required).
#ifdef _M_IX86
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        // No SSE2: box via a runtime helper call instead of inline float conversion.
        IR::JnHelperMethod helperMethod;
        // PUSH &floatTemp
        IR::Opnd *tempOpnd;
        if (instrLoad->dstIsTempNumber)
        {
            // The "InPlace" helpers write into a stack-allocated temp number.
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtomInPlace : IR::HelperOp_Int32ToAtomInPlace;

            // Use the original dst to get the temp number sym
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrLoad->GetDst(), instrLoad->dstIsTempNumberTransferred);

            IR::Instr *load = this->m_lowerer->InsertLoadStackAddress(tempNumberSym, instrLoad);
            tempOpnd = load->GetDst();
            this->LoadHelperArgument(instrLoad, tempOpnd);
        }
        else
        {
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtom : IR::HelperOp_Int32ToAtom;
        }
        // PUSH memContext
        this->m_lowerer->LoadScriptContext(instrLoad);

        // PUSH s1
        this->LoadHelperArgument(instrLoad, src);

        // dst = ToVar()
        IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, dst,
            IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
        instrLoad->InsertBefore(instr);
        this->LowerCall(instr, 0);
        return;
    }
#endif
    // SSE2 path: convert the integer to a double, then save the double as a var.
    IR::RegOpnd * floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
    if (isFromUint32)
    {
        this->EmitUIntToFloat(floatReg, src, instrLoad);
    }
    else
    {
        this->EmitIntToFloat(floatReg, src, instrLoad);
    }
    this->SaveDoubleToVar(dst, floatReg, instrLoad, instrLoad, isHelper);
}
  6349. void
  6350. LowererMD::ImmedSrcToReg(IR::Instr * instr, IR::Opnd * newOpnd, int srcNum)
  6351. {
  6352. if (srcNum == 2)
  6353. {
  6354. instr->SetSrc2(newOpnd);
  6355. }
  6356. else
  6357. {
  6358. Assert(srcNum == 1);
  6359. instr->SetSrc1(newOpnd);
  6360. }
  6361. }
  6362. IR::LabelInstr *
  6363. LowererMD::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  6364. {
  6365. return lowererMDArch.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  6366. }
  6367. StackSym *
  6368. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot)
  6369. {
  6370. return GetImplicitParamSlotSym(argSlot, this->m_func);
  6371. }
  6372. StackSym *
  6373. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot, Func * func)
  6374. {
  6375. // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
  6376. // Pass in the EBP+8 to start at the function object, the start of the implicit param slots
  6377. StackSym * stackSym = StackSym::NewImplicitParamSym(argSlot, func);
  6378. func->SetArgOffset(stackSym, (2 + argSlot) * MachPtr);
  6379. func->SetHasImplicitParamLoad();
  6380. return stackSym;
  6381. }
  6382. bool LowererMD::GenerateFastAnd(IR::Instr * instrAnd)
  6383. {
  6384. return this->lowererMDArch.GenerateFastAnd(instrAnd);
  6385. }
  6386. bool LowererMD::GenerateFastDivAndRem(IR::Instr* instrDiv, IR::LabelInstr* bailoutLabel)
  6387. {
  6388. return this->lowererMDArch.GenerateFastDivAndRem(instrDiv, bailoutLabel);
  6389. }
  6390. bool LowererMD::GenerateFastXor(IR::Instr * instrXor)
  6391. {
  6392. return this->lowererMDArch.GenerateFastXor(instrXor);
  6393. }
  6394. bool LowererMD::GenerateFastOr(IR::Instr * instrOr)
  6395. {
  6396. return this->lowererMDArch.GenerateFastOr(instrOr);
  6397. }
  6398. bool LowererMD::GenerateFastNot(IR::Instr * instrNot)
  6399. {
  6400. return this->lowererMDArch.GenerateFastNot(instrNot);
  6401. }
  6402. bool LowererMD::GenerateFastShiftLeft(IR::Instr * instrShift)
  6403. {
  6404. return this->lowererMDArch.GenerateFastShiftLeft(instrShift);
  6405. }
  6406. bool LowererMD::GenerateFastShiftRight(IR::Instr * instrShift)
  6407. {
  6408. return this->lowererMDArch.GenerateFastShiftRight(instrShift);
  6409. }
  6410. void LowererMD::GenerateIsJsObjectTest(IR::RegOpnd* instanceReg, IR::Instr* insertInstr, IR::LabelInstr* labelHelper)
  6411. {
  6412. // TEST instanceReg, (Js::AtomTag_IntPtr | Js::FloatTag_Value )
  6413. GenerateObjectTest(instanceReg, insertInstr, labelHelper);
  6414. IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  6415. // MOV typeReg, instanceReg + offsetof(RecyclableObject::type)
  6416. insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
  6417. IR::IndirOpnd::New(instanceReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
  6418. m_func));
  6419. // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
  6420. IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  6421. cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func));
  6422. cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func));
  6423. insertInstr->InsertBefore(cmp);
  6424. // JLE labelHelper
  6425. insertInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JLE, labelHelper, this->m_func));
  6426. }
void
LowererMD::EmitReinterpretPrimitive(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
{
    // Emit a bitwise reinterpretation of src into dst (same bits, different type).
    // Handles 8-byte (int64 <-> float64) and 4-byte (int32 <-> float32) operands.
    Assert(dst && src);
    Assert(dst->GetSize() == src->GetSize());
    Assert(dst->GetType() != src->GetType());
    if (
        // Additional runtime check to prevent unknown behavior
        (dst->GetSize() != src->GetSize()) ||
        // There is nothing to do in this case
        (dst->GetType() == src->GetType())
    )
    {
        // Release-build safety net for the conditions asserted above: fall back to a plain move.
        Lowerer::InsertMove(dst, src, insertBeforeInstr);
        return;
    }

    auto LegalizeInsert = [insertBeforeInstr](IR::Instr* instr)
    {
        Legalize(instr);
        insertBeforeInstr->InsertBefore(instr);
    };

    if (dst->GetSize() == 8)
    {
#if _M_AMD64
        // 64-bit GPRs available: a single MOVQ moves the bits in either direction.
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVQ, dst, src, m_func));
#elif LOWER_SPLIT_INT64
        // 32-bit build: the int64 side is a register pair (low/high dwords).
        if (dst->IsInt64())
        {
            // float64 -> int64 pair:
            // movd xmm2, xmm1
            // movd low_bits, xmm2
            // shufps xmm2, xmm2, 1
            // movd high_bits, xmm2
            Assert(src->IsFloat64());
            Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
            // shufps modifies the register, we shouldn't change the source here
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            Lowerer::InsertMove(tmpDouble, src, insertBeforeInstr);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.low, tmpDouble, m_func));
            // shufps imm=1 brings the second 32-bit lane down to lane 0 for the next MOVD.
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, tmpDouble, tmpDouble, IR::IntConstOpnd::New(1, TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.high, tmpDouble, m_func));
        }
        else
        {
            // int64 pair -> float64:
            // movd xmm0, lowBits;
            // movd xmm1, highBits;
            // shufps xmm0, xmm1, (0 | 2 << 2 | 0 << 4 | 1 << 6);
            // shufps xmm0, xmm0, (0 | 2 << 2 | 3 << 4 | 3 << 6);
            Assert(src->IsInt64());
            Assert(dst->IsFloat64());
            Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src);
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, srcPair.low, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, tmpDouble, srcPair.high, m_func));
            // The two SHUFPS ops interleave the low/high dwords into the bottom qword of dst.
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, tmpDouble, IR::IntConstOpnd::New((0 | 2 << 2 | 0 << 4 | 1 << 6), TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, dst, IR::IntConstOpnd::New((0 | 2 << 2 | 3 << 4 | 3 << 6), TyInt8, m_func, true), m_func));
        }
#endif
    }
    else if (dst->GetSize() == 4)
    {
        // 32bit reinterprets
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, src, m_func));
    }
    else
    {
        Assert(UNREACHED);
    }
}
  6495. void LowererMD::EmitReinterpretFloatToInt(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  6496. {
  6497. Assert(dst->IsInt32() || dst->IsUInt32() || dst->IsInt64());
  6498. Assert(src->IsFloat());
  6499. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  6500. }
  6501. void LowererMD::EmitReinterpretIntToFloat(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  6502. {
  6503. Assert(dst->IsFloat());
  6504. Assert(src->IsInt32() || src->IsUInt32() || src->IsInt64());
  6505. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  6506. }
IR::Instr *
LowererMD::LowerToFloat(IR::Instr *instr)
{
    // Map a machine-independent float opcode onto its SSE form, choosing the SD
    // (float64) or SS (float32) variant from the operand type; float conditional
    // branches are handed off to LowerFloatCondBranch.
    switch (instr->m_opcode)
    {
    case Js::OpCode::Add_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS;
        break;

    case Js::OpCode::Sub_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SUBSD : Js::OpCode::SUBSS;
        break;

    case Js::OpCode::Mul_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::MULSD : Js::OpCode::MULSS;
        break;

    case Js::OpCode::Div_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::DIVSD : Js::OpCode::DIVSS;
        break;

    case Js::OpCode::Neg_A:
    {
        // Negate by XORing the sign bit with a mask constant from the thread context.
        IR::Opnd *opnd;
        instr->m_opcode = Js::OpCode::XORPS;
        if (instr->GetDst()->IsFloat32())
        {
            opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegFloatAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
        }
        else
        {
            Assert(instr->GetDst()->IsFloat64());
            opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegDoubleAddr(), TyMachDouble, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        instr->SetSrc2(opnd);
        break;
    }

    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotLt_A:
    case Js::OpCode::BrNotLe_A:
        // Float branches need NaN-aware compare/jump selection.
        return this->LowerFloatCondBranch(instr->AsBranchInstr());

    default:
        Assume(UNREACHED);
    }

    Legalize(instr);
    return instr;
}
IR::BranchInstr *
LowererMD::LowerFloatCondBranch(IR::BranchInstr *instrBranch, bool ignoreNan)
{
    // Lower a float conditional branch into (U)COMISD/(U)COMISS followed by a
    // condition-code jump, arranging operands and jump opcodes so NaN comparisons
    // behave correctly (unless ignoreNan). Returns the replacement branch.
    Js::OpCode brOpcode = Js::OpCode::InvalidOpCode;
    Js::OpCode cmpOpcode = Js::OpCode::InvalidOpCode;
    IR::Instr *instr;
    bool swapCmpOpnds = false;
    bool addJP = false;
    IR::LabelInstr *labelNaN = nullptr;

    // Generate float compare that behave correctly for NaN's.
    // These branch on unordered:
    //  JB
    //  JBE
    //  JE
    // These don't branch on unordered:
    //  JA
    //  JAE
    //  JNE
    // Unfortunately, only JA and JAE do what we'd like....
    Func * func = instrBranch->m_func;
    IR::Opnd *src1 = instrBranch->UnlinkSrc1();
    IR::Opnd *src2 = instrBranch->UnlinkSrc2();

    Assert(src1->GetType() == src2->GetType());

    switch (instrBranch->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JEQ;

        if (!ignoreNan)
        {
            // Don't jump on NaN's
            labelNaN = instrBranch->GetOrCreateContinueLabel();
            addJP = true;
        }
        break;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JNE;

        if (!ignoreNan)
        {
            // Jump on NaN's
            labelNaN = instrBranch->GetTarget();
            addJP = true;
        }
        break;

    // For the ordered comparisons, swap operands where needed so that JA/JAE
    // (which never branch on unordered/NaN) can express the condition.
    case Js::OpCode::BrLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JAE;
        break;

    case Js::OpCode::BrLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JA;
        break;

    case Js::OpCode::BrGe_A:
        brOpcode = Js::OpCode::JAE;
        break;

    case Js::OpCode::BrGt_A:
        brOpcode = Js::OpCode::JA;
        break;

    // The inverted comparisons use JB/JBE, which do branch on unordered, so a NaN
    // operand takes the branch -- matching !(a op b) semantics.
    case Js::OpCode::BrNotLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JB;
        break;

    case Js::OpCode::BrNotLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JBE;
        break;

    case Js::OpCode::BrNotGe_A:
        brOpcode = Js::OpCode::JB;
        break;

    case Js::OpCode::BrNotGt_A:
        brOpcode = Js::OpCode::JBE;
        break;

    default:
        Assume(UNREACHED);
    }

    // if we haven't set cmpOpcode, then we are using COMISD/COMISS
    if (cmpOpcode == Js::OpCode::InvalidOpCode)
    {
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
    }

    if (swapCmpOpnds)
    {
        IR::Opnd *tmp = src1;
        src1 = src2;
        src2 = tmp;
    }

    // VC generates UCOMISD for BrEq/BrNeq, and COMISD for all others, accordingly to IEEE 754.
    // We'll do the same.

    // COMISD / UCOMISD src1, src2
    IR::Instr *instrCmp = IR::Instr::New(cmpOpcode, func);

    instrCmp->SetSrc1(src1);
    instrCmp->SetSrc2(src2);
    instrBranch->InsertBefore(instrCmp);
    Legalize(instrCmp);

    if (addJP)
    {
        // JP $LabelNaN -- PF is set for unordered compares; route NaN explicitly
        instr = IR::BranchInstr::New(Js::OpCode::JP, labelNaN, func);
        instrBranch->InsertBefore(instr);
    }

    // Jcc $L
    instr = IR::BranchInstr::New(brOpcode, instrBranch->GetTarget(), func);
    instrBranch->InsertBefore(instr);

    instrBranch->Remove();
    return instr->AsBranchInstr();
}
  6684. void LowererMD::HelperCallForAsmMathBuiltin(IR::Instr* instr, IR::JnHelperMethod helperMethodFloat, IR::JnHelperMethod helperMethodDouble)
  6685. {
  6686. Assert(instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::Trunc_A || instr->m_opcode == Js::OpCode::Nearest_A);
  6687. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  6688. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  6689. Assert(!instr->GetSrc2());
  6690. IR::Opnd * argOpnd = instr->UnlinkSrc1();
  6691. IR::JnHelperMethod helperMethod;
  6692. if (argOpnd->IsFloat32())
  6693. {
  6694. helperMethod = helperMethodFloat;
  6695. LoadFloatHelperArgument(instr, argOpnd);
  6696. }
  6697. else
  6698. {
  6699. helperMethod = helperMethodDouble;
  6700. LoadDoubleHelperArgument(instr, argOpnd);
  6701. }
  6702. ChangeToHelperCall(instr, helperMethod);
  6703. }
  6704. void LowererMD::GenerateFastInlineBuiltInCall(IR::Instr* instr, IR::JnHelperMethod helperMethod)
  6705. {
  6706. switch (instr->m_opcode)
  6707. {
  6708. case Js::OpCode::InlineMathSqrt:
  6709. {
  6710. // Sqrt maps directly to the SSE2 instruction.
  6711. // src and dst should already be XMM registers, all we need is just change the opcode.
  6712. Assert(helperMethod == (IR::JnHelperMethod)0);
  6713. Assert(instr->GetSrc2() == nullptr);
  6714. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SQRTSD : Js::OpCode::SQRTSS;
  6715. IR::Opnd *src = instr->GetSrc1();
  6716. IR::Opnd *dst = instr->GetDst();
  6717. if (!src->IsEqual(dst))
  6718. {
  6719. Assert(src->IsRegOpnd() && dst->IsRegOpnd());
  6720. // Force source to be the same as destination to break false dependency on the register
  6721. Lowerer::InsertMove(dst, src, instr, false /* generateWriteBarrier */);
  6722. instr->ReplaceSrc1(dst);
  6723. }
  6724. break;
  6725. }
  6726. case Js::OpCode::InlineMathAbs:
  6727. Assert(helperMethod == (IR::JnHelperMethod)0);
  6728. return GenerateFastInlineBuiltInMathAbs(instr);
  6729. case Js::OpCode::InlineMathPow:
  6730. #ifdef _M_IX86
  6731. if (!instr->GetSrc2()->IsFloat())
  6732. {
  6733. #endif
  6734. this->GenerateFastInlineBuiltInMathPow(instr);
  6735. break;
  6736. #ifdef _M_IX86
  6737. }
  6738. // fallthrough
  6739. #endif
  6740. case Js::OpCode::InlineMathAcos:
  6741. case Js::OpCode::InlineMathAsin:
  6742. case Js::OpCode::InlineMathAtan:
  6743. case Js::OpCode::InlineMathAtan2:
  6744. case Js::OpCode::InlineMathCos:
  6745. case Js::OpCode::InlineMathExp:
  6746. case Js::OpCode::InlineMathLog:
  6747. case Js::OpCode::Expo_A: //** operator reuses InlineMathPow fastpath
  6748. case Js::OpCode::InlineMathSin:
  6749. case Js::OpCode::InlineMathTan:
  6750. {
  6751. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  6752. AssertMsg(instr->GetSrc1()->IsFloat(), "src1 must be float.");
  6753. AssertMsg(!instr->GetSrc2() || instr->GetSrc2()->IsFloat(), "src2 must be float.");
  6754. // Before:
  6755. // dst = <Built-in call> src1, src2
  6756. // After:
  6757. // I386:
  6758. // XMM0 = MOVSD src1
  6759. // CALL helperMethod
  6760. // dst = MOVSD call->dst
  6761. // AMD64:
  6762. // XMM0 = MOVSD src1
  6763. // RAX = MOV helperMethod
  6764. // CALL RAX
  6765. // dst = MOVSD call->dst
  6766. // Src1
  6767. IR::Instr* argOut = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
  6768. IR::RegOpnd* dst1 = IR::RegOpnd::New(nullptr, (RegNum)FIRST_FLOAT_ARG_REG, TyMachDouble, this->m_func);
  6769. dst1->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  6770. argOut->SetDst(dst1);
  6771. argOut->SetSrc1(instr->UnlinkSrc1());
  6772. instr->InsertBefore(argOut);
  6773. // Src2
  6774. if (instr->GetSrc2() != nullptr)
  6775. {
  6776. IR::Instr* argOut2 = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
  6777. IR::RegOpnd* dst2 = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_ARG_REG + 1), TyMachDouble, this->m_func);
  6778. dst2->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  6779. argOut2->SetDst(dst2);
  6780. argOut2->SetSrc1(instr->UnlinkSrc2());
  6781. instr->InsertBefore(argOut2);
  6782. }
  6783. // Call CRT.
  6784. IR::RegOpnd* floatCallDst = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_REG), TyMachDouble, this->m_func); // Dst in XMM0.
  6785. #ifdef _M_IX86
  6786. IR::Instr* floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, this->m_func);
  6787. floatCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
  6788. instr->InsertBefore(floatCall);
  6789. #else
  6790. // s1 = MOV helperAddr
  6791. IR::RegOpnd* s1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  6792. IR::AddrOpnd* helperAddr = IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func);
  6793. IR::Instr* mov = IR::Instr::New(Js::OpCode::MOV, s1, helperAddr, this->m_func);
  6794. instr->InsertBefore(mov);
  6795. // dst(XMM0) = CALL s1
  6796. IR::Instr *floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, s1, this->m_func);
  6797. instr->InsertBefore(floatCall);
  6798. #endif
  6799. instr->m_func->SetHasCallsOnSelfAndParents();
  6800. // Save the result.
  6801. instr->m_opcode = Js::OpCode::MOVSD;
  6802. instr->SetSrc1(floatCall->GetDst());
  6803. break;
  6804. }
  6805. case Js::OpCode::InlineMathFloor:
  6806. case Js::OpCode::InlineMathCeil:
  6807. case Js::OpCode::InlineMathRound:
  6808. #ifdef ENABLE_WASM
  6809. case Js::OpCode::Trunc_A:
  6810. case Js::OpCode::Nearest_A:
  6811. #endif //ENABLE_WASM
  6812. {
  6813. Assert(AutoSystemInfo::Data.SSE4_1Available());
  6814. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsFloat());
  6815. // MOVSD roundedFloat, src
  6816. //
  6817. // if(round)
  6818. // {
  6819. // /* N.B.: the following CMPs are lowered to COMISDs, whose results can only be >, <, or =.
  6820. // In fact, only ">" can be used if NaN has not been handled.
  6821. // */
  6822. // CMP 0.5, roundedFloat
  6823. // JA $ltHalf
  6824. // CMP TwoToFraction, roundedFloat
  6825. // JA $addHalfToRoundSrcLabel
  6826. // J $skipRoundSd (NaN is also handled here)
  6827. // $ltHalf:
  6828. // CMP roundedFloat, -0.5
  6829. // JL $ltNegHalf
  6830. // if (shouldCheckNegZero) {
  6831. // CMP roundedFloat, 0
  6832. // JA $setZero
  6833. // $negZeroTest [Helper]:
  6834. // JB $bailoutLabel
  6835. // isNegZero(src)
  6836. // JE $bailoutLabel
  6837. // J $skipRoundSd
  6838. // } // else: setZero
  6839. // $setZero:
  6840. // MOV roundedFloat, 0
  6841. // J $skipRoundSd
  6842. // $ltNegHalf:
  6843. // CMP roundedFloat, NegTwoToFraction
  6844. // JA $addHalfToRoundSrc
  6845. // J $skipRoundSd
  6846. // $addHalfToRoundSrc:
  6847. // ADDSD roundedFloat, 0.5
  6848. // $skipAddHalf:
  6849. // }
  6850. //
  6851. // if(isNotCeil)
  6852. // {
  6853. // CMP roundedFloat, 0
  6854. // JGE $skipRoundSd
  6855. // }
  6856. // ROUNDSD roundedFloat, roundedFloat, round_mode
  6857. //
  6858. // $skipRoundSd:
  6859. // if(isNotCeil)
  6860. // MOVSD checkNegZeroOpnd, roundedFloat
  6861. // else if (ceil)
  6862. // MOVSD checkNegZeroOpnd, src
  6863. //
  6864. // CMP checkNegZeroOpnd, 0
  6865. // JNE $convertToInt
  6866. //
  6867. // if(instr->ShouldCheckForNegativeZero())
  6868. // {
  6869. // isNegZero CALL IsNegZero(checkNegZeroOpnd)
  6870. // CMP isNegZero, 0
  6871. // JNE $bailoutLabel
  6872. // }
  6873. //
  6874. // $convertToInt:
  6875. // CVT(T)SD2SI dst, roundedFloat //CVTTSD2SI for floor/round and CVTSD2SI for ceil
  6876. // CMP dst 0x80000000
  6877. // JNE $fallthrough
  6878. //
  6879. // if(!sharedBailout)
  6880. // {
  6881. // $bailoutLabel:
  6882. // }
  6883. // GenerateBailout(instr)
  6884. //
  6885. // $fallthrough:
  6886. bool isNotCeil = instr->m_opcode != Js::OpCode::InlineMathCeil;
  6887. // MOVSD roundedFloat, src
  6888. IR::Opnd * src = instr->UnlinkSrc1();
  6889. IR::RegOpnd* roundedFloat = IR::RegOpnd::New(src->GetType(), this->m_func);
  6890. IR::Instr* argOut = IR::Instr::New(LowererMDArch::GetAssignOp(src->GetType()), roundedFloat, src, this->m_func);
  6891. instr->InsertBefore(argOut);
  6892. bool negZeroCheckDone = false;
  6893. IR::LabelInstr * bailoutLabel = nullptr;
  6894. bool sharedBailout = false;
  6895. if (instr->GetDst()->IsInt32())
  6896. {
  6897. sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  6898. bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);
  6899. }
  6900. IR::Opnd * zero;
  6901. if (src->IsFloat64())
  6902. {
  6903. zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6904. }
  6905. else
  6906. {
  6907. Assert(src->IsFloat32());
  6908. zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatZeroAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6909. }
  6910. IR::AutoReuseOpnd autoReuseZero(zero, this->m_func);
  6911. IR::LabelInstr * skipRoundSd = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6912. if(instr->m_opcode == Js::OpCode::InlineMathRound)
  6913. {
  6914. IR::LabelInstr * addHalfToRoundSrcLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6915. IR::LabelInstr * ltHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6916. IR::LabelInstr * setZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6917. IR::LabelInstr * ltNegHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6918. IR::Opnd * pointFive;
  6919. IR::Opnd * negPointFive;
  6920. if (src->IsFloat64())
  6921. {
  6922. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6923. negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegPointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6924. }
  6925. else
  6926. {
  6927. Assert(src->IsFloat32());
  6928. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6929. negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6930. }
  6931. // CMP 0.5, roundedFloat
  6932. // JA $ltHalf
  6933. this->m_lowerer->InsertCompareBranch(pointFive, roundedFloat, Js::OpCode::BrGt_A, ltHalf, instr);
  6934. if (instr->GetDst()->IsInt32())
  6935. {
  6936. // if we are specializing dst to int, we will bailout on overflow so don't need upperbound check
  6937. // Also, we will bailout on NaN, so it doesn't need special handling either
  6938. // J $addHalfToRoundSrcLabel
  6939. this->m_lowerer->InsertBranch(Js::OpCode::Br, addHalfToRoundSrcLabel, instr);
  6940. }
  6941. else
  6942. {
  6943. IR::Opnd * twoToFraction;
  6944. if (src->IsFloat64())
  6945. {
  6946. twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6947. }
  6948. else
  6949. {
  6950. Assert(src->IsFloat32());
  6951. twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6952. }
  6953. // CMP 2^fraction, roundedFloat
  6954. // JA $addHalfToRoundSrcLabel
  6955. this->m_lowerer->InsertCompareBranch(twoToFraction, roundedFloat, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
  6956. // J $skipRoundSd (NaN also handled here)
  6957. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6958. }
  6959. // $ltHalf:
  6960. instr->InsertBefore(ltHalf);
  6961. // CMP roundedFloat, -0.5
  6962. // JL $ltNegHalf
  6963. this->m_lowerer->InsertCompareBranch(roundedFloat, negPointFive, Js::OpCode::BrLt_A, ltNegHalf, instr);
  6964. if (instr->ShouldCheckForNegativeZero())
  6965. {
  6966. // CMP roundedFloat, 0
  6967. // JA $setZero
  6968. this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGt_A, setZero, instr);
  6969. // $negZeroTest [helper]
  6970. m_lowerer->InsertLabel(true, instr);
  6971. // JB $bailoutLabel
  6972. this->m_lowerer->InsertBranch(Js::OpCode::JB, bailoutLabel, instr);
  6973. // if isNegZero(src) J $bailoutLabel else J $skipRoundSd
  6974. NegZeroBranching(src, instr, bailoutLabel, skipRoundSd);
  6975. negZeroCheckDone = true;
  6976. }
  6977. // $setZero:
  6978. instr->InsertBefore(setZero);
  6979. // MOVSD_ZERO roundedFloat
  6980. LoadFloatZero(roundedFloat, instr);
  6981. // J $skipRoundSd
  6982. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6983. // $ltNegHalf:
  6984. instr->InsertBefore(ltNegHalf);
  6985. if (!instr->GetDst()->IsInt32())
  6986. {
  6987. // if we are specializing dst to int, we will bailout on overflow so don't need lowerbound check
  6988. IR::Opnd * negTwoToFraction;
  6989. if (src->IsFloat64())
  6990. {
  6991. negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6992. }
  6993. else
  6994. {
  6995. Assert(src->IsFloat32());
  6996. negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6997. }
  6998. // CMP roundedFloat, negTwoToFraction
  6999. // JA $addHalfToRoundSrcLabel
  7000. this->m_lowerer->InsertCompareBranch(roundedFloat, negTwoToFraction, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
  7001. // J $skipRoundSd
  7002. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  7003. }
  7004. if (src->IsFloat64())
  7005. {
  7006. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  7007. }
  7008. else
  7009. {
  7010. Assert(src->IsFloat32());
  7011. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  7012. }
  7013. // $addHalfToRoundSrcLabel
  7014. instr->InsertBefore(addHalfToRoundSrcLabel);
  7015. // ADDSD roundedFloat, 0.5
  7016. IR::Instr * addInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS, roundedFloat, roundedFloat, pointFive, this->m_func);
  7017. instr->InsertBefore(addInstr);
  7018. Legalize(addInstr);
  7019. }
  7020. if (instr->m_opcode == Js::OpCode::InlineMathFloor && instr->GetDst()->IsInt32())
  7021. {
  7022. this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGe_A, skipRoundSd, instr);
  7023. }
  7024. // ROUNDSD srcCopy, srcCopy, round_mode
  7025. IR::Opnd * roundMode = nullptr;
  7026. switch (instr->m_opcode)
  7027. {
  7028. #ifdef ENABLE_WASM
  7029. case Js::OpCode::Trunc_A:
  7030. roundMode = IR::IntConstOpnd::New(0x03, TyInt32, this->m_func);
  7031. break;
  7032. case Js::OpCode::Nearest_A:
  7033. roundMode = IR::IntConstOpnd::New(0x00, TyInt32, this->m_func);
  7034. break;
  7035. #endif //ENABLE_WASM
  7036. case Js::OpCode::InlineMathRound:
  7037. case Js::OpCode::InlineMathFloor:
  7038. roundMode = IR::IntConstOpnd::New(0x01, TyInt32, this->m_func);
  7039. break;
  7040. case Js::OpCode::InlineMathCeil:
  7041. roundMode = IR::IntConstOpnd::New(0x02, TyInt32, this->m_func);
  7042. break;
  7043. }
  7044. IR::Instr* roundInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ROUNDSD : Js::OpCode::ROUNDSS, roundedFloat, roundedFloat, roundMode, this->m_func);
  7045. instr->InsertBefore(roundInstr);
  7046. if (instr->m_opcode == Js::OpCode::InlineMathRound)
  7047. {
  7048. instr->InsertBefore(skipRoundSd);
  7049. }
  7050. if (instr->GetDst()->IsInt32())
  7051. {
  7052. if (instr->m_opcode == Js::OpCode::InlineMathFloor)
  7053. {
  7054. instr->InsertBefore(skipRoundSd);
  7055. }
  7056. //negZero bailout
  7057. if(instr->ShouldCheckForNegativeZero() && !negZeroCheckDone)
  7058. {
  7059. IR::LabelInstr * convertToInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  7060. IR::Opnd * checkNegZeroOpnd = isNotCeil ? src : roundedFloat;
  7061. this->m_lowerer->InsertCompareBranch(checkNegZeroOpnd, zero, Js::OpCode::BrNeq_A, convertToInt, instr);
  7062. m_lowerer->InsertLabel(true, instr);
  7063. NegZeroBranching(checkNegZeroOpnd, instr, bailoutLabel, convertToInt);
  7064. instr->InsertBefore(convertToInt);
  7065. }
  7066. IR::Opnd * originalDst = instr->UnlinkDst();
  7067. // CVT(T)SD2SI dst, srcCopy
  7068. IR::Instr* convertToIntInstr;
  7069. if (isNotCeil)
  7070. {
  7071. convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, originalDst, roundedFloat, this->m_func);
  7072. }
  7073. else
  7074. {
  7075. convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTSD2SI : Js::OpCode::CVTSS2SI, originalDst, roundedFloat, this->m_func);
  7076. }
  7077. instr->InsertBefore(convertToIntInstr);
  7078. IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  7079. IR::Opnd * intOverflowValue = IR::IntConstOpnd::New(INT32_MIN, IRType::TyInt32, this->m_func, true);
  7080. this->m_lowerer->InsertCompareBranch(originalDst, intOverflowValue, Js::OpCode::BrNeq_A, fallthrough, instr);
  7081. instr->InsertAfter(fallthrough);
  7082. if (!sharedBailout)
  7083. {
  7084. instr->InsertBefore(bailoutLabel);
  7085. }
  7086. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  7087. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  7088. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  7089. }
  7090. else
  7091. {
  7092. IR::Opnd * originalDst = instr->UnlinkDst();
  7093. Assert(originalDst->IsFloat());
  7094. Assert(originalDst->GetType() == roundedFloat->GetType());
  7095. IR::Instr * movInstr = IR::Instr::New(originalDst->IsFloat64() ? Js::OpCode::MOVSD : Js::OpCode::MOVSS, originalDst, roundedFloat, this->m_func);
  7096. instr->InsertBefore(movInstr);
  7097. instr->Remove();
  7098. }
  7099. break;
  7100. }
  7101. case Js::OpCode::InlineMathMin:
  7102. case Js::OpCode::InlineMathMax:
  7103. {
  7104. IR::Opnd* src1 = instr->GetSrc1();
  7105. IR::Opnd* src2 = instr->GetSrc2();
  7106. IR::Opnd* dst = instr->GetDst();
  7107. IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  7108. IR::LabelInstr* labelNaNHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  7109. IR::LabelInstr* labelNegZeroAndNaNCheckHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  7110. IR::Instr* branchInstr;
  7111. bool min = instr->m_opcode == Js::OpCode::InlineMathMin ? true : false;
  7112. bool dstEqualsSrc1 = dst->IsEqual(src1);
  7113. bool dstEqualsSrc2 = dst->IsEqual(src2);
  7114. IR::Opnd * otherSrc = src2;
  7115. IR::Opnd * compareSrc1 = src1;
  7116. IR::Opnd * compareSrc2 = src2;
  7117. if (dstEqualsSrc2)
  7118. {
  7119. otherSrc = src1;
  7120. compareSrc1 = src2;
  7121. compareSrc2 = src1;
  7122. }
  7123. if (!dstEqualsSrc1 && !dstEqualsSrc2)
  7124. {
  7125. //MOV dst, src1;
  7126. this->m_lowerer->InsertMove(dst, src1, instr);
  7127. }
  7128. // CMP src1, src2
  7129. if(dst->IsInt32())
  7130. {
  7131. if(min)
  7132. {
  7133. // JLT $continueLabel
  7134. branchInstr = IR::BranchInstr::New(Js::OpCode::BrLt_I4, doneLabel, compareSrc1, compareSrc2, instr->m_func);
  7135. instr->InsertBefore(branchInstr);
  7136. LowererMDArch::EmitInt4Instr(branchInstr);
  7137. }
  7138. else
  7139. {
  7140. // JGT $continueLabel
  7141. branchInstr = IR::BranchInstr::New(Js::OpCode::BrGt_I4, doneLabel, compareSrc1, compareSrc2, instr->m_func);
  7142. instr->InsertBefore(branchInstr);
  7143. LowererMDArch::EmitInt4Instr(branchInstr);
  7144. }
  7145. // MOV dst, src1
  7146. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7147. }
  7148. else if(dst->IsFloat())
  7149. {
  7150. // COMISD/COMISS src1 (src2), src2 (src1)
  7151. // JA $doneLabel
  7152. // JEQ $labelNegZeroAndNaNCheckHelper
  7153. // MOVSD/MOVSS dst, src2
  7154. // JMP $doneLabel
  7155. //
  7156. // $labelNegZeroAndNaNCheckHelper
  7157. // JP $labelNaNHelper
  7158. // if(min)
  7159. // {
  7160. // if(src2 == -0.0)
  7161. // MOVSD/MOVSS dst, src2
  7162. // }
  7163. // else
  7164. // {
  7165. // if(src1 == -0.0)
  7166. // MOVSD/MOVSS dst, src2
  7167. // }
  7168. // JMP $doneLabel
  7169. //
  7170. // $labelNaNHelper
  7171. // MOVSD/MOVSS dst, NaN
  7172. //
  7173. // $doneLabel
  7174. if(min)
  7175. {
  7176. this->m_lowerer->InsertCompareBranch(compareSrc1, compareSrc2, Js::OpCode::BrLt_A, doneLabel, instr); // Lowering of BrLt_A for floats is done to JA with operands swapped
  7177. }
  7178. else
  7179. {
  7180. this->m_lowerer->InsertCompareBranch(compareSrc1, compareSrc2, Js::OpCode::BrGt_A, doneLabel, instr);
  7181. }
  7182. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, labelNegZeroAndNaNCheckHelper, instr->m_func));
  7183. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7184. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));
  7185. instr->InsertBefore(labelNegZeroAndNaNCheckHelper);
  7186. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, labelNaNHelper, instr->m_func));
  7187. IR::LabelInstr *isNeg0Label = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  7188. NegZeroBranching(min ? compareSrc2 : compareSrc1, instr, isNeg0Label, doneLabel);
  7189. instr->InsertBefore(isNeg0Label);
  7190. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7191. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));
  7192. instr->InsertBefore(labelNaNHelper);
  7193. IR::Opnd * opndNaN = nullptr;
  7194. if (dst->IsFloat32())
  7195. {
  7196. opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), IRType::TyFloat32, this->m_func);
  7197. }
  7198. else
  7199. {
  7200. opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), IRType::TyFloat64, this->m_func);
  7201. }
  7202. this->m_lowerer->InsertMove(dst, opndNaN, instr);
  7203. }
  7204. instr->InsertBefore(doneLabel);
  7205. instr->Remove();
  7206. break;
  7207. }
  7208. default:
  7209. AssertMsg(FALSE, "Unknown inline built-in opcode");
  7210. break;
  7211. }
  7212. }
// Lowers an inlined Math.abs call into branch-free machine code.
// A bail-on-int-min check is emitted first (via LowerBailOnIntMin), so the
// int32 fast path below may assume src != INT32_MIN (whose absolute value is
// not representable as an int32).
void LowererMD::GenerateFastInlineBuiltInMathAbs(IR::Instr* inlineInstr)
{
    IR::Opnd* src = inlineInstr->GetSrc1()->Copy(this->m_func);
    IR::Opnd* dst = inlineInstr->UnlinkDst();
    Assert(src);
    IR::Instr* tmpInstr;

    // nextInstr marks the insertion point for the fast-path code; it is placed
    // right after the continuation point returned by the bailout lowering.
    IR::Instr* nextInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::Instr* continueInstr = m_lowerer->LowerBailOnIntMin(inlineInstr);
    continueInstr->InsertAfter(nextInstr);

    IRType srcType = src->GetType();
    if (srcType == IRType::TyInt32)
    {
        // Note: if execution gets so far, we always get (untagged) int32 here.
        // Since -x = ~x + 1, abs(x) = x, abs(-x) = -x, sign-extend(x) = 0, sign_extend(-x) = -1, where 0 <= x.
        // Then: abs(x) = (sign-extend(x) XOR x) - sign-extend(x)
        // Expected input (otherwise bailout):
        // - src1 is (untagged) int, not equal to int_min (abs(int_min) would produce overflow, as there's no corresponding positive int).

        // MOV EAX, src
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        tmpInstr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
        nextInstr->InsertBefore(tmpInstr);

        IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());

        // CDQ (sign-extend EAX into EDX, producing 64bit EDX:EAX value)
        // Note: EDX is set as the dst so that the EDX lifetime gets a def.
        tmpInstr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
        nextInstr->InsertBefore(tmpInstr);

        // XOR EAX, EDX
        tmpInstr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
        nextInstr->InsertBefore(tmpInstr);

        // SUB EAX, EDX
        tmpInstr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
        nextInstr->InsertBefore(tmpInstr);

        // MOV dst, EAX
        tmpInstr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
        nextInstr->InsertBefore(tmpInstr);
    }
    else if (srcType == IRType::TyFloat64)
    {
        // Float64: take the absolute value in place via GenerateFloatAbs.
        // If dst is not a register, round-trip through a temporary register.
        if (!dst->IsRegOpnd())
        {
            // MOVSD tempRegOpnd, src
            IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyMachDouble, this->m_func);
            tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
            tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, tempRegOpnd, src, this->m_func);
            nextInstr->InsertBefore(tmpInstr);

            // This saves the result in the same register.
            this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);

            // MOVSD dst, tempRegOpnd
            tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempRegOpnd, this->m_func);
            nextInstr->InsertBefore(tmpInstr);
        }
        else
        {
            // MOVSD dst, src
            tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, src, this->m_func);
            nextInstr->InsertBefore(tmpInstr);

            // This saves the result in the same register.
            this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
        }
    }
    else if (srcType == IRType::TyFloat32)
    {
        // Float32: same shape as the float64 path, using MOVSS.
        if (!dst->IsRegOpnd())
        {
            // MOVSS tempRegOpnd, src
            IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyFloat32, this->m_func);
            tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
            tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, tempRegOpnd, src, this->m_func);
            nextInstr->InsertBefore(tmpInstr);

            // This saves the result in the same register.
            this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);

            // MOVSS dst, tempRegOpnd
            tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, tempRegOpnd, this->m_func);
            nextInstr->InsertBefore(tmpInstr);
        }
        else
        {
            // MOVSS dst, src
            tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, src, this->m_func);
            nextInstr->InsertBefore(tmpInstr);

            // This saves the result in the same register.
            this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
        }
    }
    else
    {
        AssertMsg(FALSE, "GenerateFastInlineBuiltInMathAbs: unexpected type of the src!");
    }
}
// Lowers an inlined Math.pow call to a direct helper call, selecting the
// helper by operand types:
//   - (float, int)   -> HelperDirectMath_PowDoubleInt
//   - (int,   int)   -> HelperDirectMath_PowIntInt; a stack-allocated bool is
//                       passed by address as an extra argument (presumably set
//                       by the helper on failure — it becomes the bailout operand)
//   - (float, float) -> HelperDirectMath_Pow (not on x86; see AssertMsg below)
// Arguments are pushed in reverse order via the LoadHelperArgument family.
void LowererMD::GenerateFastInlineBuiltInMathPow(IR::Instr* instr)
{
#ifdef _M_IX86
    // x86 cannot take this path for a double second operand.
    AssertMsg(!instr->GetSrc2()->IsFloat(), "Math.pow(*, double) needs customized lowering!");
#endif

    IR::JnHelperMethod directPowHelper = (IR::JnHelperMethod)0;
    IR::Opnd* bailoutOpnd = nullptr;

    if (!instr->GetSrc2()->IsFloat())
    {
        LoadHelperArgument(instr, instr->UnlinkSrc2());

        if (instr->GetSrc1()->IsFloat())
        {
            directPowHelper = IR::HelperDirectMath_PowDoubleInt;
            LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
        }
        else
        {
            directPowHelper = IR::HelperDirectMath_PowIntInt;
            LoadHelperArgument(instr, instr->UnlinkSrc1());

            // Lazily allocate a reusable one-byte stack slot for the helper's
            // bool out-parameter.
            if (!this->m_func->tempSymBool)
            {
                this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
                this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
            }
            IR::SymOpnd* boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
            IR::RegOpnd* boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            // Pass the address of the bool slot to the helper.
            this->m_lowerer->InsertLea(boolRefOpnd, boolOpnd, instr);
            LoadHelperArgument(instr, boolRefOpnd);
            bailoutOpnd = boolOpnd;
        }
    }
#ifndef _M_IX86
    else
    {
        AssertMsg(instr->GetSrc1()->IsFloat(), "Math.Pow(int, double) should not generated by GlobOpt!");
        directPowHelper = IR::HelperDirectMath_Pow;
        LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
        LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
    }
#endif

    // Replace the instruction with the call; bailoutOpnd (if any) carries the
    // helper-reported failure condition.
    ChangeToHelperCall(instr, directPowHelper, nullptr, bailoutOpnd);
}
// Emits a branch sequence that tests whether the float operand's bit pattern
// is exactly negative zero: control transfers to isNeg0Label if it is, to
// isNotNeg0Label otherwise. Returns the final unconditional JMP to
// isNotNeg0Label. All instructions are inserted before 'instr'.
IR::Instr *
LowererMD::NegZeroBranching(IR::Opnd* opnd, IR::Instr* instr, IR::LabelInstr* isNeg0Label, IR::LabelInstr* isNotNeg0Label)
{
    Assert(opnd->IsFloat());
    bool is32Bits = opnd->IsFloat32();
    IRType regType = is32Bits ? TyUint32 : TyUint64;

    // Use an integer comparison between the opnd to check and the negative
    // zero bit pattern. For this we reinterpret the float bits as an integer.
    // MOV intOpnd, src
    IR::RegOpnd *intOpnd = IR::RegOpnd::New(regType, this->m_func);
    EmitReinterpretFloatToInt(intOpnd, opnd, instr);

#if LOWER_SPLIT_INT64
    if (!is32Bits)
    {
        // For 64bits comparisons on x86 we need to check 2 registers:
        // CMP intOpnd.high, (k_NegZero >> 32).i32
        // BRNEQ isNotNeg0Label
        // CMP intOpnd.low, k_NegZero.i32
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(intOpnd);
        const uint32 high64NegZero = Js::NumberConstants::k_NegZero >> 32;
        const uint32 low64NegZero = Js::NumberConstants::k_NegZero & UINT32_MAX;
        IR::IntConstOpnd *negZeroHighOpnd = IR::IntConstOpnd::New(high64NegZero, TyUint32, m_func);
        IR::IntConstOpnd *negZeroLowOpnd = IR::IntConstOpnd::New(low64NegZero, TyUint32, m_func);
        m_lowerer->InsertCompareBranch(dstPair.high, negZeroHighOpnd, Js::OpCode::BrNeq_A, isNotNeg0Label, instr);
        m_lowerer->InsertCompareBranch(dstPair.low, negZeroLowOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    else
#endif
    {
#if _M_IX86
        // NOTE(review): on x86 only the float32 constant is used here — this
        // presumes the float64 case is always taken by the split-pair path
        // above when LOWER_SPLIT_INT64 is in effect; confirm that coupling.
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(Js::NumberConstants::k_Float32NegZero, regType, m_func);
#else
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(is32Bits ? Js::NumberConstants::k_Float32NegZero : Js::NumberConstants::k_NegZero, regType, m_func);
#endif
        // CMP intOpnd, k_NegZero
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        m_lowerer->InsertCompareBranch(intOpnd, negZeroOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    IR::Instr* jmpNotNegZero = IR::BranchInstr::New(Js::OpCode::JMP, isNotNeg0Label, m_func);
    instr->InsertBefore(jmpNotNegZero);
    return jmpNotNegZero;
}
  7391. void
  7392. LowererMD::FinalLower()
  7393. {
  7394. this->lowererMDArch.FinalLower();
  7395. }
// Lowers Div_I4 to IDIV and inserts a bailout branch taken when the remainder
// is non-zero (i.e. the division result is not a whole integer).
// Returns the instruction (the final "dst = MOV EAX") before which the caller
// may insert additional checks.
IR::Instr *
LowererMD::LowerDivI4AndBailOnReminder(IR::Instr * instr, IR::LabelInstr * bailOutLabel)
{
    // We don't have to save the operands for bailout because the lowering of IDIV doesn't overwrite their values.
    //
    // (EDX)    = CDQ
    // EAX      = numerator
    // (EDX:EAX)= IDIV (EAX), denominator
    //            TEST EDX, EDX
    //            JNE bailout
    //            <Caller inserts more checks here>
    // dst      = MOV EAX                             <-- assignInstr
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Div_I4);
    Assert(!instr->HasBailOutInfo());

    EmitInt4Instr(instr);

    Assert(instr->m_opcode == Js::OpCode::IDIV);

    // EmitInt4Instr placed the CDQ immediately before the IDIV; its dst
    // (EDX/RDX) holds the remainder after the IDIV executes.
    IR::Instr * prev = instr->m_prev;
    Assert(prev->m_opcode == Js::OpCode::CDQ);
#ifdef _M_IX86
    Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegEDX);
#else
    Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegRDX);
#endif
    IR::Opnd * reminderOpnd = prev->GetDst();

    // Insert all checks before the assignment to the actual dst.
    IR::Instr * insertBeforeInstr = instr->m_next;
    Assert(insertBeforeInstr->m_opcode == Js::OpCode::MOV);
#ifdef _M_IX86
    Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegEAX);
#else
    Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegRAX);
#endif

    // Jump to bailout if the remainder is not 0 (not an int result).
    this->m_lowerer->InsertTestBranch(reminderOpnd, reminderOpnd, Js::OpCode::BrNeq_A, bailOutLabel, insertBeforeInstr);
    return insertBeforeInstr;
}
// Lowers the Typeof opcode. Fast path: index the script library's
// typeDisplayStrings table by type id.
//   - Tagged ints take the TypeIds_Number entry directly.
//   - Object type ids at or above TypeIds_Limit (external objects) are
//     clamped to TypeIds_Object via CMOVB.
//   - Types whose flags include TypeFlagMask_IsFalsy report as
//     TypeIds_Undefined via CMOVNE.
//   - A null table entry falls back to the OP_Typeof helper call.
void
LowererMD::LowerTypeof(IR::Instr * typeOfInstr)
{
    Func * func = typeOfInstr->m_func;
    IR::Opnd * src1 = typeOfInstr->GetSrc1();
    IR::Opnd * dst = typeOfInstr->GetDst();
    Assert(src1->IsRegOpnd() && dst->IsRegOpnd());
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * taggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV typeDisplayStringsArray, &javascriptLibrary->typeDisplayStrings
    IR::RegOpnd * typeDisplayStringsArrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowerer->InsertMove(typeDisplayStringsArrayOpnd, IR::AddrOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetTypeDisplayStringsOffset(), IR::AddrOpndKindConstantAddress, this->m_func), typeOfInstr);

    GenerateObjectTest(src1, typeOfInstr, taggedIntLabel);

    // MOV typeId, TypeIds_Object
    // MOV typeRegOpnd, [src1 + offset(Type)]
    // MOV objTypeId, [typeRegOpnd + offsetof(typeId)]
    // CMP objTypeId, TypeIds_Limit /*external object test*/
    // CMOVB typeId, objTypeId
    // TEST [typeRegOpnd + offsetof(flags)], TypeFlagMask_IsFalsy /*test for falsy*/
    // CMOVNE typeId, TypeIds_Undefined
    // MOV dst, typeDisplayStrings[typeId]
    // TEST dst, dst
    // JE $helper
    // JMP $done
    IR::RegOpnd * typeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Object, TyUint32, func), typeOfInstr);

    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, func);
    m_lowerer->InsertMove(typeRegOpnd,
        IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func),
        typeOfInstr);

    IR::RegOpnd * objTypeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(objTypeIdOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, func), typeOfInstr);

    m_lowerer->InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Limit, TyUint32, func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVB, typeIdOpnd, objTypeIdOpnd, typeOfInstr);

    // Insert MOV reg, 0 before the TEST because MOV reg, 0 will be peeped to XOR reg, reg and that may affect the zero flags that CMOVE depends on
    IR::RegOpnd* typeIdUndefinedOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdUndefinedOpnd, IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyUint32, func), typeOfInstr);

    IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
    m_lowerer->InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVNE, typeIdOpnd, typeIdUndefinedOpnd, typeOfInstr);

    // If dst aliases src1, copy src1 out first so the helper path below still
    // sees the original value after dst is written.
    if (dst->IsEqual(src1))
    {
        ChangeToAssign(typeOfInstr->HoistSrc1(Js::OpCode::Ld_A));
    }
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, typeIdOpnd, this->GetDefaultIndirScale(), TyMachPtr, func), typeOfInstr);
    m_lowerer->InsertTestBranch(dst, dst, Js::OpCode::BrEq_A, helperLabel, typeOfInstr);

    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);

    // $taggedInt:
    //     MOV dst, typeDisplayStrings[TypeIds_Number]
    //     JMP $done
    typeOfInstr->InsertBefore(taggedIntLabel);
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Number * sizeof(Js::Var), TyMachPtr, func), typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);

    // $helper
    //     CALL OP_Typeof
    // $done
    typeOfInstr->InsertBefore(helperLabel);
    typeOfInstr->InsertAfter(doneLabel);
    m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
}
  7493. void
  7494. LowererMD::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore)
  7495. {
  7496. if ((isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForStores)) || (!isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForLoads)))
  7497. {
  7498. Js::OpCode opcode;
  7499. if (branchInstr->m_opcode == Js::OpCode::JNE)
  7500. {
  7501. opcode = Js::OpCode::CMOVNE;
  7502. }
  7503. else
  7504. {
  7505. AssertOrFailFastMsg(branchInstr->m_opcode == Js::OpCode::JEQ, "Unexpected branch type in InsertObjectPoison preceeding instruction");
  7506. opcode = Js::OpCode::CMOVE;
  7507. }
  7508. AssertOrFailFast(branchInstr->m_prev->m_opcode == Js::OpCode::CMP || branchInstr->m_prev->m_opcode == Js::OpCode::TEST);
  7509. IR::RegOpnd* regZero = IR::RegOpnd::New(TyMachPtr, insertInstr->m_func);
  7510. Lowerer::InsertMove(regZero, IR::IntConstOpnd::New(0, TyMachPtr, insertInstr->m_func), branchInstr->m_prev);
  7511. InsertCmovCC(opcode, poisonedOpnd, regZero, insertInstr);
  7512. }
  7513. }
  7514. IR::Instr*
  7515. LowererMD::InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc)
  7516. {
  7517. Assert(opCode > Js::OpCode::MDStart);
  7518. Func* func = insertBeforeInstr->m_func;
  7519. IR::Opnd* src2 = nullptr;
  7520. if (!postRegAlloc)
  7521. {
  7522. src2 = src1;
  7523. src1 = dst;
  7524. }
  7525. IR::Instr * instr = IR::Instr::New(opCode, dst, src1, src2, func);
  7526. insertBeforeInstr->InsertBefore(instr);
  7527. LowererMD::Legalize(instr);
  7528. return instr;
  7529. }
  7530. IR::BranchInstr*
  7531. LowererMD::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
  7532. {
  7533. return this->lowererMDArch.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr);
  7534. }