LowerMDShared.cpp 305 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
7827882798280828182828283828482858286828782888289829082918292829382948295829682978298829983008301830283038304830583068307830883098310831183128313831483158316831783188319832083218322832383248325832683278328832983308331833283338334833583368337833883398340834183428343834483458346834783488349835083518352835383548355835683578358835983608361836283638364836583668367836883698370837183728373837483758376837783788379838083818382838383848385838683878388838983908391839283938394839583968397839883998400840184028403840484058406840784088409841084118412841384148415841684178418841984208421842284238424842584268427842884298430843184328433843484358436843784388439844084418442844384448445844684478448844984508451845284538454845584568457845884598460846184628463846484658466846784688469847084718472847384748475847684778478847984808481848284838484848584868487848884898490849184928493849484958496849784988499850085018502850385048505850685078508850985108511851285138514851585168517851885198520852185228523852485258526852785288529853085318532853385348535853685378538853985408541854285438544854585468547854885498550855185528553855485558556855785588559856085618562856385648565856685678568856985708571857285738574857585768577857885798580858185828583858485858586858785888589859085918592859385948595859685978598859986008601860286038604860586068607860886098610861186128613861486158616861786188619862086218622862386248625862686278628862986308631863286338634863586368637863886398640864186428643864486458646864786488649865086518652865386548655865686578658865986608661866286638664866586668667866886698670867186728673867486758676867786788679868086818682868386848685868686878688868986908691869286938694869586968697869886998700870187028703870487058706
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Language/JavascriptFunctionArgIndex.h"
// Machine-dependent opcode selections shared by the x86/x64 lowerer.
const Js::OpCode LowererMD::MDUncondBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDMultiBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDTestOpcode = Js::OpCode::TEST;
const Js::OpCode LowererMD::MDOrOpcode = Js::OpCode::OR;
const Js::OpCode LowererMD::MDXorOpcode = Js::OpCode::XOR;
#if _M_X64
// Bit-preserving 64-bit GPR <-> XMM move; only defined for x64 builds.
const Js::OpCode LowererMD::MDMovUint64ToFloat64Opcode = Js::OpCode::MOVQ;
#endif
const Js::OpCode LowererMD::MDOverflowBranchOpcode = Js::OpCode::JO;
const Js::OpCode LowererMD::MDNotOverflowBranchOpcode = Js::OpCode::JNO;
const Js::OpCode LowererMD::MDConvertFloat32ToFloat64Opcode = Js::OpCode::CVTSS2SD;
const Js::OpCode LowererMD::MDConvertFloat64ToFloat32Opcode = Js::OpCode::CVTSD2SS;
const Js::OpCode LowererMD::MDCallOpcode = Js::OpCode::CALL;
const Js::OpCode LowererMD::MDImulOpcode = Js::OpCode::IMUL2;
const Js::OpCode LowererMD::MDLea = Js::OpCode::LEA;
// Conditional moves used for speculative blocks; both integer and float
// not-equal variants map to CMOVNE on this target.
const Js::OpCode LowererMD::MDSpecBlockNEOpcode = Js::OpCode::CMOVNE;
const Js::OpCode LowererMD::MDSpecBlockFNEOpcode = Js::OpCode::CMOVNE;
// IEEE-754 single-precision bit pattern for 2^31 (2147483648.0f).
static const int TWO_31_FLOAT = 0x4f000000;
// IEEE-754 single-precision bit pattern for -2^31 (INT_MIN as a float).
static const int FLOAT_INT_MIN = 0xcf000000;
  26. //
  27. // Static utility fn()
  28. //
  29. bool
  30. LowererMD::IsAssign(IR::Instr *instr)
  31. {
  32. return instr->GetDst() && instr->m_opcode == LowererMDArch::GetAssignOp(instr->GetDst()->GetType());
  33. }
  34. ///----------------------------------------------------------------------------
  35. ///
  36. /// LowererMD::IsCall
  37. ///
  38. ///----------------------------------------------------------------------------
  39. bool
  40. LowererMD::IsCall(IR::Instr *instr)
  41. {
  42. return instr->m_opcode == Js::OpCode::CALL;
  43. }
  44. ///----------------------------------------------------------------------------
  45. ///
  46. /// LowererMD::IsUnconditionalBranch
  47. ///
  48. ///----------------------------------------------------------------------------
  49. bool
  50. LowererMD::IsUnconditionalBranch(const IR::Instr *instr)
  51. {
  52. return (instr->m_opcode == Js::OpCode::JMP);
  53. }
  54. // GenerateMemRef: Return an opnd that can be used to access the given address.
  55. IR::Opnd *
  56. LowererMD::GenerateMemRef(intptr_t addr, IRType type, IR::Instr *instr, bool dontEncode)
  57. {
  58. return IR::MemRefOpnd::New(addr, type, this->m_func);
  59. }
// Store an immediate, pointer-sized value into [opnd + offset].
// On x64 the value may exceed a 32-bit immediate, so defer to the
// arch-specific helper; on 32-bit targets a uint32 store suffices.
// isZeroed indicates the destination memory is known to already be zero.
void
LowererMD::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
#if _M_X64
    lowererMDArch.GenerateMemInit(opnd, offset, value, insertBeforeInstr, isZeroed);
#else
    m_lowerer->GenerateMemInit(opnd, offset, (uint32)value, insertBeforeInstr, isZeroed);
#endif
}
  69. ///----------------------------------------------------------------------------
  70. ///
  71. /// LowererMD::InvertBranch
  72. ///
  73. ///----------------------------------------------------------------------------
  74. void
  75. LowererMD::InvertBranch(IR::BranchInstr *branchInstr)
  76. {
  77. switch (branchInstr->m_opcode)
  78. {
  79. case Js::OpCode::JA:
  80. branchInstr->m_opcode = Js::OpCode::JBE;
  81. break;
  82. case Js::OpCode::JAE:
  83. branchInstr->m_opcode = Js::OpCode::JB;
  84. break;
  85. case Js::OpCode::JB:
  86. branchInstr->m_opcode = Js::OpCode::JAE;
  87. break;
  88. case Js::OpCode::JBE:
  89. branchInstr->m_opcode = Js::OpCode::JA;
  90. break;
  91. case Js::OpCode::JEQ:
  92. branchInstr->m_opcode = Js::OpCode::JNE;
  93. break;
  94. case Js::OpCode::JNE:
  95. branchInstr->m_opcode = Js::OpCode::JEQ;
  96. break;
  97. case Js::OpCode::JGE:
  98. branchInstr->m_opcode = Js::OpCode::JLT;
  99. break;
  100. case Js::OpCode::JGT:
  101. branchInstr->m_opcode = Js::OpCode::JLE;
  102. break;
  103. case Js::OpCode::JLT:
  104. branchInstr->m_opcode = Js::OpCode::JGE;
  105. break;
  106. case Js::OpCode::JLE:
  107. branchInstr->m_opcode = Js::OpCode::JGT;
  108. break;
  109. case Js::OpCode::JO:
  110. branchInstr->m_opcode = Js::OpCode::JNO;
  111. break;
  112. case Js::OpCode::JNO:
  113. branchInstr->m_opcode = Js::OpCode::JO;
  114. break;
  115. case Js::OpCode::JP:
  116. branchInstr->m_opcode = Js::OpCode::JNP;
  117. break;
  118. case Js::OpCode::JNP:
  119. branchInstr->m_opcode = Js::OpCode::JP;
  120. break;
  121. case Js::OpCode::JSB:
  122. branchInstr->m_opcode = Js::OpCode::JNSB;
  123. break;
  124. case Js::OpCode::JNSB:
  125. branchInstr->m_opcode = Js::OpCode::JSB;
  126. break;
  127. default:
  128. AssertMsg(UNREACHED, "JCC missing in InvertBranch()");
  129. }
  130. }
  131. void
  132. LowererMD::ReverseBranch(IR::BranchInstr *branchInstr)
  133. {
  134. switch (branchInstr->m_opcode)
  135. {
  136. case Js::OpCode::JA:
  137. branchInstr->m_opcode = Js::OpCode::JB;
  138. break;
  139. case Js::OpCode::JAE:
  140. branchInstr->m_opcode = Js::OpCode::JBE;
  141. break;
  142. case Js::OpCode::JB:
  143. branchInstr->m_opcode = Js::OpCode::JA;
  144. break;
  145. case Js::OpCode::JBE:
  146. branchInstr->m_opcode = Js::OpCode::JAE;
  147. break;
  148. case Js::OpCode::JGE:
  149. branchInstr->m_opcode = Js::OpCode::JLE;
  150. break;
  151. case Js::OpCode::JGT:
  152. branchInstr->m_opcode = Js::OpCode::JLT;
  153. break;
  154. case Js::OpCode::JLT:
  155. branchInstr->m_opcode = Js::OpCode::JGT;
  156. break;
  157. case Js::OpCode::JLE:
  158. branchInstr->m_opcode = Js::OpCode::JGE;
  159. break;
  160. case Js::OpCode::JEQ:
  161. case Js::OpCode::JNE:
  162. case Js::OpCode::JO:
  163. case Js::OpCode::JNO:
  164. case Js::OpCode::JP:
  165. case Js::OpCode::JNP:
  166. case Js::OpCode::JSB:
  167. case Js::OpCode::JNSB:
  168. break;
  169. default:
  170. AssertMsg(UNREACHED, "JCC missing in ReverseBranch()");
  171. }
  172. }
// LowerCallHelper: Lower a helper call whose arguments are linked through
// src2 as a chain of ArgOut_A / ExtendArg_A definitions. The chain is walked
// from last argument to first, feeding each to LoadHelperArgument.
// Ordering differs by target:
//   - x86: helper args are PUSHed directly in front of the CALL, so the call
//     itself is lowered FIRST (see the #ifndef _M_X64 below), then pushes are
//     inserted before it.
//   - x64: args are collected, flipped into ABI order, and the call is
//     lowered LAST.
// Returns instrCall so lowering continues from the call (ToVar instructions
// may sit between the ArgOuts).
IR::Instr *
LowererMD::LowerCallHelper(IR::Instr *instrCall)
{
    IR::Opnd *argOpnd = instrCall->UnlinkSrc2();
    IR::Instr *prevInstr = nullptr;
    IR::JnHelperMethod helperMethod = instrCall->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
    instrCall->FreeSrc1();
#ifndef _M_X64
    // x86: lower the CALL now; the return value is deliberately discarded and
    // argument insertion restarts from instrCall on the next line.
    prevInstr = ChangeToHelperCall(instrCall, helperMethod);
#endif
    prevInstr = instrCall;
    while (argOpnd)
    {
        Assert(argOpnd->IsRegOpnd());
        IR::RegOpnd *regArg = argOpnd->AsRegOpnd();
        // Each chain link is a single-def sym whose def is the arg instruction.
        Assert(regArg->m_sym->m_isSingleDef);
        IR::Instr *instrArg = regArg->m_sym->m_instrDef;
        // Only ArgOut_A is expected, except for a few helpers that build
        // their argument lists with ExtendArg_A.
        Assert(instrArg->m_opcode == Js::OpCode::ArgOut_A ||
            (helperMethod == IR::JnHelperMethod::HelperOP_InitCachedScope && instrArg->m_opcode == Js::OpCode::ExtendArg_A) ||
            (helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj && instrArg->m_opcode == Js::OpCode::ExtendArg_A) ||
            (helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj && instrArg->m_opcode == Js::OpCode::ExtendArg_A)
            );
        prevInstr = LoadHelperArgument(prevInstr, instrArg->GetSrc1());
        argOpnd = instrArg->GetSrc2();
        // NOTE(review): if the arg load landed on the arg instruction itself,
        // step back past it before it is removed below — verify against the
        // arch-specific LoadHelperArgument return value.
        if (prevInstr == instrArg)
        {
            prevInstr = prevInstr->m_prev;
        }
        if (instrArg->m_opcode == Js::OpCode::ArgOut_A)
        {
            // The ArgOut is fully consumed: detach its operands and delete it.
            instrArg->UnlinkSrc1();
            if (argOpnd)
            {
                instrArg->UnlinkSrc2();
            }
            regArg->Free(this->m_func);
            instrArg->Remove();
        }
        else if (instrArg->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // ExtendArg defs stay in the IR; keep their sources alive across
            // loop back-edges.
            if (instrArg->GetSrc1()->IsRegOpnd())
            {
                m_lowerer->addToLiveOnBackEdgeSyms->Set(instrArg->GetSrc1()->AsRegOpnd()->GetStackSym()->m_id);
            }
        }
    }
    // Most helpers take the script context as an implicit trailing argument;
    // the HomeObj helpers do not.
    switch (helperMethod)
    {
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj:
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj:
        break;
    default:
        prevInstr = m_lowerer->LoadScriptContext(prevInstr);
        break;
    }
#ifdef _M_X64
    // x64: put the collected args in ABI order, then lower the call.
    FlipHelperCallArgsOrder();
    ChangeToHelperCall(instrCall, helperMethod);
#else
    // x86: the call was lowered up front; just clear the arg bookkeeping.
    this->lowererMDArch.ResetHelperArgsCount();
#endif
    // There might be ToVar in between the ArgOut, need to continue lower from the call still
    return instrCall;
}
  237. //
  238. // forwarding functions
  239. //
  240. IR::Instr *
  241. LowererMD::LowerCall(IR::Instr * callInstr, Js::ArgSlot argCount)
  242. {
  243. return this->lowererMDArch.LowerCall(callInstr, argCount);
  244. }
  245. IR::Instr *
  246. LowererMD::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
  247. {
  248. return this->lowererMDArch.LowerCallI(callInstr, callFlags, isHelper, insertBeforeInstrForCFG);
  249. }
  250. IR::Instr *
  251. LowererMD::LowerAsmJsCallI(IR::Instr * callInstr)
  252. {
  253. #if DBG
  254. if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
  255. {
  256. this->GenerateDebugBreak(callInstr->m_next);
  257. }
  258. #endif
  259. return this->lowererMDArch.LowerAsmJsCallI(callInstr);
  260. }
  261. IR::Instr *
  262. LowererMD::LowerAsmJsCallE(IR::Instr * callInstr)
  263. {
  264. #if DBG
  265. if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
  266. {
  267. this->GenerateDebugBreak(callInstr->m_next);
  268. }
  269. #endif
  270. return this->lowererMDArch.LowerAsmJsCallE(callInstr);
  271. }
  272. IR::Instr *
  273. LowererMD::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
  274. {
  275. return this->lowererMDArch.LowerWasmArrayBoundsCheck(instr, addrOpnd);
  276. }
  277. void LowererMD::LowerAtomicStore(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
  278. {
  279. return this->lowererMDArch.LowerAtomicStore(dst, src1, insertBeforeInstr);
  280. }
  281. void LowererMD::LowerAtomicLoad(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
  282. {
  283. return this->lowererMDArch.LowerAtomicLoad(dst, src1, insertBeforeInstr);
  284. }
  285. IR::Instr *
  286. LowererMD::LowerAsmJsLdElemHelper(IR::Instr * callInstr)
  287. {
  288. return this->lowererMDArch.LowerAsmJsLdElemHelper(callInstr);
  289. }
  290. IR::Instr *
  291. LowererMD::LowerAsmJsStElemHelper(IR::Instr * callInstr)
  292. {
  293. return this->lowererMDArch.LowerAsmJsStElemHelper(callInstr);
  294. }
  295. IR::Instr *
  296. LowererMD::LoadInt64HelperArgument(IR::Instr * instr, IR::Opnd* opnd)
  297. {
  298. return this->lowererMDArch.LoadInt64HelperArgument(instr, opnd);
  299. }
  300. IR::Instr *
  301. LowererMD::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  302. {
  303. return this->lowererMDArch.LoadHelperArgument(instr, opndArg);
  304. }
  305. IR::Instr *
  306. LowererMD::LoadDoubleHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  307. {
  308. return this->lowererMDArch.LoadDoubleHelperArgument(instr, opndArg);
  309. }
  310. IR::Instr *
  311. LowererMD::LoadFloatHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  312. {
  313. return this->lowererMDArch.LoadFloatHelperArgument(instr, opndArg);
  314. }
  315. IR::Instr *
  316. LowererMD::LowerEntryInstr(IR::EntryInstr * entryInstr)
  317. {
  318. return this->lowererMDArch.LowerEntryInstr(entryInstr);
  319. }
  320. IR::Instr *
  321. LowererMD::LowerExitInstr(IR::ExitInstr * exitInstr)
  322. {
  323. return this->lowererMDArch.LowerExitInstr(exitInstr);
  324. }
  325. IR::Instr *
  326. LowererMD::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  327. {
  328. return this->lowererMDArch.LowerExitInstrAsmJs(exitInstr);
  329. }
  330. IR::Instr *
  331. LowererMD::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  332. {
  333. return this->lowererMDArch.LoadNewScObjFirstArg(instr, dst, extraArgs);
  334. }
// LowerTry: Lower a try-region entry into a call to the given runtime helper:
//   helper(tryAddr, handlerAddr, framePtr[, spillSize, argsSize][, hasBailedOutOffset], scriptContext)
// Arguments are loaded last-first because each LoadHelperArgument inserts in
// front of the same label. The helper returns a continuation address, which
// is then jumped through via a multi-branch.
IR::Instr *
LowererMD::LowerTry(IR::Instr *tryInstr, IR::JnHelperMethod helperMethod)
{
    // Mark the entry to the try
    IR::Instr *instr = tryInstr->GetNextRealInstrOrLabel();
    AssertMsg(instr->IsLabelInstr(), "No label at the entry to a try?");
    IR::LabelInstr *tryAddr = instr->AsLabelInstr();

    // Arg 5: ScriptContext
    this->m_lowerer->LoadScriptContext(tryAddr);

    if (tryInstr->m_opcode == Js::OpCode::TryCatch || (this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout)))
    {
        // Arg 4 : hasBailedOutOffset — stack offset of the flag the runtime
        // sets when the region bails out.
        IR::Opnd * hasBailedOutOffset = IR::IntConstOpnd::New(this->m_func->m_hasBailedOutSym->m_offset, TyInt32, this->m_func);
        this->LoadHelperArgument(tryAddr, hasBailedOutOffset);
    }

#ifdef _M_X64
    // x64 only: the helper runs the region on a "fake frame" and needs the
    // frame's args/spill sizes, which are only known at encode time — emit
    // pseudo-ops the encoder resolves.
    // Arg: args size
    IR::RegOpnd *argsSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdArgSize, argsSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, argsSizeOpnd);

    // Arg: spill size
    IR::RegOpnd *spillSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdSpillSize, spillSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, spillSizeOpnd);
#endif

    // Arg 3: frame pointer
    IR::RegOpnd *ebpOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegBlockPointer(), TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, ebpOpnd);

    // Arg 2: handler address — the branch target of the Try instruction.
    IR::LabelInstr *helperAddr = tryInstr->AsBranchInstr()->GetTarget();
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(helperAddr, this->m_func));

    // Arg 1: try address
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(tryAddr, this->m_func));

    // Call the helper; its return value is the continuation address.
    IR::RegOpnd *continuationAddr =
        IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr *callInstr = IR::Instr::New(
        Js::OpCode::Call, continuationAddr, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    tryAddr->InsertBefore(callInstr);
    this->LowerCall(callInstr, 0);

#ifdef _M_X64
    {
        // Emit some instruction to separate the CALL from the JMP following it. The OS stack unwinder
        // mistakes the JMP for the start of the epilog otherwise.
        IR::Instr *nop = IR::Instr::New(Js::OpCode::NOP, m_func);
        tryAddr->InsertBefore(nop);
    }
#endif

    // Jump to the continuation address supplied by the helper
    IR::BranchInstr *branchInstr = IR::MultiBranchInstr::New(Js::OpCode::JMP, continuationAddr, this->m_func);
    tryAddr->InsertBefore(branchInstr);

    return tryInstr->m_prev;
}
  388. IR::Instr *
  389. LowererMD::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  390. {
  391. return lowererMDArch.LowerEHRegionReturn(insertBeforeInstr, targetOpnd);
  392. }
// LowerLeaveNull: Lower the end of a finally region. Returns a null (zero)
// continuation address to the EH helper so execution resumes wherever the
// try or the exception handler decides. On x64 the region ran on a fake
// frame, so the special ReturnFromCallWithFakeFrame thunk is pushed as the
// return target with the frame sizes preloaded into dedicated registers.
IR::Instr *
LowererMD::LowerLeaveNull(IR::Instr *finallyEndInstr)
{
    IR::Instr *instrPrev = finallyEndInstr->m_prev;
    IR::Instr *instr = nullptr;

    // Return a null continuation address to the helper: execution will resume at the point determined by the try
    // or the exception handler.
    // XOR eax, eax — zero the return register.
    IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg,this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::XOR, retReg, this->m_func);
    IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr->SetSrc1(eaxOpnd);
    instr->SetSrc2(eaxOpnd);
    finallyEndInstr->InsertBefore(instr);

#if _M_X64
    {
        // amd64_ReturnFromCallWithFakeFrame expects to find the spill size and args size
        // in REG_EH_SPILL_SIZE and REG_EH_ARGS_SIZE.

        // MOV REG_EH_SPILL_SIZE, spillSize (resolved by the encoder)
        IR::Instr *movR8 = IR::Instr::New(Js::OpCode::LdSpillSize,
            IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR8);

        // MOV REG_EH_ARGS_SIZE, argsSize (resolved by the encoder)
        IR::Instr *movR9 = IR::Instr::New(Js::OpCode::LdArgSize,
            IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR9);

        // Load the thunk address and push it so RET below returns into it.
        IR::Opnd *targetOpnd = IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
        IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
            targetOpnd,
            IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movTarget);

        IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
        push->SetSrc1(targetOpnd);
        finallyEndInstr->InsertBefore(push);
    }
#endif

    // RET 0 with the zeroed return register as src2 (keeps it live).
    IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::RET, this->m_func);
    instr->SetSrc1(intSrc);
    instr->SetSrc2(retReg);
    finallyEndInstr->InsertBefore(instr);
    finallyEndInstr->Remove();
    return instrPrev;
}
  439. ///----------------------------------------------------------------------------
  440. ///
  441. /// LowererMD::Init
  442. ///
  443. ///----------------------------------------------------------------------------
  444. void
  445. LowererMD::Init(Lowerer *lowerer)
  446. {
  447. m_lowerer = lowerer;
  448. this->lowererMDArch.Init(this);
  449. #ifdef ENABLE_WASM_SIMD
  450. Simd128InitOpcodeMap();
  451. #endif
  452. }
  453. ///----------------------------------------------------------------------------
  454. ///
  455. /// LowererMD::LoadInputParamCount
  456. ///
  457. /// Load the passed-in parameter count from the appropriate EBP slot.
  458. ///
  459. ///----------------------------------------------------------------------------
  460. IR::Instr *
  461. LowererMD::LoadInputParamCount(IR::Instr * instrInsert, int adjust, bool needFlags)
  462. {
  463. IR::Instr * instr;
  464. IR::RegOpnd * dstOpnd;
  465. IR::SymOpnd * srcOpnd;
  466. srcOpnd = Lowerer::LoadCallInfo(instrInsert);
  467. dstOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
  468. instr = IR::Instr::New(Js::OpCode::MOV, dstOpnd, srcOpnd, this->m_func);
  469. instrInsert->InsertBefore(instr);
  470. // Copy the callinfo before masking off the param count
  471. Assert(Js::CallInfo::ksizeofCount == 24);
  472. // Mask off call flags from callinfo
  473. instr = IR::Instr::New(Js::OpCode::AND, dstOpnd, dstOpnd,
  474. IR::IntConstOpnd::New(0x00FFFFFF, TyMachReg, this->m_func, true), this->m_func);
  475. instrInsert->InsertBefore(instr);
  476. instr = m_lowerer->InsertSub(true, dstOpnd, dstOpnd, IR::IntConstOpnd::New(-adjust, TyMachReg, this->m_func), instrInsert);
  477. return instr;
  478. }
  479. IR::Instr *
  480. LowererMD::LoadStackArgPtr(IR::Instr * instr)
  481. {
  482. if (this->m_func->IsLoopBody())
  483. {
  484. // Get the first user param from the interpreter frame instance that was passed in.
  485. // These args don't include the func object and callinfo; we just need to advance past "this".
  486. // t1 = MOV [prm1 + m_inParams]
  487. // dst = LEA &[t1 + sizeof(var)]
  488. Assert(this->m_func->m_loopParamSym);
  489. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  490. size_t offset = Js::InterpreterStackFrame::GetOffsetOfInParams();
  491. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
  492. IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  493. IR::Instr *instrLdParams = IR::Instr::New(Js::OpCode::MOV, tmpOpnd, indirOpnd, this->m_func);
  494. instr->InsertBefore(instrLdParams);
  495. indirOpnd = IR::IndirOpnd::New(tmpOpnd, sizeof(Js::Var), TyMachReg, this->m_func);
  496. instr->SetSrc1(indirOpnd);
  497. instr->m_opcode = Js::OpCode::LEA;
  498. return instr->m_prev;
  499. }
  500. else
  501. {
  502. return this->lowererMDArch.LoadStackArgPtr(instr);
  503. }
  504. }
  505. IR::Instr *
  506. LowererMD::LoadArgumentsFromFrame(IR::Instr * instr)
  507. {
  508. if (this->m_func->IsLoopBody())
  509. {
  510. // Get the arguments ptr from the interpreter frame instance that was passed in.
  511. Assert(this->m_func->m_loopParamSym);
  512. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  513. int32 offset = (int32)Js::InterpreterStackFrame::GetOffsetOfArguments();
  514. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyMachReg, this->m_func));
  515. }
  516. else
  517. {
  518. instr->SetSrc1(this->CreateStackArgumentsSlotOpnd());
  519. }
  520. instr->m_opcode = Js::OpCode::MOV;
  521. return instr->m_prev;
  522. }
  523. // load argument count as I4
  524. IR::Instr *
  525. LowererMD::LoadArgumentCount(IR::Instr * instr)
  526. {
  527. if (this->m_func->IsLoopBody())
  528. {
  529. // Pull the arg count from the interpreter frame instance that was passed in.
  530. // (The callinfo in the loop body's frame just shows the single parameter, the interpreter frame.)
  531. Assert(this->m_func->m_loopParamSym);
  532. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  533. size_t offset = Js::InterpreterStackFrame::GetOffsetOfInSlotsCount();
  534. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, (int32)offset, TyInt32, this->m_func));
  535. }
  536. else
  537. {
  538. StackSym *sym = StackSym::New(TyVar, this->m_func);
  539. this->m_func->SetArgOffset(sym, (Js::JavascriptFunctionArgIndex_CallInfo - Js::JavascriptFunctionArgIndex_Frame) * sizeof(Js::Var));
  540. instr->SetSrc1(IR::SymOpnd::New(sym, TyMachReg, this->m_func));
  541. }
  542. instr->m_opcode = Js::OpCode::MOV;
  543. return instr->m_prev;
  544. }
  545. IR::Instr *
  546. LowererMD::LoadHeapArguments(IR::Instr * instrArgs)
  547. {
  548. return this->lowererMDArch.LoadHeapArguments(instrArgs);
  549. }
  550. IR::Instr *
  551. LowererMD::LoadHeapArgsCached(IR::Instr * instrArgs)
  552. {
  553. return this->lowererMDArch.LoadHeapArgsCached(instrArgs);
  554. }
  555. ///----------------------------------------------------------------------------
  556. ///
  557. /// LowererMD::ChangeToHelperCall
  558. ///
  559. /// Change the current instruction to a call to the given helper.
  560. ///
  561. ///----------------------------------------------------------------------------
// ChangeToHelperCall: Turn callInstr into a CALL to the given JnHelper.
// If the instruction carries bailout info, the bailout is split off first
// (either into a dedicated BailOn* instruction for the two src1-driven kinds,
// or via SplitBailOnImplicitCall) and lowered AFTER the call, because helper
// arguments already loaded must be drained before another helper call starts.
// Returns the instruction from which lowering should continue.
IR::Instr *
LowererMD::ChangeToHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod, IR::LabelInstr *labelBailOut,
    IR::Opnd *opndBailOutArg, IR::PropertySymOpnd *propSymOpnd, bool isHelperContinuation)
{
#if DBG
    this->m_lowerer->ReconcileWithLowererStateOnHelperCall(callInstr, helperMethod);
#endif
    IR::Instr * bailOutInstr = callInstr;
    if (callInstr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = callInstr->GetBailOutKind();
        if (bailOutKind == IR::BailOutOnNotPrimitive ||
            bailOutKind == IR::BailOutOnPowIntIntOverflow)
        {
            // Clone the call onto a fresh instruction and repurpose the
            // original (keeping its bailout info) as the BailOn* check,
            // driven by opndBailOutArg.
            callInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
            bailOutInstr->TransferTo(callInstr);
            bailOutInstr->InsertBefore(callInstr);

            bailOutInstr->m_opcode = bailOutKind == IR::BailOutOnNotPrimitive
                                        ? Js::OpCode::BailOnNotPrimitive
                                        : Js::OpCode::BailOnPowIntIntOverflow;
            bailOutInstr->SetSrc1(opndBailOutArg);
        }
        else
        {
            // All other kinds: split off an implicit-call bailout check.
            bailOutInstr = this->m_lowerer->SplitBailOnImplicitCall(callInstr);
        }
    }

    callInstr->m_opcode = Js::OpCode::CALL;

    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->lowererMDArch.GetHelperArgsCount(), m_func);
    if (helperCallOpnd->IsDiagHelperCallOpnd())
    {
        // Load arguments for the wrapper.
        this->LoadHelperArgument(callInstr, IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKindDynamicMisc, m_func));
        this->m_lowerer->LoadScriptContext(callInstr);
    }

    callInstr->SetSrc1(helperCallOpnd);

    IR::Instr * instrRet = this->lowererMDArch.LowerCall(callInstr, 0);

    if (bailOutInstr != callInstr)
    {
        // The bailout needs to be lowered after we lower the helper call because the helper argument
        // has already been loaded. We need to drain them on AMD64 before starting another helper call
        if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotObject)
        {
            this->m_lowerer->LowerBailOnNotObject(bailOutInstr, nullptr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotPrimitive ||
            bailOutInstr->m_opcode == Js::OpCode::BailOnPowIntIntOverflow)
        {
            this->m_lowerer->LowerBailOnTrue(bailOutInstr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOut)
        {
            this->m_lowerer->GenerateBailOut(bailOutInstr, nullptr, labelBailOut);
        }
        else
        {
            this->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr, nullptr, labelBailOut, propSymOpnd, isHelperContinuation);
        }
    }

#if DBG
    // Debug aid: optionally break right after the helper call.
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(instrRet->m_next);
    }
#endif

    return instrRet;
}
  629. IR::Instr* LowererMD::ChangeToHelperCallMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  630. {
  631. this->m_lowerer->LoadScriptContext(instr);
  632. return this->ChangeToHelperCall(instr, helperMethod);
  633. }
  634. ///----------------------------------------------------------------------------
  635. ///
  636. /// LowererMD::ChangeToAssign
  637. ///
  638. /// Change to a MOV.
  639. ///
  640. ///----------------------------------------------------------------------------
  641. IR::Instr *
  642. LowererMD::ChangeToAssignNoBarrierCheck(IR::Instr * instr)
  643. {
  644. return ChangeToAssign(instr, instr->GetDst()->GetType());
  645. }
  646. IR::Instr *
  647. LowererMD::ChangeToAssign(IR::Instr * instr)
  648. {
  649. return ChangeToWriteBarrierAssign(instr, instr->m_func);
  650. }
  651. IR::Instr *
  652. LowererMD::ChangeToAssign(IR::Instr * instr, IRType type)
  653. {
  654. Assert(!instr->HasBailOutInfo() || instr->GetBailOutKind() == IR::BailOutExpectingString);
  655. #if _M_IX86
  656. if (IRType_IsInt64(type))
  657. {
  658. return LowererMDArch::ChangeToAssignInt64(instr);
  659. }
  660. #endif
  661. instr->m_opcode = LowererMDArch::GetAssignOp(type);
  662. Legalize(instr);
  663. return instr;
  664. }
  665. ///----------------------------------------------------------------------------
  666. ///
  667. /// LowererMD::LowerRet
  668. ///
  669. /// Lower Ret to "MOV EAX, src"
  670. /// The real RET is inserted at the exit of the function when emitting the
  671. /// epilog.
  672. ///
  673. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerRet(IR::Instr * retInstr)
{
    // Move the return value into the ABI return register; the actual RET is
    // emitted with the epilog. For asm.js/wasm the register (GPR vs XMM) is
    // chosen from the declared return type; Void returns need no register.
    IR::RegOpnd * retReg = nullptr;
    bool needsRetReg = true;
#ifdef ASMJS_PLAT
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody()) // for loop body ret is the bytecodeoffset
    {
        Js::AsmJsRetType::Which asmType = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType();
        IRType regType = TyInt32;
        switch (asmType)
        {
        case Js::AsmJsRetType::Double:
            regType = TyFloat64;
            break;
        case Js::AsmJsRetType::Float:
            regType = TyFloat32;
            break;
        case Js::AsmJsRetType::Int64:
        {
            regType = TyInt64;
#if LOWER_SPLIT_INT64
            // 32-bit target: the int64 result is split across two 32-bit
            // registers — low half in the regular return register (src1),
            // high half moved into EDX (src2).
            regType = TyInt32;
            {
                IR::Opnd* lowOpnd = nullptr;
                IR::Opnd* highOpnd = nullptr;
                if (retInstr->GetSrc1()->IsRegOpnd())
                {
                    Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(retInstr->GetSrc1()->AsRegOpnd());
                    lowOpnd = srcPair.low;
                    highOpnd = srcPair.high;
                }
                else if (retInstr->GetSrc1()->IsImmediateOpnd())
                {
                    // Split the constant into its two 32-bit halves.
                    int64 value = retInstr->GetSrc1()->GetImmediateValue(m_func);
                    lowOpnd = IR::IntConstOpnd::New(value & UINT_MAX, regType, m_func);
                    highOpnd = IR::IntConstOpnd::New(value >> 32, regType, m_func);
                }
                else
                {
                    Assert(UNREACHED);
                }
                retInstr->UnlinkSrc1();
                retInstr->SetSrc1(lowOpnd);

                // Mov high bits to edx
                IR::RegOpnd* regEdx = IR::RegOpnd::New(regType, this->m_func);
                regEdx->SetReg(RegEDX);
                Lowerer::InsertMove(regEdx, highOpnd, retInstr);
                retInstr->SetSrc2(regEdx);
            }
#endif
            break;
        }
        case Js::AsmJsRetType::Void:
            // Nothing flows back to the caller.
            needsRetReg = false;
            break;
        case Js::AsmJsRetType::Signed:
            regType = TyInt32;
            break;
#ifdef ENABLE_WASM_SIMD
        // SIMD returns come back in a 128-bit vector register, typed per lane layout.
        case Js::AsmJsRetType::Float32x4:
            regType = TySimd128F4;
            break;
        case Js::AsmJsRetType::Int32x4:
            regType = TySimd128I4;
            break;
        case Js::AsmJsRetType::Float64x2:
            regType = TySimd128D2;
            break;
        case Js::AsmJsRetType::Int64x2:
            regType = TySimd128I2;
            break;
        case Js::AsmJsRetType::Int16x8:
            regType = TySimd128I8;
            break;
        case Js::AsmJsRetType::Int8x16:
            regType = TySimd128I16;
            break;
        case Js::AsmJsRetType::Uint32x4:
            regType = TySimd128U4;
            break;
        case Js::AsmJsRetType::Uint16x8:
            regType = TySimd128U8;
            break;
        case Js::AsmJsRetType::Uint8x16:
            regType = TySimd128U16;
            break;
        case Js::AsmJsRetType::Bool32x4:
            regType = TySimd128B4;
            break;
        case Js::AsmJsRetType::Bool16x8:
            regType = TySimd128B8;
            break;
        case Js::AsmJsRetType::Bool8x16:
            regType = TySimd128B16;
            break;
#endif
        default:
            Assert(UNREACHED);
        }
        if (needsRetReg)
        {
            // Pin the value to the asm.js return register for its type.
            retReg = IR::RegOpnd::New(regType, m_func);
            retReg->SetReg(lowererMDArch.GetRegReturnAsmJs(regType));
        }
    }
    else
#endif
    {
        // Regular JS: the boxed return value goes in the machine return register.
        retReg = IR::RegOpnd::New(TyMachReg, m_func);
        retReg->SetReg(lowererMDArch.GetRegReturn(TyMachReg));
    }
    if (needsRetReg)
    {
        // MOV retReg, src — keep retReg as src1 so it stays live to the epilog.
        Lowerer::InsertMove(retReg, retInstr->UnlinkSrc1(), retInstr);
        retInstr->SetSrc1(retReg);
    }
    return retInstr;
}
///----------------------------------------------------------------------------
///
/// LowererMD::LowerCondBranch
///
///     Lower a HIR conditional branch in place into a machine compare
///     (TEST / CMP / COMISD / COMISS) followed by the machine-dependent
///     conditional jump. Returns the newly inserted compare instruction
///     (nullptr if the branch lowered without one).
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerCondBranch(IR::Instr * instr)
{
    AssertMsg(instr->GetSrc1() != nullptr, "Expected src opnds on conditional branch");
    Assert(!instr->HasBailOutInfo());
    IR::Opnd * opndSrc1 = instr->UnlinkSrc1();
    IR::Instr * instrPrev = nullptr;
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrNotNull_A:
    case Js::OpCode::BrOnObject_A:
    case Js::OpCode::BrOnClassConstructor:
    case Js::OpCode::BrOnBaseConstructorKind:
        // Single-operand boolean branches: TEST the value against itself and
        // branch on the zero flag. Every opcode in this group except
        // BrFalse_A takes the branch when the operand is non-zero.
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(opndSrc1);
        instr->InsertBefore(instrPrev);
        if (instr->m_opcode != Js::OpCode::BrFalse_A)
        {
            instr->m_opcode = Js::OpCode::JNE;
        }
        else
        {
            instr->m_opcode = Js::OpCode::JEQ;
        }
        break;

    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        // These are expected to have been rewritten by the MI lowerer.
        AssertMsg(0, "BrOnEmpty opcodes should not be passed to MD lowerer");
        break;

    default:
        // Two-operand relational/equality branches.
        IR::Opnd * opndSrc2 = instr->UnlinkSrc2();
        AssertMsg(opndSrc2 != nullptr, "Expected 2 src's on non-boolean branch");

        if (opndSrc1->IsFloat())
        {
            // Floating-point comparison: pick the scalar compare matching the width.
            Assert(opndSrc1->GetType() == opndSrc2->GetType());
            instrPrev = IR::Instr::New(opndSrc1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS, m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
        }
        else
        {
            // This check assumes src1 is a variable.
            if (opndSrc2->IsIntConstOpnd() && opndSrc2->AsIntConstOpnd()->GetValue() == 0)
            {
                // Comparison against zero: TEST src1, src1 instead of CMP src1, 0.
                instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
                instrPrev->SetSrc1(opndSrc1);
                instrPrev->SetSrc2(opndSrc1);
                instr->InsertBefore(instrPrev);
                opndSrc2->Free(this->m_func);
            }
            else
            {
                instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
                //
                // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
                // relevant only on AMD64.
                //
                opndSrc1 = instrPrev->SetSrc1(opndSrc1);
                opndSrc2 = instrPrev->SetSrc2(opndSrc2);
                instr->InsertBefore(instrPrev);
                // The CMP may have two memory/immediate operands; legalize it now.
                LowererMD::Legalize(instrPrev);
            }
        }
        // Replace the HIR branch opcode with its machine-dependent jump.
        instr->m_opcode = LowererMD::MDBranchOpcode(instr->m_opcode);
        break;
    }

    return instrPrev;
}
  872. ///----------------------------------------------------------------------------
  873. ///
  874. /// LowererMD::MDBranchOpcode
  875. ///
  876. /// Map HIR branch opcode to machine-dependent equivalent.
  877. ///
  878. ///----------------------------------------------------------------------------
  879. Js::OpCode
  880. LowererMD::MDBranchOpcode(Js::OpCode opcode)
  881. {
  882. switch (opcode)
  883. {
  884. case Js::OpCode::BrSrEq_A:
  885. case Js::OpCode::BrEq_A:
  886. case Js::OpCode::BrSrNotNeq_A:
  887. case Js::OpCode::BrNotNeq_A:
  888. case Js::OpCode::BrAddr_A:
  889. return Js::OpCode::JEQ;
  890. case Js::OpCode::BrSrNeq_A:
  891. case Js::OpCode::BrNeq_A:
  892. case Js::OpCode::BrSrNotEq_A:
  893. case Js::OpCode::BrNotEq_A:
  894. case Js::OpCode::BrNotAddr_A:
  895. return Js::OpCode::JNE;
  896. case Js::OpCode::BrLt_A:
  897. case Js::OpCode::BrNotGe_A:
  898. return Js::OpCode::JLT;
  899. case Js::OpCode::BrLe_A:
  900. case Js::OpCode::BrNotGt_A:
  901. return Js::OpCode::JLE;
  902. case Js::OpCode::BrGt_A:
  903. case Js::OpCode::BrNotLe_A:
  904. return Js::OpCode::JGT;
  905. case Js::OpCode::BrGe_A:
  906. case Js::OpCode::BrNotLt_A:
  907. return Js::OpCode::JGE;
  908. default:
  909. AssertMsg(0, "Branch opcode has no MD mapping");
  910. return opcode;
  911. }
  912. }
  913. Js::OpCode
  914. LowererMD::MDConvertFloat64ToInt32Opcode(const RoundMode roundMode)
  915. {
  916. switch (roundMode)
  917. {
  918. case RoundModeTowardZero:
  919. return Js::OpCode::CVTTSD2SI;
  920. case RoundModeTowardInteger:
  921. return Js::OpCode::Nop;
  922. case RoundModeHalfToEven:
  923. return Js::OpCode::CVTSD2SI;
  924. default:
  925. AssertMsg(0, "RoundMode has no MD mapping.");
  926. return Js::OpCode::Nop;
  927. }
  928. }
  929. Js::OpCode
  930. LowererMD::MDUnsignedBranchOpcode(Js::OpCode opcode)
  931. {
  932. switch (opcode)
  933. {
  934. case Js::OpCode::BrEq_A:
  935. case Js::OpCode::BrSrEq_A:
  936. case Js::OpCode::BrSrNotNeq_A:
  937. case Js::OpCode::BrNotNeq_A:
  938. case Js::OpCode::BrAddr_A:
  939. return Js::OpCode::JEQ;
  940. case Js::OpCode::BrNeq_A:
  941. case Js::OpCode::BrSrNeq_A:
  942. case Js::OpCode::BrSrNotEq_A:
  943. case Js::OpCode::BrNotEq_A:
  944. case Js::OpCode::BrNotAddr_A:
  945. return Js::OpCode::JNE;
  946. case Js::OpCode::BrLt_A:
  947. case Js::OpCode::BrNotGe_A:
  948. return Js::OpCode::JB;
  949. case Js::OpCode::BrLe_A:
  950. case Js::OpCode::BrNotGt_A:
  951. return Js::OpCode::JBE;
  952. case Js::OpCode::BrGt_A:
  953. case Js::OpCode::BrNotLe_A:
  954. return Js::OpCode::JA;
  955. case Js::OpCode::BrGe_A:
  956. case Js::OpCode::BrNotLt_A:
  957. return Js::OpCode::JAE;
  958. default:
  959. AssertMsg(0, "Branch opcode has no MD mapping");
  960. return opcode;
  961. }
  962. }
  963. Js::OpCode LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode opcode)
  964. {
  965. Assert(opcode == Js::OpCode::BrLt_A || opcode == Js::OpCode::BrGe_A);
  966. return opcode == Js::OpCode::BrLt_A ? Js::OpCode::JSB : Js::OpCode::JNSB;
  967. }
  968. void LowererMD::ChangeToAdd(IR::Instr *const instr, const bool needFlags)
  969. {
  970. Assert(instr);
  971. Assert(instr->GetDst());
  972. Assert(instr->GetSrc1());
  973. Assert(instr->GetSrc2());
  974. if(instr->GetDst()->IsFloat64())
  975. {
  976. Assert(instr->GetSrc1()->IsFloat64());
  977. Assert(instr->GetSrc2()->IsFloat64());
  978. Assert(!needFlags);
  979. instr->m_opcode = Js::OpCode::ADDSD;
  980. return;
  981. }
  982. else if (instr->GetDst()->IsFloat32())
  983. {
  984. Assert(instr->GetSrc1()->IsFloat32());
  985. Assert(instr->GetSrc2()->IsFloat32());
  986. Assert(!needFlags);
  987. instr->m_opcode = Js::OpCode::ADDSS;
  988. return;
  989. }
  990. instr->m_opcode = Js::OpCode::ADD;
  991. Legalize(instr);
  992. if (!needFlags)
  993. {
  994. // Prefer INC for add by one
  995. if ((instr->GetDst()->IsEqual(instr->GetSrc1()) &&
  996. instr->GetSrc2()->IsIntConstOpnd() &&
  997. instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1) ||
  998. (instr->GetDst()->IsEqual(instr->GetSrc2()) &&
  999. instr->GetSrc1()->IsIntConstOpnd() &&
  1000. instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 1))
  1001. {
  1002. if (instr->GetSrc1()->IsIntConstOpnd())
  1003. {
  1004. // Swap the operands, such that we would create (dst = INC src2)
  1005. instr->SwapOpnds();
  1006. }
  1007. instr->FreeSrc2();
  1008. instr->m_opcode = Js::OpCode::INC;
  1009. }
  1010. }
  1011. }
  1012. void LowererMD::ChangeToSub(IR::Instr *const instr, const bool needFlags)
  1013. {
  1014. Assert(instr);
  1015. Assert(instr->GetDst());
  1016. Assert(instr->GetSrc1());
  1017. Assert(instr->GetSrc2());
  1018. if(instr->GetDst()->IsFloat64())
  1019. {
  1020. Assert(instr->GetSrc1()->IsFloat64());
  1021. Assert(instr->GetSrc2()->IsFloat64());
  1022. Assert(!needFlags);
  1023. instr->m_opcode = Js::OpCode::SUBSD;
  1024. return;
  1025. }
  1026. // Prefer DEC for sub by one
  1027. if(instr->GetDst()->IsEqual(instr->GetSrc1()) &&
  1028. instr->GetSrc2()->IsIntConstOpnd() &&
  1029. instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1)
  1030. {
  1031. instr->FreeSrc2();
  1032. instr->m_opcode = Js::OpCode::DEC;
  1033. return;
  1034. }
  1035. instr->m_opcode = Js::OpCode::SUB;
  1036. }
  1037. void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
  1038. {
  1039. Assert(instr);
  1040. Assert(instr->GetDst());
  1041. Assert(instr->GetSrc1());
  1042. Assert(instr->GetSrc2());
  1043. switch(instr->m_opcode)
  1044. {
  1045. case Js::OpCode::Shl_A:
  1046. case Js::OpCode::Shl_I4:
  1047. instr->m_opcode = Js::OpCode::SHL;
  1048. break;
  1049. case Js::OpCode::Shr_A:
  1050. case Js::OpCode::Shr_I4:
  1051. instr->m_opcode = Js::OpCode::SAR;
  1052. break;
  1053. case Js::OpCode::ShrU_A:
  1054. case Js::OpCode::ShrU_I4:
  1055. instr->m_opcode = Js::OpCode::SHR;
  1056. break;
  1057. case Js::OpCode::Rol_I4:
  1058. instr->m_opcode = Js::OpCode::ROL;
  1059. break;
  1060. case Js::OpCode::Ror_I4:
  1061. instr->m_opcode = Js::OpCode::ROR;
  1062. break;
  1063. default:
  1064. Assert(false);
  1065. __assume(false);
  1066. }
  1067. if(instr->GetSrc2()->IsIntConstOpnd() && !instr->GetSrc1()->IsInt64())
  1068. {
  1069. // Only values between 0-31 mean anything
  1070. IntConstType value = instr->GetSrc2()->AsIntConstOpnd()->GetValue();
  1071. value &= TySize[instr->GetDst()->GetType()] == 8 ? 63 : 31;
  1072. instr->GetSrc2()->AsIntConstOpnd()->SetValue(value);
  1073. }
  1074. }
// Lower a multiply to IMUL. When an overflow check wider than 32 bits is
// required, the multiply is routed through the IMUL-destination register
// (EAX on x86/x64 per GetRegIMulDestLower) so the checking code that follows
// can inspect the full result; otherwise the plain two-operand IMUL2 form is
// emitted via EmitInt4Instr.
void LowererMD::ChangeToIMul(IR::Instr *const instr, bool hasOverflowCheck)
{
    // If non-32 bit overflow check is needed, we have to use the IMUL form.
    if (hasOverflowCheck && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
    {
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, instr->m_func);
        IR::Opnd *temp2 = nullptr;
        // MOV eax, src1
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, regEAX, instr->GetSrc1(), instr->m_func));

        if (instr->GetSrc2()->IsImmediateOpnd())
        {
            // An immediate src2 must be materialized in a register first.
            // MOV reg, imm
            temp2 = IR::RegOpnd::New(TyInt32, instr->m_func);

            // Propagate the don't-encode flag from the original immediate operand.
            IR::Opnd * src2 = instr->GetSrc2();
            bool dontEncode = false;
            if (src2->IsHelperCallOpnd())
            {
                dontEncode = true;
            }
            else if (src2->IsIntConstOpnd() || src2->IsAddrOpnd())
            {
                dontEncode = src2->IsIntConstOpnd() ? src2->AsIntConstOpnd()->m_dontEncode : src2->AsAddrOpnd()->m_dontEncode;
            }
            else if (src2->IsInt64ConstOpnd())
            {
                dontEncode = false;
            }
            else
            {
                AssertMsg(false, "Unexpected immediate opnd");
                throw Js::OperationAbortedException();
            }
            instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, temp2,
                IR::IntConstOpnd::New((IntConstType)instr->GetSrc2()->GetImmediateValue(instr->m_func), TyInt32, instr->m_func, dontEncode),
                instr->m_func));
        }

        // eax = IMUL eax, reg
        instr->m_opcode = Js::OpCode::IMUL;
        instr->ReplaceSrc1(regEAX);

        if (temp2 != nullptr)
            instr->ReplaceSrc2(temp2);

        // Capture the original dst before redirecting the IMUL result into EAX.
        auto *dst = instr->GetDst()->Copy(instr->m_func);
        instr->ReplaceDst(regEAX);

        // MOV dst, eax
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, dst, regEAX, instr->m_func));
    }
    else
        EmitInt4Instr(instr); // IMUL2
}
// Offset (in machine-word slots) from the frame pointer to the first formal
// parameter.
const uint16
LowererMD::GetFormalParamOffset()
{
    //In x86\x64 formal params were offset from EBP by the EBP chain, return address, and the 2 non-user params
    return 4;
}
  1131. ///----------------------------------------------------------------------------
  1132. ///
  1133. /// LowererMD::ForceDstToReg
  1134. ///
  1135. ///----------------------------------------------------------------------------
  1136. void
  1137. LowererMD::ForceDstToReg(IR::Instr *instr)
  1138. {
  1139. IR::Opnd * dst = instr->GetDst();
  1140. if (dst->IsRegOpnd())
  1141. {
  1142. return;
  1143. }
  1144. if(dst->IsFloat64())
  1145. {
  1146. instr->SinkDst(Js::OpCode::MOVSD);
  1147. return;
  1148. }
  1149. instr->SinkDst(Js::OpCode::MOV);
  1150. }
// Legal operand forms for one machine opcode: a LegalForms mask for the dst
// and one for each of the two possible srcs (L_None = operand must be absent).
struct LegalInstrForms
{
    const LegalForms dst, src[2];
};
// Named legal-form sets referenced from MdOpCodes.h. Naming convention:
// LEGAL_<dst>[_<src1>[_<src2>]] with R = register, M = memory, I = imm32,
// P(tr) = pointer-sized immediate, N = none, and an O prefix = optional.
namespace LegalInstrFormsImpl
{
    LegalInstrForms LEGAL_NONE = { L_None, { L_None, L_None } };
    // LF_Custom entries must be handled by an explicit case in Legalize's switch.
    LegalInstrForms LEGAL_CUSTOM = { LF_Custom, { LF_Custom, LF_Custom } };
    LegalInstrForms LEGAL_CALL = { LF_Optional | L_Reg, { L_Reg | L_Mem | L_Ptr, L_None } };
    LegalInstrForms LEGAL_R = { L_Reg, { L_None, L_None } };
    LegalInstrForms LEGAL_M = { L_Mem, { L_None, L_None } };
    LegalInstrForms LEGAL_RM = { L_Reg | L_Mem, { L_None, L_None } };
    LegalInstrForms LEGAL_N_I = { L_None, { L_Imm32, L_None } };
    LegalInstrForms LEGAL_N_RMI = { L_None, { L_Reg | L_Mem | L_Imm32, L_None } };
    LegalInstrForms LEGAL_R_R = { L_Reg, { L_Reg, L_None } };
    LegalInstrForms LEGAL_R_M = { L_Reg, { L_Mem, L_None } };
    LegalInstrForms LEGAL_M_M = { L_Mem, { L_Mem, L_None } };
    LegalInstrForms LEGAL_R_OR = { L_Reg, { LF_Optional | L_Reg, L_None } };
    LegalInstrForms LEGAL_R_RM = { L_Reg, { L_Reg | L_Mem, L_None } };
    LegalInstrForms LEGAL_R_RMI = { L_Reg, { L_Reg | L_Mem | L_Imm32, L_None } };
    LegalInstrForms LEGAL_RM_RM = { L_Reg | L_Mem, { L_Reg | L_Mem, L_None } };
    LegalInstrForms LEGAL_N_R_R = { L_None, { L_Reg, L_Reg } };
    LegalInstrForms LEGAL_N_I_OR = { L_None, { L_Imm32, LF_Optional | L_Reg } };
    LegalInstrForms LEGAL_N_R_RM = { L_None, { L_Reg, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_N_RM_RI = { L_None, { L_Reg | L_Mem, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_N_RM_RMI = { L_None, { L_Reg | L_Mem, L_Reg | L_Mem | L_Imm32 } };
    LegalInstrForms LEGAL_R_R_RM = { L_Reg, { L_Reg, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_R_R_RI = { L_Reg, { L_Reg, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_R_R_RMI = { L_Reg, { L_Reg, L_Reg | L_Mem | L_Imm32 } };
    LegalInstrForms LEGAL_RM_R_I = { L_Reg | L_Mem, { L_Reg, L_Imm32 } };
    LegalInstrForms LEGAL_R_RM_I = { L_Reg, { L_Reg | L_Mem, L_Imm32 } };
    LegalInstrForms LEGAL_RM_RM_RM = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_RM_RM_RI = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_RM_RM_RMI = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Mem | L_Imm32 } };
};
// Per-MD-opcode legal-forms table, generated from MdOpCodes.h. Indexed in
// Legalize by (opcode - (Js::OpCode::MDStart + 1)).
LegalInstrForms AllLegalInstrForms[] = {
#define MACRO(name, jnLayout, attrib, byte2, form, opByte, dope, leadIn, legal, ...) LegalInstrFormsImpl::legal,
#include "MdOpCodes.h"
#undef MACRO
};
  1191. template <bool verify>
  1192. void
  1193. LowererMD::Legalize(IR::Instr *const instr, bool fPostRegAlloc)
  1194. {
  1195. Assert(instr);
  1196. Assert(!instr->isInlineeEntryInstr
  1197. || (instr->m_opcode == Js::OpCode::MOV && instr->GetSrc1()->IsIntConstOpnd()));
  1198. const bool isMDOpCode = instr->m_opcode > Js::OpCode::MDStart;
  1199. Assert(isMDOpCode || Lowerer::ValidOpcodeAfterLower(instr, instr->m_func));
  1200. const LegalInstrForms legalInstrForms = isMDOpCode ? AllLegalInstrForms[instr->m_opcode - (Js::OpCode::MDStart + 1)] : LegalInstrFormsImpl::LEGAL_NONE;
  1201. LegalForms dstForms = legalInstrForms.dst;
  1202. LegalForms src1Forms = legalInstrForms.src[0];
  1203. LegalForms src2Forms = legalInstrForms.src[1];
  1204. bool hasSwitchCase = true;
  1205. bool isCustomForm = (dstForms & LF_Custom) != 0;;
  1206. switch(instr->m_opcode)
  1207. {
  1208. case Js::OpCode::JA:
  1209. case Js::OpCode::JAE:
  1210. case Js::OpCode::JB:
  1211. case Js::OpCode::JBE:
  1212. case Js::OpCode::JEQ:
  1213. case Js::OpCode::JNE:
  1214. case Js::OpCode::JLT:
  1215. case Js::OpCode::JLE:
  1216. case Js::OpCode::JGT:
  1217. case Js::OpCode::JGE:
  1218. case Js::OpCode::JNO:
  1219. case Js::OpCode::JO:
  1220. case Js::OpCode::JP:
  1221. case Js::OpCode::JNP:
  1222. case Js::OpCode::JNSB:
  1223. case Js::OpCode::JSB:
  1224. case Js::OpCode::JMP:
  1225. Assert(instr->IsBranchInstr());
  1226. break;
  1227. case Js::OpCode::MOV:
  1228. {
  1229. Assert(instr->GetSrc2() == nullptr);
  1230. IR::Opnd *const dst = instr->GetDst();
  1231. const IRType dstType = dst->GetType();
  1232. IR::Opnd *const src = instr->GetSrc1();
  1233. const IRType srcType = src->GetType();
  1234. if(TySize[dstType] > TySize[srcType])
  1235. {
  1236. if (verify)
  1237. {
  1238. return;
  1239. }
  1240. #if DBG
  1241. switch(dstType)
  1242. {
  1243. case TyInt32:
  1244. case TyUint32:
  1245. #ifdef _M_X64
  1246. case TyInt64:
  1247. case TyUint64:
  1248. #endif
  1249. case TyVar:
  1250. break;
  1251. default:
  1252. Assert(false);
  1253. }
  1254. #endif
  1255. IR::IntConstOpnd *const intConstantSrc = src->IsIntConstOpnd() ? src->AsIntConstOpnd() : nullptr;
  1256. const auto UpdateIntConstantSrc = [&](const size_t extendedValue)
  1257. {
  1258. Assert(intConstantSrc);
  1259. #ifdef _M_X64
  1260. if(TySize[dstType] > sizeof(IntConstType))
  1261. {
  1262. instr->ReplaceSrc1(
  1263. IR::AddrOpnd::New(
  1264. reinterpret_cast<void *>(extendedValue),
  1265. IR::AddrOpndKindConstantVar,
  1266. instr->m_func,
  1267. intConstantSrc->m_dontEncode));
  1268. }
  1269. else
  1270. #endif
  1271. {
  1272. intConstantSrc->SetType(dstType);
  1273. intConstantSrc->SetValue(static_cast<IntConstType>(extendedValue));
  1274. }
  1275. };
  1276. switch(srcType)
  1277. {
  1278. case TyInt8:
  1279. if(intConstantSrc)
  1280. {
  1281. UpdateIntConstantSrc(static_cast<int8>(intConstantSrc->GetValue())); // sign-extend
  1282. break;
  1283. }
  1284. instr->m_opcode = Js::OpCode::MOVSX;
  1285. break;
  1286. case TyUint8:
  1287. if(intConstantSrc)
  1288. {
  1289. UpdateIntConstantSrc(static_cast<uint8>(intConstantSrc->GetValue())); // zero-extend
  1290. break;
  1291. }
  1292. instr->m_opcode = Js::OpCode::MOVZX;
  1293. break;
  1294. case TyInt16:
  1295. if(intConstantSrc)
  1296. {
  1297. UpdateIntConstantSrc(static_cast<int16>(intConstantSrc->GetValue())); // sign-extend
  1298. break;
  1299. }
  1300. instr->m_opcode = Js::OpCode::MOVSXW;
  1301. break;
  1302. case TyUint16:
  1303. if(intConstantSrc)
  1304. {
  1305. UpdateIntConstantSrc(static_cast<uint16>(intConstantSrc->GetValue())); // zero-extend
  1306. break;
  1307. }
  1308. instr->m_opcode = Js::OpCode::MOVZXW;
  1309. break;
  1310. #ifdef _M_X64
  1311. case TyInt32:
  1312. if(intConstantSrc)
  1313. {
  1314. UpdateIntConstantSrc(static_cast<int32>(intConstantSrc->GetValue())); // sign-extend
  1315. break;
  1316. }
  1317. instr->m_opcode = Js::OpCode::MOVSXD;
  1318. break;
  1319. case TyUint32:
  1320. if(intConstantSrc)
  1321. {
  1322. UpdateIntConstantSrc(static_cast<uint32>(intConstantSrc->GetValue())); // zero-extend
  1323. break;
  1324. }
  1325. switch(dst->GetKind())
  1326. {
  1327. case IR::OpndKindReg:
  1328. // (mov r0.u32, r1.u32) clears the upper 32 bits of r0
  1329. dst->SetType(TyUint32);
  1330. instr->m_opcode = Js::OpCode::MOV_TRUNC;
  1331. break;
  1332. case IR::OpndKindSym:
  1333. case IR::OpndKindIndir:
  1334. case IR::OpndKindMemRef:
  1335. // Even if the src is a reg, we don't know if the upper 32 bits are zero. Copy the value to a
  1336. // reg first to zero-extend it to 64 bits, and then copy the 64-bit value to the original dst.
  1337. instr->HoistSrc1(Js::OpCode::MOV_TRUNC);
  1338. instr->GetSrc1()->SetType(dstType);
  1339. break;
  1340. default:
  1341. Assert(false);
  1342. __assume(false);
  1343. }
  1344. break;
  1345. #endif
  1346. default:
  1347. Assert(false);
  1348. __assume(false);
  1349. }
  1350. }
  1351. else if (TySize[dstType] < TySize[srcType])
  1352. {
  1353. instr->GetSrc1()->SetType(dst->GetType());
  1354. }
  1355. if(instr->m_opcode == Js::OpCode::MOV)
  1356. {
  1357. // Allow 64 bit values in x64 as well
  1358. src1Forms = L_Reg | L_Mem | L_Ptr;
  1359. #if _M_X64
  1360. if (dst->IsMemoryOpnd())
  1361. {
  1362. // Only allow <= 32 bit values
  1363. src1Forms = L_Reg | L_Imm32;
  1364. }
  1365. #endif
  1366. LegalizeOpnds<verify>(
  1367. instr,
  1368. L_Reg | L_Mem,
  1369. src1Forms,
  1370. L_None);
  1371. }
  1372. else
  1373. {
  1374. LegalizeOpnds<verify>(
  1375. instr,
  1376. L_Reg,
  1377. L_Reg | L_Mem,
  1378. L_None);
  1379. }
  1380. break;
  1381. }
  1382. case Js::OpCode::CMOVA:
  1383. case Js::OpCode::CMOVAE:
  1384. case Js::OpCode::CMOVB:
  1385. case Js::OpCode::CMOVBE:
  1386. case Js::OpCode::CMOVE:
  1387. case Js::OpCode::CMOVG:
  1388. case Js::OpCode::CMOVGE:
  1389. case Js::OpCode::CMOVL:
  1390. case Js::OpCode::CMOVLE:
  1391. case Js::OpCode::CMOVNE:
  1392. case Js::OpCode::CMOVNO:
  1393. case Js::OpCode::CMOVNP:
  1394. case Js::OpCode::CMOVNS:
  1395. case Js::OpCode::CMOVO:
  1396. case Js::OpCode::CMOVP:
  1397. case Js::OpCode::CMOVS:
  1398. if (instr->GetSrc2())
  1399. {
  1400. Assert(instr->GetDst()->GetSize() == instr->GetSrc2()->GetSize());
  1401. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1402. // 0 shouldn't be the src2 of a CMOVcc.
  1403. // CMOVcc doesn't support moving a constant and the legalizer will hoist the load of the constant
  1404. // to a register. If the constant was 0, Peeps will turn it into a XOR which, in turn, may change
  1405. // the zero flags and hence the result of CMOVcc. If you do want to CMOVcc 0, you should load 0
  1406. // into a register before the instruction whose result the CMOVcc depends on.
  1407. Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() != 0);
  1408. // sometimes we have fake src1 to help reg alloc
  1409. LegalizeOpnds<verify>(
  1410. instr,
  1411. L_Reg,
  1412. L_Reg,
  1413. L_Reg | L_Mem);
  1414. }
  1415. else
  1416. {
  1417. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1418. LegalizeOpnds<verify>(
  1419. instr,
  1420. L_Reg,
  1421. L_Reg | L_Mem,
  1422. L_None);
  1423. }
  1424. break;
  1425. case Js::OpCode::MOVSD:
  1426. case Js::OpCode::MOVSS:
  1427. Assert(instr->GetDst()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD? TyFloat64 : TyFloat32) || instr->GetDst()->IsSimd128());
  1428. Assert(instr->GetSrc1()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD ? TyFloat64 : TyFloat32) || instr->GetSrc1()->IsSimd128());
  1429. goto LegalizeDefault;
  1430. case Js::OpCode::NOP:
  1431. {
  1432. Assert(!instr->GetSrc2());
  1433. #if _M_IX86
  1434. RegNum edx = RegEDX;
  1435. #else
  1436. RegNum edx = RegRDX;
  1437. #endif
  1438. // Special case handled by peeps
  1439. Assert(!instr->GetDst() || (instr->GetDst()->IsRegOpnd() && instr->GetDst()->AsRegOpnd()->GetReg() == edx));
  1440. break;
  1441. }
  1442. case Js::OpCode::MOVSX:
  1443. case Js::OpCode::MOVSXW:
  1444. Assert(instr->GetDst()->GetSize() == 4 || instr->GetDst()->GetSize() == 8);
  1445. Assert(instr->m_opcode != Js::OpCode::MOVSX || instr->GetSrc1()->GetSize() == 1);
  1446. Assert(instr->m_opcode != Js::OpCode::MOVSXW || instr->GetSrc1()->GetSize() == 2);
  1447. goto LegalizeDefault;
  1448. case Js::OpCode::LOCKCMPXCHG8B:
  1449. case Js::OpCode::CMPXCHG8B:
  1450. {
  1451. const auto getRegMask = [](IR::Opnd* opnd)
  1452. {
  1453. Assert(opnd->IsListOpnd());
  1454. return opnd->AsListOpnd()->Reduce(
  1455. [](int i, IR::Opnd* opnd) {
  1456. Assert(opnd->IsRegOpnd());
  1457. return 1 << opnd->AsRegOpnd()->GetReg();
  1458. },
  1459. [](int i, uint32 regmask, uint32 allReg)
  1460. {
  1461. AssertMsg((allReg & regmask) == 0, "Should not have the same register twice");
  1462. return allReg | regmask;
  1463. }, 0);
  1464. };
  1465. #if _M_IX86
  1466. const uint32 dstMask = (1 << RegEAX | 1 << RegEDX);
  1467. const uint32 srcMask = (1 << RegEAX | 1 << RegEBX | 1 << RegECX | 1 << RegEDX);
  1468. #else
  1469. const uint32 dstMask = (1 << RegRAX | 1 << RegRDX);
  1470. const uint32 srcMask = (1 << RegRAX | 1 << RegRBX | 1 << RegRCX | 1 << RegRDX);
  1471. #endif
  1472. AssertMsg(!instr->m_func->isPostFinalLower || !instr->GetDst(), "After FinalLower, there should not be a dst");
  1473. AssertMsg(instr->m_func->isPostFinalLower || getRegMask(instr->GetDst()) == dstMask,
  1474. "Before FinalLower, instr should have eax,edx as dst");
  1475. AssertMsg(!instr->m_func->isPostFinalLower || !instr->GetSrc2(), "After FinalLower, there should not be a src2");
  1476. AssertMsg(instr->m_func->isPostFinalLower || getRegMask(instr->GetSrc2()) == srcMask,
  1477. "Before FinalLower, instr should have eax,edx,ecx,ebx as src2");
  1478. LegalizeSrc<verify>(
  1479. instr,
  1480. instr->GetSrc1(),
  1481. L_Mem);
  1482. break;
  1483. }
  1484. case Js::OpCode::TEST:
  1485. if((instr->GetSrc1()->IsImmediateOpnd() && !instr->GetSrc2()->IsImmediateOpnd()) ||
  1486. (instr->GetSrc2()->IsMemoryOpnd() && !instr->GetSrc1()->IsMemoryOpnd()))
  1487. {
  1488. if (verify)
  1489. {
  1490. AssertMsg(false, "Invalid Js::OpCode::TEST opnd order. Missing legalization");
  1491. return;
  1492. }
  1493. instr->SwapOpnds();
  1494. }
  1495. goto LegalizeDefault;
  1496. case Js::OpCode::SHL:
  1497. case Js::OpCode::SHR:
  1498. case Js::OpCode::SAR:
  1499. case Js::OpCode::ROL:
  1500. case Js::OpCode::ROR:
  1501. if (verify)
  1502. {
  1503. Assert(instr->GetSrc2()->IsIntConstOpnd()
  1504. || instr->GetSrc2()->AsRegOpnd()->GetReg() == LowererMDArch::GetRegShiftCount());
  1505. }
  1506. else
  1507. {
  1508. if(!instr->GetSrc2()->IsIntConstOpnd())
  1509. {
  1510. IR::Instr *const newInstr = instr->HoistSrc2(Js::OpCode::MOV);
  1511. newInstr->GetDst()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1512. instr->GetSrc2()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1513. }
  1514. instr->GetSrc2()->SetType(TyUint8);
  1515. }
  1516. goto LegalizeDefault;
  1517. case Js::OpCode::TZCNT:
  1518. Assert(AutoSystemInfo::Data.TZCntAvailable());
  1519. goto LegalizeDefault;
  1520. case Js::OpCode::LZCNT:
  1521. Assert(AutoSystemInfo::Data.LZCntAvailable());
  1522. goto LegalizeDefault;
  1523. case Js::OpCode::ROUNDSD:
  1524. case Js::OpCode::ROUNDSS:
  1525. Assert(AutoSystemInfo::Data.SSE4_1Available());
  1526. goto LegalizeDefault;
  1527. default:
  1528. LegalizeDefault:
  1529. if (isMDOpCode)
  1530. {
  1531. AssertMsg(!isCustomForm, "Custom legal forms should have a case in the switch statement");
  1532. hasSwitchCase = false;
  1533. if (EncoderMD::IsOPEQ(instr))
  1534. {
  1535. MakeDstEquSrc1<verify>(instr);
  1536. Assert((dstForms & L_FormMask) == (src1Forms & L_FormMask));
  1537. }
  1538. LegalizeOpnds<verify>(
  1539. instr,
  1540. dstForms,
  1541. src1Forms,
  1542. src2Forms);
  1543. }
  1544. break;
  1545. }
  1546. #if DBG
  1547. // Asserting general rules
  1548. // There should be at most 1 memory opnd in an instruction
  1549. if (instr->GetDst() && instr->GetDst()->IsMemoryOpnd())
  1550. {
  1551. // All memref address need to fit in a dword
  1552. Assert(!instr->GetDst()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetDst()->AsMemRefOpnd()->GetMemLoc()));
  1553. if (instr->GetSrc1())
  1554. {
  1555. Assert(instr->GetSrc1()->IsEqual(instr->GetDst()) || !instr->GetSrc1()->IsMemoryOpnd());
  1556. if (instr->GetSrc2())
  1557. {
  1558. Assert(!instr->GetSrc2()->IsMemoryOpnd());
  1559. }
  1560. }
  1561. }
  1562. else if (instr->GetSrc1() && instr->GetSrc1()->IsMemoryOpnd())
  1563. {
  1564. // All memref address need to fit in a dword
  1565. Assert(!instr->GetSrc1()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetSrc1()->AsMemRefOpnd()->GetMemLoc()));
  1566. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsMemoryOpnd());
  1567. }
  1568. else if (instr->GetSrc2() && instr->GetSrc2()->IsMemRefOpnd())
  1569. {
  1570. // All memref address need to fit in a dword
  1571. Assert(Math::FitsInDWord((size_t)instr->GetSrc2()->AsMemRefOpnd()->GetMemLoc()));
  1572. }
  1573. // Non-MOV (second operand) immediate need to fit in DWORD for AMD64
  1574. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsImmediateOpnd()
  1575. || (TySize[instr->GetSrc2()->GetType()] != 8) || Math::FitsInDWord(instr->GetSrc2()->GetImmediateValue(instr->m_func)));
  1576. #endif
  1577. }
// Legalize an instruction's dst, src1, and src2 against the supplied form
// masks, delegating to LegalizeDst/LegalizeSrc. Also enforces the encoding
// rule that at most one operand may be a memory operand (except dst==src1
// for op-equal instructions).
template <bool verify>
void LowererMD::LegalizeOpnds(IR::Instr *const instr, const LegalForms dstForms, LegalForms src1Forms, LegalForms src2Forms)
{
    Assert(instr);
    // A zero form mask means the operand must be absent, unless LF_Optional is set.
    Assert(dstForms & LF_Optional || !instr->GetDst() == !dstForms);
    Assert(src1Forms & LF_Optional || !instr->GetSrc1() == !src1Forms);
    Assert(src2Forms & LF_Optional || !instr->GetSrc2() == !src2Forms);
    Assert(src1Forms || !src2Forms);
    const auto NormalizeForms = [](LegalForms forms) -> LegalForms
    {
#ifdef _M_X64
        // On x64 a pointer-sized immediate slot also accepts a 32-bit immediate.
        if(forms & L_Ptr)
        {
            forms |= L_Imm32;
        }
#else
        // On x86 pointers and 32-bit immediates are the same width, so the
        // two forms imply each other.
        if(forms & (L_Imm32 | L_Ptr))
        {
            forms |= L_Imm32 | L_Ptr;
        }
#endif
        // Remove Legal Flags
        forms &= L_FormMask;
        return forms;
    };
    if(dstForms && instr->GetDst())
    {
        LegalizeDst<verify>(instr, NormalizeForms(dstForms));
    }
    if(!src1Forms || !instr->GetSrc1())
    {
        return;
    }
    bool hasMemOpnd = instr->GetDst() && instr->GetDst()->IsMemoryOpnd();
    // Allow src1 to be a mem opnd if dst & src1 must be the same
    if (hasMemOpnd && src1Forms & L_Mem && !EncoderMD::IsOPEQ(instr))
    {
        src1Forms ^= L_Mem;
    }
    LegalizeSrc<verify>(instr, instr->GetSrc1(), NormalizeForms(src1Forms));
    hasMemOpnd |= instr->GetSrc1()->IsMemoryOpnd();
    // If dst or src1 is a mem opnd, mem2 cannot be a mem opnd
    if(hasMemOpnd && src2Forms & L_Mem)
    {
        src2Forms ^= L_Mem;
    }
    if(src2Forms && instr->GetSrc2())
    {
        LegalizeSrc<verify>(instr, instr->GetSrc2(), NormalizeForms(src2Forms));
    }
}
// Legalize the dst operand against the allowed forms. In verify mode, illegal
// forms only assert. Otherwise: out-of-range memref addresses are hoisted
// into a base register, and a dst that is illegal for this opcode is
// redirected through a fresh register that is stored back to the original
// location after the instruction.
template <bool verify>
void LowererMD::LegalizeDst(IR::Instr *const instr, const LegalForms forms)
{
    Assert(instr);
    Assert(forms);
    IR::Opnd *dst = instr->GetDst();
    Assert(dst);
#ifndef _M_X64
    AssertMsg(!dst->IsInt64(), "Int64 supported only on x64");
#endif
    switch(dst->GetKind())
    {
        case IR::OpndKindReg:
            // A register dst is legal for every opcode that defines a dst.
            Assert(forms & L_Reg);
            return;

        case IR::OpndKindMemRef:
        {
            IR::MemRefOpnd *const memRefOpnd = dst->AsMemRefOpnd();
            if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
            {
                if (verify)
                {
                    AssertMsg(false, "Memory reference not legal in dst opnd. Missing legalization");
                    return;
                }
                // Address doesn't encode directly; load it into a base register first.
                dst = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
            }
            // fall through
        }
        case IR::OpndKindSym:
        case IR::OpndKindIndir:
            if(forms & L_Mem)
            {
                return;
            }
            break;

        default:
            Assert(false);
            __assume(false);
    }

    if (verify)
    {
        AssertMsg(false, "Dst opnd not legal. Missing legalization");
        return;
    }

    // Use a reg dst, then store that reg into the original dst
    Assert(forms & L_Reg);
    const IRType irType = dst->GetType();
    IR::RegOpnd *const regOpnd = IR::RegOpnd::New(irType, instr->m_func);
    regOpnd->SetValueType(dst->GetValueType());
    instr->UnlinkDst();
    instr->SetDst(regOpnd);
    instr->InsertAfter(IR::Instr::New(GetStoreOp(irType), dst, regOpnd, instr->m_func));

    // If the original dst is the same as one of the srcs, hoist a src into the same reg and replace the same srcs with the reg
    const bool equalsSrc1 = instr->GetSrc1() && dst->IsEqual(instr->GetSrc1());
    const bool equalsSrc2 = instr->GetSrc2() && dst->IsEqual(instr->GetSrc2());
    if(!(equalsSrc1 || equalsSrc2))
    {
        return;
    }
    const Js::OpCode loadOpCode = GetLoadOp(irType);
    if(equalsSrc1)
    {
        instr->HoistSrc1(loadOpCode, RegNOREG, regOpnd->m_sym);
        if(equalsSrc2)
        {
            instr->ReplaceSrc2(regOpnd);
        }
    }
    else
    {
        instr->HoistSrc2(loadOpCode, RegNOREG, regOpnd->m_sym);
    }
}
  1703. bool LowererMD::HoistLargeConstant(IR::IndirOpnd *indirOpnd, IR::Opnd *src, IR::Instr *instr) {
  1704. if (indirOpnd != nullptr)
  1705. {
  1706. if (indirOpnd->GetOffset() == 0)
  1707. {
  1708. instr->ReplaceSrc(src, indirOpnd->GetBaseOpnd());
  1709. }
  1710. else
  1711. {
  1712. // Hoist the address load as LEA [reg + offset]
  1713. // with the reg = MOV <some address within 32-bit range at the start of the function
  1714. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
  1715. Lowerer::InsertLea(regOpnd, indirOpnd, instr);
  1716. instr->ReplaceSrc(src, regOpnd);
  1717. }
  1718. return true;
  1719. }
  1720. return false;
  1721. }
// Legalizes 'src' (src1 or src2 of 'instr') into one of the allowed 'forms'.
// Constants too wide for an imm32 are hoisted through the function's constant
// address table or into a register. When 'verify' is true, nothing is changed;
// illegal opnds assert instead (debug verification mode).
template <bool verify>
void LowererMD::LegalizeSrc(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms)
{
    Assert(instr);
    Assert(src);
    Assert(src == instr->GetSrc1() || src == instr->GetSrc2());
    Assert(forms);
#ifndef _M_X64
    AssertMsg(!src->IsInt64() || src->IsMemoryOpnd(), "Int64 supported only on x64");
#endif

    switch(src->GetKind())
    {
        case IR::OpndKindReg:
            // Register srcs are legal whenever L_Reg is permitted; nothing to do.
            Assert(forms & L_Reg);
            return;

        case IR::OpndKindIntConst:
            if(forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::IntConstOpnd * intOpnd = src->AsIntConstOpnd();
                // A constant narrower than 8 bytes, or one that fits in a signed
                // 32-bit immediate, needs no hoisting when L_Imm32 is allowed.
                if ((TySize[intOpnd->GetType()] != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord(intOpnd->GetValue())))
                {
                    if (forms & L_Imm32)
                    {
                        // the constant fits in 32-bit, no need to hoist
                        return;
                    }
                    break;
                }
                if (verify)
                {
                    AssertMsg(false, "IntConstOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                // The actual value for inlinee entry instr isn't determined until encoder
                // So it need to be hoisted conventionally.
                if (!instr->isInlineeEntryInstr)
                {
                    Assert(forms & L_Reg);
                    // Share the 64-bit constant via the function's constant address table.
                    IR::IntConstOpnd * newIntOpnd = intOpnd->Copy(instr->m_func)->AsIntConstOpnd();
                    IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd(intOpnd->GetValue(), newIntOpnd, IR::AddrOpndKindConstantAddress, TyMachPtr, Js::OpCode::MOV);
                    if (HoistLargeConstant(indirOpnd, src, instr))
                    {
                        return;
                    }
                }
            }
#endif
            break;

        case IR::OpndKindFloatConst:
            break; // assume for now that it always needs to be hoisted

        case IR::OpndKindInt64Const:
            if (forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::Int64ConstOpnd * int64Opnd = src->AsInt64ConstOpnd();
                if ((forms & L_Imm32) && ((src->GetSize() != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord(int64Opnd->GetValue()))))
                {
                    // the immediate fits in 32-bit, no need to hoist
                    return;
                }
                if (verify)
                {
                    AssertMsg(false, "Int64ConstOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                // Load the wide constant into a fresh register before the instruction.
                Assert(forms & L_Reg);
                IR::Opnd* regOpnd = IR::RegOpnd::New(src->GetType(), instr->m_func);
                IR::Instr* moveToReg = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, instr->m_func);
                instr->InsertBefore(moveToReg);
                instr->ReplaceSrc(src, regOpnd);
                return;
            }
#endif
            break;

        case IR::OpndKindAddr:
            if (forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::AddrOpnd * addrOpnd = src->AsAddrOpnd();
                if ((forms & L_Imm32) && ((TySize[addrOpnd->GetType()] != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord((size_t)addrOpnd->m_address))))
                {
                    // the address fits in 32-bit, no need to hoist
                    return;
                }
                if (verify)
                {
                    AssertMsg(false, "AddrOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                Assert(!instr->isInlineeEntryInstr);
                Assert(forms & L_Reg);
                // TODO: michhol, remove cast after making m_address intptr
                IR::AddrOpnd * newAddrOpnd = addrOpnd->Copy(instr->m_func)->AsAddrOpnd();
                // Share the 64-bit address via the function's constant address table.
                IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd((intptr_t)addrOpnd->m_address, newAddrOpnd, addrOpnd->GetAddrOpndKind(), TyMachPtr, Js::OpCode::MOV);
                if (HoistLargeConstant(indirOpnd, src, instr))
                {
                    return;
                }
            }
#endif
            break;

        case IR::OpndKindMemRef:
        {
            IR::MemRefOpnd *const memRefOpnd = src->AsMemRefOpnd();
            if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
            {
                if (verify)
                {
                    AssertMsg(false, "Memory reference not legal in src opnd. Missing legalization");
                    return;
                }
                // Hoist the raw address into a register so the memory src becomes encodable.
                src = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
            }
            // fall through
        }

        case IR::OpndKindSym:
        case IR::OpndKindIndir:
            if(forms & L_Mem)
            {
                return;
            }
            break;

        case IR::OpndKindHelperCall:
        case IR::OpndKindLabel:
            Assert(!instr->isInlineeEntryInstr);
            Assert(forms & L_Ptr);
            return;

        default:
            Assert(false);
            __assume(false);
    }

    if (verify)
    {
        AssertMsg(false, "Src opnd not legal. Missing legalization");
        return;
    }

    // Hoist the src into a reg
    Assert(forms & L_Reg);
    Assert(!(instr->GetDst() && instr->GetDst()->IsEqual(src)));
    const Js::OpCode loadOpCode = GetLoadOp(src->GetType());
    if(src == instr->GetSrc2())
    {
        instr->HoistSrc2(loadOpCode);
        return;
    }
    // Hoisting src1: if src2 is the same opnd, make it reuse the hoisted reg.
    const bool equalsSrc2 = instr->GetSrc2() && src->IsEqual(instr->GetSrc2());
    IR::Instr * hoistInstr = instr->HoistSrc1(loadOpCode);
    if(equalsSrc2)
    {
        instr->ReplaceSrc2(hoistInstr->GetDst());
    }
    // The inlinee-entry marker travels with the hoisted load, since that is
    // where the constant now lives.
    hoistInstr->isInlineeEntryInstr = instr->isInlineeEntryInstr;
    instr->isInlineeEntryInstr = false;
}
// Explicit instantiations of the legalizer templates:
// <false> performs legalization (used during lowering);
// <true> only verifies that opnds are already legal.
template void LowererMD::Legalize<false>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<false>(IR::Instr *const instr, const LegalForms dstForms, const LegalForms src1Forms, LegalForms src2Forms);
template void LowererMD::LegalizeDst<false>(IR::Instr *const instr, const LegalForms forms);
template void LowererMD::LegalizeSrc<false>(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms);
template void LowererMD::MakeDstEquSrc1<false>(IR::Instr *const instr);
#if DBG
// Verification-only variants are compiled in debug builds.
template void LowererMD::Legalize<true>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<true>(IR::Instr *const instr, const LegalForms dstForms, const LegalForms src1Forms, LegalForms src2Forms);
template void LowererMD::LegalizeDst<true>(IR::Instr *const instr, const LegalForms forms);
template void LowererMD::LegalizeSrc<true>(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms);
template void LowererMD::MakeDstEquSrc1<true>(IR::Instr *const instr);
#endif
  1901. IR::Instr *
  1902. LowererMD::LoadFunctionObjectOpnd(IR::Instr *instr, IR::Opnd *&functionObjOpnd)
  1903. {
  1904. IR::Opnd * src1 = instr->GetSrc1();
  1905. IR::Instr * instrPrev = instr->m_prev;
  1906. if (src1 == nullptr)
  1907. {
  1908. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  1909. StackSym *paramSym = StackSym::New(TyMachPtr, m_func);
  1910. IR::SymOpnd *paramOpnd = IR::SymOpnd::New(paramSym, TyMachPtr, m_func);
  1911. this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
  1912. IR::Instr * mov1 = IR::Instr::New(Js::OpCode::MOV, regOpnd, paramOpnd, m_func);
  1913. instr->InsertBefore(mov1);
  1914. functionObjOpnd = mov1->GetDst()->AsRegOpnd();
  1915. instrPrev = mov1;
  1916. instr->m_func->SetHasImplicitParamLoad();
  1917. }
  1918. else
  1919. {
  1920. // Inlinee, use the function object opnd on the instruction
  1921. functionObjOpnd = instr->UnlinkSrc1();
  1922. if (!functionObjOpnd->IsRegOpnd())
  1923. {
  1924. Assert(functionObjOpnd->IsAddrOpnd());
  1925. }
  1926. }
  1927. return instrPrev;
  1928. }
// Emits an inline fast path for Div_A when src2 is a tagged-int power of two.
// The slow path (helper label) is left for the caller to populate.
void
LowererMD::GenerateFastDivByPow2(IR::Instr *instr)
{
    //
    // Given:
    // dst = Div_A src1, src2
    // where src2 == power of 2
    //
    // Generate:
    //       MOV  s1, src1
    //       AND  s1, 0xFFFF000000000000 | (src2Value-1)       ----- test for tagged int and divisibility by src2Value  [int32]
    //       AND  s1, 0x00000001 | ((src2Value-1)<<1)                                                                   [int31]
    //       CMP  s1, AtomTag_IntPtr
    //       JNE  $divbyhalf
    //       MOV  s1, src1
    //       SAR  s1, log2(src2Value)                          ----- perform the divide
    //       OR   s1, 1
    //       MOV  dst, s1
    //       JMP  $done
    // $divbyhalf:
    //       AND  s1, 0xFFFF000000000000 | (src2Value-1>>1)    ----- test for tagged int and divisibility by src2Value/2 [int32]
    //       AND  s1, 0x00000001 | ((src2Value-1))                                                                       [int31]
    //       CMP  s1, AtomTag_IntPtr
    //       JNE  $helper
    //       MOV  s1, src1
    //       SAR  s1, log2(src2Value)                                                                                    [int32]
    //       SAR  s1, log2(src2Value) + 1                      ----- removes the tag and divides                         [int31]
    //       PUSH s1
    //       PUSH 0xXXXXXXXX (ScriptContext)
    //       CALL Op_FinishOddDivByPow2
    //       MOV  dst, eax
    //       JMP  $done
    // $helper:
    //       ...
    // $done:
    //

    // If src1 is known not to be a tagged int, the fast path can never be taken.
    if (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->IsNotInt())
        return;

    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
    IR::LabelInstr *divbyhalf = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyVar, m_func);

    AnalysisAssert(src2);
    Assert(src2->IsVar() && Js::TaggedInt::Is(src2->m_address) && (Math::IsPow2(Js::TaggedInt::ToInt32(src2->m_address))));
    int32 src2Value = Js::TaggedInt::ToInt32(src2->m_address);

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

#if INT32VAR
    // dontEncode as src2 is a power of 2.
    IR::Opnd *constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | (src2Value - 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    IR::Opnd *constant = IR::IntConstOpnd::New((0x00000001 | ((src2Value - 1) << 1)), TyInt32, m_func);
#endif

    // AND s1, constant -- checks tagged-int-ness and divisibility in one mask.
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // CMP s1, AtomTag_IntPtr
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE $divbyhalf
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, divbyhalf, m_func));

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

    s1 = s1->UseWithNewType(TyInt32, m_func)->AsRegOpnd();

    // SAR s1, log2(src2Value) -- the actual divide.
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func), m_func));

    if(s1->GetSize() != MachPtr)
    {
        s1 = s1->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
    }

#if INT32VAR
    GenerateInt32ToVarConversion(s1, instr);
#else
    // OR s1, 1 -- restore the tag bit.
    instr->InsertBefore(IR::Instr::New(Js::OpCode::OR, s1, s1, IR::IntConstOpnd::New(1, TyInt32, m_func), m_func));
#endif

    // MOV dst, s1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, s1, m_func));

    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // $divbyhalf:
    instr->InsertBefore(divbyhalf);

#if INT32VAR
    constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | ((src2Value-1) >> 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    constant = IR::IntConstOpnd::New((0x00000001 | (src2Value-1)), TyInt32, m_func);
#endif

    // AND s1, constant -- check divisibility by src2Value/2 (the "odd result" path).
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // CMP s1, AtomTag_IntPtr
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE $helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, helper, m_func));

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

    s1 = s1->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();

#if INT32VAR
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func);
#else
    // +1 removes the tag bit along with the divide.
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value) + 1, TyInt32, m_func);
#endif

    // SAR s1, shiftOpnd
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, shiftOpnd, m_func));

    // PUSH s1
    // PUSH ScriptContext
    // CALL Op_FinishOddDivByPow2
    {
        IR::JnHelperMethod helperMethod;

        if (instr->dstIsTempNumber)
        {
            // Use the in-place helper so the result reuses a stack temp number.
            IR::Opnd *tempOpnd;
            helperMethod = IR::HelperOp_FinishOddDivByPow2InPlace;
            Assert(dst->IsRegOpnd());
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);

            IR::Instr *load = this->m_lowerer->InsertLoadStackAddress(tempNumberSym, instr);
            tempOpnd = load->GetDst();
            this->lowererMDArch.LoadHelperArgument(instr, tempOpnd);
        }
        else
        {
            helperMethod = IR::HelperOp_FinishOddDivByPow2;
        }

        m_lowerer->LoadScriptContext(instr);

        lowererMDArch.LoadHelperArgument(instr, s1);

        IR::Instr *call = IR::Instr::New(Js::OpCode::Call, dst, IR::HelperCallOpnd::New(helperMethod, m_func), m_func);
        instr->InsertBefore(call);
        lowererMDArch.LowerCall(call, 0);
    }

    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // $helper:
    instr->InsertBefore(helper);

    // $done:
    instr->InsertAfter(done);
}
  2085. ///----------------------------------------------------------------------------
  2086. ///
  2087. /// LowererMD::GenerateFastCmSrEqConst
  2088. ///
  2089. ///----------------------------------------------------------------------------
// Emits an inline strict-equality compare against a constant (src2 must be a
// const reg opnd, e.g. null/true/false). The original instr is mutated in
// place into the "MOV dst, true" tail. Always returns true.
bool
LowererMD::GenerateFastCmSrEqConst(IR::Instr *instr)
{
    //
    // Given:
    // s1 = CmSrEq_A s2, s3
    // where either s2 or s3 is 'null', 'true' or 'false'
    //
    // Generate:
    //
    //     CMP s2, s3
    //     JEQ $mov_true
    //     MOV s1, Library.GetFalse()
    //     JMP $done
    // $mov_true:
    //     MOV s1, Library.GetTrue()
    // $done:
    //

    Assert(m_lowerer->IsConstRegOpnd(instr->GetSrc2()->AsRegOpnd()));

    IR::Opnd *opnd = instr->GetSrc1();
    IR::RegOpnd *opndReg = instr->GetSrc2()->AsRegOpnd();
    IR::LabelInstr *labelMovTrue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // src1 must be in a register for the CMP below.
    if (!opnd->IsRegOpnd())
    {
        IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
        IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, lhsReg, opnd, m_func);
        instr->InsertBefore(mov);
        opnd = lhsReg;
    }

    Assert(opnd->IsRegOpnd());

    // CMP s2, s3
    // JEQ $mov_true
    this->m_lowerer->InsertCompareBranch(opnd, opndReg->m_sym->GetConstOpnd(), Js::OpCode::BrEq_A, labelMovTrue, instr);

    // MOV s1, 'false'
    IR::Instr *instrMov = IR::Instr::New(Js::OpCode::MOV,
        instr->GetDst(),
        m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
        m_func);
    instr->InsertBefore(instrMov);

    // JMP $done
    IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    instr->InsertBefore(jmp);

    // $mov_true:
    instr->InsertBefore(labelMovTrue);

    // MOV s1, 'true' -- reuse the original instruction as the true-path MOV.
    instr->m_opcode = Js::OpCode::MOV;
    instr->UnlinkSrc1();
    instr->UnlinkSrc2();
    instr->SetSrc1(m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue));
    instr->ClearBailOutInfo();
    Legalize(instr);

    // $done:
    instr->InsertAfter(labelDone);

    return true;
}
  2146. ///----------------------------------------------------------------------------
  2147. ///
  2148. /// LowererMD::GenerateFastCmXxTaggedInt
  2149. ///
  2150. ///----------------------------------------------------------------------------
// Emits an inline tagged-int compare fast path for CmXx_A. Returns true when
// both sources are proven tagged ints (instr removed entirely); false when a
// helper label was emitted and the caller must generate the slow path.
// 'isInHelper' marks the fallthru label as helper code for block layout.
bool LowererMD::GenerateFastCmXxTaggedInt(IR::Instr *instr, bool isInHelper /* = false */)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints (i.e., are vars with the low bit set).
    //
    // Given:
    //
    //      Cmxx_A dst, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    //      MOV r1, src1
    //      if (==, !=, !== or ===)
    //          SUB r1, src2
    //          NEG r1                                  // Sets CF if r1 != 0
    //          SBB r1, r1                              // CF == 1 ? r1 = -1 : r1 = 0
    //      else
    //          MOV r2, 0
    //          CMP r1, src2
    //          SETcc r2
    //          DEC r2
    //          set r1 to r2
    //      AND r1, (notEqualResult - equalResult)
    //      ADD r1, equalResult
    //      MOV dst, r1
    //      JMP $fallthru
    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:

    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Opnd * dst = instr->GetDst();
    IR::RegOpnd * r1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func, isInHelper);

    Assert(src1 && src2 && dst);

    // Not tagged ints? Bail out before emitting anything.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }

    bool isNeqOp = instr->m_opcode == Js::OpCode::CmSrNeq_A || instr->m_opcode == Js::OpCode::CmNeq_A;
    intptr_t notEqualResult = isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();
    intptr_t equalResult = !isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();

    // Tagged ints? If both are proven tagged, skip the type check entirely.
    bool isTaggedInts = false;
    if (src1->IsTaggedInt())
    {
        if (src2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        this->GenerateSmIntPairTest(instr, src1, src2, helper);
    }

    // MOV r1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, m_func));

    // Nop means "use the SUB/NEG/SBB equality sequence"; otherwise SETcc.
    Js::OpCode setCC_Opcode = Js::OpCode::Nop;

    switch(instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmEq_A:
        break;

    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmNeq_A:
        break;

    case Js::OpCode::CmGe_A:
        setCC_Opcode = Js::OpCode::SETGE;
        break;

    case Js::OpCode::CmGt_A:
        setCC_Opcode = Js::OpCode::SETG;
        break;

    case Js::OpCode::CmLe_A:
        setCC_Opcode = Js::OpCode::SETLE;
        break;

    case Js::OpCode::CmLt_A:
        setCC_Opcode = Js::OpCode::SETL;
        break;

    default:
        Assume(UNREACHED);
    }

    if (setCC_Opcode == Js::OpCode::Nop)
    {
        // Branch-free (in)equality: r1 becomes -1 when src1 != src2, else 0.
        // SUB r1, src2
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, r1, r1, src2, m_func);
        instr->InsertBefore(subInstr);
        Legalize(subInstr);     // src2 may need legalizing

        // NEG r1
        instr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, r1, r1, m_func));

        // SBB r1, r1
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SBB, r1, r1, r1, m_func));
    }
    else
    {
        // Relational compare: r2 ends up 0 when the condition holds, -1 otherwise.
        IR::Instr *instrNew;
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyMachPtr, this->m_func);

        // MOV r2, 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r2, IR::IntConstOpnd::New(0, TyMachReg, this->m_func), m_func));

        // CMP r1, src2 -- compare the untagged 32-bit halves.
        IR::Opnd *r1_32 = r1->UseWithNewType(TyInt32, this->m_func);
        IR::Opnd *src2_32 =src2->UseWithNewType(TyInt32, this->m_func);
        instrNew = IR::Instr::New(Js::OpCode::CMP, m_func);
        instrNew->SetSrc1(r1_32);
        instrNew->SetSrc2(src2_32);
        instr->InsertBefore(instrNew);

        // SETcc r2
        IR::RegOpnd *r2_i8 = (IR::RegOpnd*) r2->UseWithNewType(TyInt8, this->m_func);
        instrNew = IR::Instr::New(setCC_Opcode, r2_i8, r2_i8, m_func);
        instr->InsertBefore(instrNew);

        // DEC r2
        instr->InsertBefore(IR::Instr::New(Js::OpCode::DEC, r2, r2, m_func));

        // r1 <- r2
        r1 = r2;
    }

    // AND r1, (notEqualResult - equalResult)
    // r1 is 0 or -1, so this selects either 0 or the address delta.
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, r1, r1, m_func);
        andInstr->SetSrc2(IR::AddrOpnd::New((void*)((size_t)notEqualResult - (size_t)equalResult), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func));
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // ADD r1, equalResult -- r1 now holds either true or false object address.
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::ADD, r1, r1, m_func);
        add->SetSrc2(IR::AddrOpnd::New(equalResult, IR::AddrOpndKind::AddrOpndKindDynamicVar, this->m_func));
        instr->InsertBefore(add);
        Legalize(add);
    }

    // MOV dst, r1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, r1, m_func));

    if (isTaggedInts)
    {
        // No slow path needed; the fast path fully replaces the instruction.
        instr->Remove();
        return true;
    }

    // JMP $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, fallthru, m_func));

    instr->InsertBefore(helper);
    instr->InsertAfter(fallthru);

    return false;
}
  2299. void LowererMD::GenerateFastCmXxR8(IR::Instr *instr)
  2300. {
  2301. GenerateFastCmXx(instr);
  2302. }
  2303. void LowererMD::GenerateFastCmXxI4(IR::Instr *instr)
  2304. {
  2305. GenerateFastCmXx(instr);
  2306. }
// Lowers a typed CmXx (int32/int64/float sources) into CMP/COMIS* followed by
// SETcc (int dst) or CMOVcc (var dst). The original instr is removed.
void LowererMD::GenerateFastCmXx(IR::Instr *instr)
{
    // For float src:
    // dst = MOV 0/1
    // (U)COMISD src1, src2
    // JP $done
    // dst.i8 = SetCC dst.i8
    // $done:

    // for int src:
    // CMP src1, src2
    // dst = MOV 0 / false
    // dst.i8 = SetCC dst.i8 / CMOCcc true

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * tmp = dst;
    bool isIntDst = dst->AsRegOpnd()->m_sym->IsInt32();
    bool isFloatSrc = src1->IsFloat();
    bool isInt64Src = src1->IsInt64();
    Assert(!isFloatSrc || src2->IsFloat());
    Assert(!isFloatSrc || isIntDst);
    Assert(!isInt64Src || src2->IsInt64());
    Assert(!isInt64Src || isIntDst);
    Assert(!isFloatSrc || AutoSystemInfo::Data.SSE2Available());
    IR::Opnd *opnd;
    IR::Instr *newInstr;

    Assert(src1->IsRegOpnd());
#if LOWER_SPLIT_INT64
    // On 32-bit targets int64 values are register pairs; compare the high
    // halves first, then the low halves (see the JNE/skipLow block below).
    Int64RegPair src1Pair, src2Pair;
    if (isInt64Src)
    {
        src1Pair = this->m_func->FindOrCreateInt64Pair(src1);
        src2Pair = this->m_func->FindOrCreateInt64Pair(src2);
        src1 = src1Pair.high;
        src2 = src2Pair.high;
    }
#endif

    // For floats a label is needed as the JP (unordered/NaN) target.
    IR::Instr * done;
    if (isFloatSrc)
    {
        done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        instr->InsertBefore(done);
    }
    else
    {
        done = instr;
    }

    if (isIntDst)
    {
        // reg = MOV 0 will get peeped to XOR reg, reg which sets the flags.
        // Put the MOV before the CMP, but use a tmp if dst == src1/src2
        if (dst->IsEqual(src1) || dst->IsEqual(src2))
        {
            tmp = IR::RegOpnd::New(dst->GetType(), this->m_func);
        }
        // dst = MOV 0
        // For float CmNeq, NaN must compare as "not equal", so the default
        // (JP-taken) value is 1 instead of 0.
        if (isFloatSrc && instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            opnd = IR::IntConstOpnd::New(1, TyInt32, this->m_func);
        }
        else
        {
            opnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        }
        m_lowerer->InsertMove(tmp, opnd, done);
    }

    Js::OpCode cmpOp;
    if (isFloatSrc)
    {
        // Unordered compare for (in)equality, ordered for relational compares.
        if (instr->m_opcode == Js::OpCode::CmEq_A || instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        }
        else
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
        }
    }
    else
    {
        cmpOp = Js::OpCode::CMP;
    }
    // CMP src1, src2
    newInstr = IR::Instr::New(cmpOp, this->m_func);
    newInstr->SetSrc1(src1);
    newInstr->SetSrc2(src2);
    done->InsertBefore(newInstr);
    LowererMD::Legalize(newInstr);

    if (isFloatSrc)
    {
        // JP $done -- skip the SETcc when the compare is unordered (NaN).
        newInstr = IR::BranchInstr::New(Js::OpCode::JP, done->AsLabelInstr(), this->m_func);
        done->InsertBefore(newInstr);
    }

    if (!isIntDst)
    {
        // Var dst: default to the 'false' library object, CMOVcc to 'true' below.
        opnd = this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
        Lowerer::InsertMove(tmp, opnd, done);
    }

    Js::OpCode useCC;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmEq_A:
        useCC = isIntDst ? Js::OpCode::SETE : Js::OpCode::CMOVE;
        break;

    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmNeq_A:
        useCC = isIntDst ? Js::OpCode::SETNE : Js::OpCode::CMOVNE;
        break;

    case Js::OpCode::CmGe_I4:
        useCC = isIntDst ? Js::OpCode::SETGE : Js::OpCode::CMOVGE;
        break;

    case Js::OpCode::CmGt_I4:
        useCC = isIntDst ? Js::OpCode::SETG : Js::OpCode::CMOVG;
        break;

    case Js::OpCode::CmLe_I4:
        useCC = isIntDst ? Js::OpCode::SETLE : Js::OpCode::CMOVLE;
        break;

    case Js::OpCode::CmLt_I4:
        useCC = isIntDst ? Js::OpCode::SETL : Js::OpCode::CMOVL;
        break;

    // Unsigned variants (and float relational, which uses above/below after
    // COMIS*) map to the unsigned condition codes.
    case Js::OpCode::CmUnGe_I4:
    case Js::OpCode::CmGe_A:
        useCC = isIntDst ? Js::OpCode::SETAE : Js::OpCode::CMOVAE;
        break;

    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmGt_A:
        useCC = isIntDst ? Js::OpCode::SETA : Js::OpCode::CMOVA;
        break;

    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmLe_A:
        useCC = isIntDst ? Js::OpCode::SETBE : Js::OpCode::CMOVBE;
        break;

    case Js::OpCode::CmUnLt_I4:
    case Js::OpCode::CmLt_A:
        useCC = isIntDst ? Js::OpCode::SETB : Js::OpCode::CMOVB;
        break;

    default:
        useCC = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    if (isIntDst)
    {
        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(useCC, tmp_i8, tmp_i8, this->m_func);
    }
    else
    {
        // regTrue = MOV true
        IR::Opnd *regTrue = IR::RegOpnd::New(TyMachPtr, this->m_func);
        Lowerer::InsertMove(regTrue, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), done);

        // tmp = CMOVcc tmp, regTrue
        // (tmp also appears as src1 to create a use for the register allocator)
        newInstr = IR::Instr::New(useCC, tmp, tmp, regTrue, this->m_func);
    }
    done->InsertBefore(newInstr);

#ifndef _M_X64
    if (isInt64Src)
    {
        // High halves equal: the result is decided by the low halves.
        IR::LabelInstr* skipLow = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        newInstr = IR::BranchInstr::New(Js::OpCode::JNE, skipLow, this->m_func);
        done->InsertBefore(newInstr);

        newInstr = IR::Instr::New(cmpOp, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src2Pair.low);
        done->InsertBefore(newInstr);

        Js::OpCode lowUseCC = useCC;
        // Need to do an unsigned compare for the lower part
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmGe_I4: lowUseCC = Js::OpCode::SETAE; break;
        case Js::OpCode::CmGt_I4: lowUseCC = Js::OpCode::SETA; break;
        case Js::OpCode::CmLe_I4: lowUseCC = Js::OpCode::SETBE; break;
        case Js::OpCode::CmLt_I4: lowUseCC = Js::OpCode::SETB; break;
        }

        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(lowUseCC, tmp_i8, tmp_i8, this->m_func);
        done->InsertBefore(newInstr);
        done->InsertBefore(skipLow);
    }
#endif

    if (tmp != dst)
    {
        // Copy the temp result into the real dst (dst aliased a src above).
        newInstr = IR::Instr::New(Js::OpCode::MOV, dst, tmp, this->m_func);
        instr->InsertBefore(newInstr);
    }

    instr->Remove();
}
IR::Instr * LowererMD::GenerateConvBool(IR::Instr *instr)
{
    // Lowers Conv_Bool by materializing the library true/false Vars and
    // selecting between them on the zero-ness of src1:
    //
    //      TEST src1, src1
    //      dst = MOV true
    //      rf  = MOV false
    //      dst = CMOVE dst, rf     -- pick false when src1 was zero
    //
    // Returns the first machine instruction emitted (the TEST) so the caller
    // can continue processing from there.
    IR::Instr *instrNew, *instrFirst;
    IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *regFalse;

    // TEST src1, src1 -- sets ZF iff the source is zero
    instrFirst = instrNew = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instrNew->SetSrc1(instr->GetSrc1());
    instrNew->SetSrc2(instr->GetSrc1());
    instr->InsertBefore(instrNew);

    // dst = MOV true
    Lowerer::InsertMove(dst, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);

    // rf = MOV false
    regFalse = IR::RegOpnd::New(TyMachPtr, this->m_func);
    Lowerer::InsertMove(regFalse, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);

    // Add dst as src1 of CMOV to create a pseudo use of dst. Otherwise, the register allocator
    // won't know the previous dst is needed, and needed in the same register as the dst of the CMOV.
    // dst = CMOVE dst, rf
    instrNew = IR::Instr::New(Js::OpCode::CMOVE, dst, dst, regFalse, this->m_func);
    instr->InsertBefore(instrNew);

    instr->Remove();
    return instrFirst;
}
  2523. ///----------------------------------------------------------------------------
  2524. ///
  2525. /// LowererMD::GenerateFastAdd
  2526. ///
  2527. /// NOTE: We assume that only the sum of two Int31's will have 0x2 set. This
  2528. /// is only true until we have a var type with tag == 0x2.
  2529. ///
  2530. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastAdd(IR::Instr * instrAdd)
{
    // Given:
    //
    // dst = Add src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1          -- Get rid of one of the tag [Int31 only]
    // s1 = ADD s1, src2    -- try an inline add
    //      JO $helper      -- bail if the add overflowed
    // s1 = OR s1, AtomTag_IntPtr                        [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    //
    // Returns true when the fast path (and the $helper/$fallthru labels) was
    // emitted; returns false when the operands rule out a tagged-int fast
    // path, leaving the caller to lower the full helper call.

    IR::Instr *      instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd *       opndReg;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndSrc2;

    opndSrc1 = instrAdd->GetSrc1();
    opndSrc2 = instrAdd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");

    // Generate fastpath for Incr_A anyway -
    // Incrementing strings representing integers can be inter-mixed with integers e.g. "1"++ -> converts 1 to an int and thereafter, integer increment is expected.
    if (opndSrc1->IsRegOpnd() && (opndSrc1->AsRegOpnd()->IsNotInt() || opndSrc1->GetValueType().IsString()
        || (instrAdd->m_opcode != Js::OpCode::Incr_A && opndSrc1->GetValueType().IsLikelyString())))
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && (opndSrc2->AsRegOpnd()->IsNotInt() ||
        opndSrc2->GetValueType().IsLikelyString()))
    {
        return false;
    }

    // Tagged ints? If both operands are statically known tagged ints the
    // runtime type test below can be skipped entirely.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrAdd, opndSrc1, opndSrc2, labelHelper);
    }

    if (opndSrc1->IsAddrOpnd())
    {
        // If opnd1 is a constant, just swap them so the constant folding below
        // only has to consider src2.
        IR::Opnd *opndTmp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = opndTmp;
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrAdd->InsertBefore(instr);

#if !INT32VAR
    // Tagged-pointer (Int31) encoding: fold the "remove one tag" DEC into the
    // constant when src2 is a constant; otherwise emit an explicit DEC.
    if (opndSrc2->IsAddrOpnd())
    {
        Assert(opndSrc2->AsAddrOpnd()->GetAddrOpndKind() == IR::AddrOpndKindConstantVar);
        opndSrc2 = IR::IntConstOpnd::New(*((int *)&(opndSrc2->AsAddrOpnd()->m_address)) - 1, TyInt32, this->m_func, opndSrc2->AsAddrOpnd()->m_dontEncode);
        opndSrc2 = opndSrc2->Use(this->m_func);
    }
    else if (opndSrc2->IsIntConstOpnd())
    {
        Assert(opndSrc2->GetType() == TyInt32);
        opndSrc2 = opndSrc2->Use(this->m_func);
        opndSrc2->AsIntConstOpnd()->DecrValue(1);
    }
    else
    {
        // s1 = DEC s1
        opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::DEC, opndReg, opndReg, this->m_func);
        instrAdd->InsertBefore(instr);
    }
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
#else
    // Low-32-bits (Int32) encoding: operate on the untagged low 32 bits.
    if (opndSrc2->IsAddrOpnd())
    {
        // truncate to untag
        int value = ::Math::PointerCastToIntegralTruncate<int>(opndSrc2->AsAddrOpnd()->m_address);
        if (value == 1)
        {
            // Adding 1 is just INC.
            instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
        }
        else
        {
            opndSrc2 = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
            instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
        }
    }
    else
    {
        instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2->UseWithNewType(TyInt32, this->m_func), this->m_func);
    }
#endif

    // s1 = ADD s1, src2
    instrAdd->InsertBefore(instr);
    Legalize(instr);

    // JO $helper -- bail to the helper if the 32-bit add overflowed
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrAdd->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr -- re-tag the result as a Var
    GenerateInt32ToVarConversion(opndReg, instrAdd);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrAdd->GetDst(), opndReg, this->m_func);
    instrAdd->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrAdd->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrAdd->InsertBefore(labelHelper);
    instrAdd->InsertAfter(labelFallThru);

    return true;
}
  2676. ///----------------------------------------------------------------------------
  2677. ///
  2678. /// LowererMD::GenerateFastSub
  2679. ///
  2680. ///
  2681. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastSub(IR::Instr * instrSub)
{
    // Given:
    //
    // dst = Sub src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = SUB s1, src2    -- try an inline sub
    //      JO $helper      -- bail if the subtract overflowed
    // s1 = INC s1          -- restore the var tag on the result [Int31 only]
    // s1 = OR s1, AtomTag_IntPtr                               [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    //
    // Returns true when the fast path was emitted; false when the operands
    // are known not to be tagged ints so no fast path applies.

    IR::Instr *      instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd *       opndReg;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndSrc2;

    opndSrc1 = instrSub->GetSrc1();
    opndSrc2 = instrSub->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Sub instruction");

    // Not tagged ints? No fast path possible.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }

    // Tagged ints? If both are statically known tagged ints, skip the runtime test.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrSub, opndSrc1, opndSrc2, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrSub->InsertBefore(instr);

    // s1 = SUB s1, src2
    // Note: subtracting two tagged values cancels one tag, so for the Int31
    // encoding only the INC below is needed to restore it.
    instr = IR::Instr::New(Js::OpCode::SUB, opndReg, opndReg, opndSrc2, this->m_func);
    instrSub->InsertBefore(instr);

    // JO $helper -- bail if the 32-bit subtract overflowed
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrSub->InsertBefore(instr);

#if !INT32VAR
    // s1 = INC s1 -- restore the var tag
    instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
    instrSub->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr -- re-tag the result as a Var
    GenerateInt32ToVarConversion(opndReg, instrSub);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrSub->GetDst(), opndReg, this->m_func);
    instrSub->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrSub->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrSub->InsertBefore(labelHelper);
    instrSub->InsertAfter(labelFallThru);

    return true;
}
  2782. ///----------------------------------------------------------------------------
  2783. ///
  2784. /// LowererMD::GenerateFastMul
  2785. ///
  2786. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastMul(IR::Instr * instrMul)
{
    // Given:
    //
    // dst = Mul src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1                   -- clear the var tag from the value to be multiplied [Int31 only]
    // s2 = MOV src2
    // s2 = SAR s2, Js::VarTag_Shift -- extract the real src2 amount from the var [Int31 only]
    // s1 = IMUL s1, s2              -- do the signed mul
    //      JO $helper               -- bail if the result overflowed
    // s3 = MOV s1
    //      TEST s3, s3              -- Check result is 0. might be -0. Result is -0 when a negative number is multiplied with 0.
    //      JEQ $zero
    //      JMP $nonzero
    // $zero:                        -- result of mul was 0. try to check for -0
    // s2 = ADD s2, src1             -- Add src1 to s2
    //      JGT $nonzero             -- positive 0. [Int31 only]
    //      JGE $nonzero             -- positive 0. [Int32 only]
    // dst = ToVar(-0.0)             -- load negative 0
    //      JMP $fallthru
    // $nonzero:
    // s3 = INC s3                   -- restore the var tag on the result [Int31 only]
    // s3 = OR s3, AtomTag_IntPtr                                         [Int32 only]
    // dst = MOV s3
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelNonZero;
    IR::Instr *      instr;
    IR::RegOpnd *    opndReg1;
    IR::RegOpnd *    opndReg2;
    IR::RegOpnd *    s3;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndSrc2;

    opndSrc1 = instrMul->GetSrc1();
    opndSrc2 = instrMul->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on mul instruction");

    // NOTE(review): unlike GenerateFastAdd/GenerateFastSub these early outs
    // return true, not false -- presumably the caller interprets the return
    // differently for Mul; confirm against the caller before changing.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // (If not 2 Int31's, jump to $helper.)
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    labelNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    this->GenerateSmIntPairTest(instrMul, opndSrc1, opndSrc2, labelHelper);

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    if (opndSrc1->IsImmediateOpnd())
    {
        // Keep any immediate in src2 so only one constant path is needed below.
        IR::Opnd * temp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = temp;
    }

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);

#if !INT32VAR
    // s1 = DEC s1 -- clear the tag bit from one factor
    instr = IR::Instr::New(Js::OpCode::DEC, opndReg1, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    if (opndSrc2->IsImmediateOpnd())
    {
        Assert(opndSrc2->IsAddrOpnd() && opndSrc2->AsAddrOpnd()->IsVar());

        // Untag the constant at JIT time instead of emitting a SAR.
        IR::Opnd *opnd2 = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address), TyInt32, this->m_func);

        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opnd2, this->m_func);
        instrMul->InsertBefore(instr);
    }
    else
    {
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, this->m_func);
        instrMul->InsertBefore(instr);

#if !INT32VAR
        // s2 = SAR s2, Js::VarTag_Shift -- shift out the tag to get the real value
        instr = IR::Instr::New(
            Js::OpCode::SAR, opndReg2, opndReg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrMul->InsertBefore(instr);
#endif
    }

    // s1 = IMUL s1, s2
    instr = IR::Instr::New(Js::OpCode::IMUL2, opndReg1, opndReg1, opndReg2, this->m_func);
    instrMul->InsertBefore(instr);

    // JO $helper -- bail if the signed multiply overflowed
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrMul->InsertBefore(instr);

    // MOV s3, s1
    s3 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, s3, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);

    // TEST s3, s3 -- a zero product may actually be -0, which is not an int
    instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(s3);
    instr->SetSrc2(s3);
    instrMul->InsertBefore(instr);

    // JEQ $zero
    IR::LabelInstr *labelZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelZero, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $nonzero
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // $zero:
    instrMul->InsertBefore(labelZero);

    // s2 = ADD s2, src1 -- result is -0 only when one factor was negative;
    // the sum's sign distinguishes +0 from -0
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg2, opndReg2, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);
    Legalize(instr);

    // JGT $nonzero -- the Int31 encoding never sums to exactly zero here,
    // hence JGT; the Int32 encoding uses JGE
#if INT32VAR
    Js::OpCode greaterOpCode = Js::OpCode::JGE;
#else
    Js::OpCode greaterOpCode = Js::OpCode::JGT;
#endif
    instr = IR::BranchInstr::New(greaterOpCode, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // dst = ToVar(-0.0) -- load negative 0
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), m_lowerer->LoadLibraryValueOpnd(instrMul, LibraryValue::ValueNegativeZero), this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $nonzero:
    instrMul->InsertBefore(labelNonZero);

#if !INT32VAR
    // s3 = INC s3 -- restore the var tag
    instr = IR::Instr::New(Js::OpCode::INC, s3, s3, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    // Cast is fine. We know ChangeType returns IR::Opnd * but it
    // preserves the Type.
    //
    if(TyMachReg != s3->GetType())
    {
        s3 = static_cast<IR::RegOpnd *>(s3->UseWithNewType(TyMachPtr, this->m_func));
    }

#if INT32VAR
    // s3 = OR s3, AtomTag_IntPtr -- re-tag the result as a Var
    GenerateInt32ToVarConversion(s3, instrMul);
#endif

    // dst = MOV s3
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), s3, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrMul->InsertBefore(labelHelper);
    instrMul->InsertAfter(labelFallThru);

    return true;
}
bool
LowererMD::GenerateFastNeg(IR::Instr * instrNeg)
{
    // Given:
    //
    // dst = Neg src
    //
    // Generate:
    //
    //       if not int, jump $helper
    //       if src == 0     -- test for zero (must be handled by the runtime to preserve
    //       JEQ $helper     --    difference btw +0 and -0)
    // dst = MOV src
    // dst = NEG dst         -- do an inline NEG
    // dst = ADD dst, 2      -- restore the var tag on the result [int31 only]
    //       JO $helper
    // dst = OR dst, AtomTag_Ptr                                  [int32 only]
    //       JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    //
    // Returns false when the instruction was fully lowered here (constant
    // folded) and no helper call is needed; otherwise hands $helper back to
    // the caller via the inserted labels.

    IR::Instr *      instr;
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru = nullptr;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndDst;
    bool usingNewDst = false;

    opndSrc1 = instrNeg->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Neg instruction");

    // NEG is destructive: if dst aliases src, negate into a temp and copy to
    // dst only after the overflow check succeeds.
    if(opndSrc1->IsEqual(instrNeg->GetDst()))
    {
        usingNewDst = true;
        opndDst = IR::RegOpnd::New(TyInt32, this->m_func);
    }
    else
    {
        opndDst = instrNeg->GetDst()->UseWithNewType(TyInt32, this->m_func);
    }

    // Constant-fold when the source is a known integer constant.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        IR::Opnd *newOpnd;
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();
        if (value == 0)
        {
            // If the negate operand is zero, the result is -0.0, which is a Number rather than an Int31.
            newOpnd = m_lowerer->LoadLibraryValueOpnd(instrNeg, LibraryValue::ValueNegativeZero);
        }
        else
        {
            // negation below can overflow because max negative int32 value > max positive value by 1.
            newOpnd = IR::AddrOpnd::NewFromNumber(-(int64)value, m_func);
        }

        instrNeg->ClearBailOutInfo();
        instrNeg->FreeSrc1();
        instrNeg->SetSrc1(newOpnd);
        instrNeg = this->ChangeToAssign(instrNeg);

        // Skip lowering call to helper
        return false;
    }

    bool isInt = (opndSrc1->IsTaggedInt());

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        // Known non-int: no fast path emitted.
        return true;
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isInt)
    {
        // if not int, jump $helper
        GenerateSmIntTest(opndSrc1, instrNeg, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // src == 0 must go to the helper: negating tagged 0 would lose the -0 distinction.
    GenerateTaggedZeroTest(opndSrc1, instrNeg, labelHelper);

    // dst = MOV src
    instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, this->m_func);
    instrNeg->InsertBefore(instr);

    // dst = NEG dst
    instr = IR::Instr::New(Js::OpCode::NEG, opndDst, opndDst, this->m_func);
    instrNeg->InsertBefore(instr);

#if !INT32VAR
    // dst = ADD dst, 2 -- restore the tag (NEG flipped it along with the value)
    instr = IR::Instr::New(Js::OpCode::ADD, opndDst, opndDst, IR::IntConstOpnd::New(2, TyInt32, this->m_func), this->m_func);
    instrNeg->InsertBefore(instr);
#endif

    // JO $helper
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrNeg->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndDst->GetType())
    {
        opndDst = opndDst->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // dst = OR dst, AtomTag_IntPtr -- re-tag the result as a Var
    GenerateInt32ToVarConversion(opndDst, instrNeg);
#endif

    if(usingNewDst)
    {
        // Copy the temp into the real (aliased) dst.
        instr = IR::Instr::New(Js::OpCode::MOV, instrNeg->GetDst(), opndDst, this->m_func);
        instrNeg->InsertBefore(instr);
    }

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrNeg->InsertBefore(instr);

    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNeg->InsertBefore(labelHelper);
    instrNeg->InsertAfter(labelFallThru);

    return true;
}
  3081. void
  3082. LowererMD::GenerateFastBrS(IR::BranchInstr *brInstr)
  3083. {
  3084. IR::Opnd *src1 = brInstr->UnlinkSrc1();
  3085. Assert(src1->IsIntConstOpnd() || src1->IsAddrOpnd() || src1->IsRegOpnd());
  3086. IR::Instr *cmpInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  3087. cmpInstr->SetSrc1(m_lowerer->LoadOptimizationOverridesValueOpnd(brInstr, OptimizationOverridesValue::OptimizationOverridesSideEffects));
  3088. cmpInstr->SetSrc2(src1);
  3089. brInstr->InsertBefore(cmpInstr);
  3090. Legalize(cmpInstr);
  3091. Js::OpCode opcode = Js::OpCode::InvalidOpCode;
  3092. switch(brInstr->m_opcode)
  3093. {
  3094. case Js::OpCode::BrHasSideEffects:
  3095. opcode = Js::OpCode::JNE;
  3096. break;
  3097. case Js::OpCode::BrNotHasSideEffects:
  3098. opcode = Js::OpCode::JEQ;
  3099. break;
  3100. default:
  3101. Assert(UNREACHED);
  3102. __assume(false);
  3103. }
  3104. brInstr->m_opcode = opcode;
  3105. }
  3106. ///----------------------------------------------------------------------------
  3107. ///
  3108. /// LowererMD::GenerateSmIntPairTest
  3109. ///
  3110. /// Generate code to test whether the given operands are both Int31 vars
  3111. /// and branch to the given label if not.
  3112. ///
  3113. ///----------------------------------------------------------------------------
#if !INT32VAR
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    // Int31 (tagged-pointer) variant: both operands are ints iff both have
    // the AtomTag low bit set. Emits the test before instrInsert and branches
    // to labelFail when either operand is not a tagged int.
    // Returns the instruction preceding the emitted sequence.
    IR::Opnd *           opndReg;
    IR::Instr *          instrPrev = instrInsert->m_prev;
    IR::Instr *          instr;

    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);

    // Canonicalize: if one operand is a statically known tagged int, make it src2.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }

    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically known: nothing to test at runtime.
            return instrPrev;
        }

        // Only src1 needs a runtime check:
        //      TEST src1, AtomTag
        //      JEQ $fail
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc1);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
        instrInsert->InsertBefore(instr);
    }
    else
    {
        // Both need checking; AND the tag bit of src1 into a temp, then TEST
        // it against src2 -- the result is nonzero only when both tags are set.
        //      s1 = MOV src1
        //      s1 = AND s1, 1
        //           TEST s1, src2
        //           JEQ $fail

        // s1 = MOV src1
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = AND s1, AtomTag
        instr = IR::Instr::New(
            Js::OpCode::AND, opndReg, opndReg, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // TEST s1, src2
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(opndSrc2);
        instrInsert->InsertBefore(instr);
    }

    // JEQ $fail
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
#else
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    // Int32 (upper-32-bit tag) variant: combine both operands' tag halves
    // into one register and compare against the expected tag pair in a single
    // CMP. Emits before instrInsert; branches to labelFail on mismatch.
    // Returns the instruction preceding the emitted sequence.
    IR::Opnd *           opndReg;
    IR::Instr *          instrPrev = instrInsert->m_prev;
    IR::Instr *          instr;

    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);

    // Canonicalize: if one operand is a statically known tagged int, make it src2.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }

    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically known: nothing to test at runtime.
            return instrPrev;
        }

        // Only src1 needs a runtime check.
        GenerateSmIntTest(opndSrc1, instrInsert, labelFail);
        return instrPrev;
    }
    else
    {
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);

        IR::Opnd * opndReg1;

        // s1 = MOV src1
        // s1 = SHR s1, VarTag_Shift
        // s2 = MOV src2
        // s2 = SHR s2, 32
        // s1 = OR s1, s2           ------ move both tags to the lower 32 bits
        //      CMP s1, AtomTag_Pair ------ compare the tags together to the expected tag pair
        //      JNE $fail

        // s1 = MOV src1
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = SHR s1, VarTag_Shift
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // s2 = MOV src2
        opndReg1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc2, this->m_func);
        instrInsert->InsertBefore(instr);

        // s2 = SHR s2, 32
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg1, opndReg1, IR::IntConstOpnd::New(32, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = OR s1, s2
        instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndReg1, this->m_func);
        instrInsert->InsertBefore(instr);

        opndReg = opndReg->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();

        // CMP s1, AtomTag_Pair
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag_Pair, TyInt32, this->m_func, true));
        instrInsert->InsertBefore(instr);
    }

    // JNE $fail
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
#endif
  3239. void
  3240. LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndTaggedType)
  3241. {
  3242. // Generate
  3243. //
  3244. // MOV taggedType, type
  3245. // OR taggedType, InlineCacheAuxSlotTypeTag
  3246. // MOV taggedType, type
  3247. {
  3248. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, opndTaggedType, opndType, instrLdSt->m_func);
  3249. instrLdSt->InsertBefore(instrMov);
  3250. }
  3251. // OR taggedType, InlineCacheAuxSlotTypeTag
  3252. {
  3253. IR::IntConstOpnd * opndAuxSlotTag = IR::IntConstOpnd::New(InlineCacheAuxSlotTypeTag, TyMachPtr, instrLdSt->m_func);
  3254. IR::Instr * instrAnd = IR::Instr::New(Js::OpCode::OR, opndTaggedType, opndTaggedType, opndAuxSlotTag, instrLdSt->m_func);
  3255. instrLdSt->InsertBefore(instrAnd);
  3256. }
  3257. }
  3258. ///----------------------------------------------------------------------------
  3259. ///
  3260. /// LowererMD::GenerateFastLdMethodFromFlags
  3261. ///
  3262. /// Make use of the helper to cache the type and slot index used to do a LdFld
  3263. /// and do an inline load from the appropriate slot if the type hasn't changed
  3264. /// since the last time this LdFld was executed.
  3265. ///
  3266. ///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
{
    // Emits an inline-cache fast path for LdMethodFld-style loads:
    // check the flag inline cache against the object's type (first untagged,
    // then tagged for aux-slot layouts) and load the method directly from the
    // cached slot; on any miss, fall through to a generated bailout.
    // Always returns true.
    IR::LabelInstr *   labelFallThru;
    IR::LabelInstr *   bailOutLabel;
    IR::Opnd *         opndSrc;
    IR::Opnd *         opndDst;
    IR::RegOpnd *      opndBase;
    IR::RegOpnd *      opndType;
    IR::RegOpnd *      opndInlineCache;

    opndSrc = instrLdFld->GetSrc1();

    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
              "Expected property sym operand as src of LdFldFlags");

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();

    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));

    // Reuse the type-check sequence's object type sym when available so the
    // loaded type participates in downstream type-check optimization.
    if (propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
        StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
        opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
    }
    else
    {
        opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    }

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    opndDst = instrLdFld->GetDst();

    opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Label to jump to (or fall through to) when bailing out
    bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);

    instrLdFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), this->m_func));
    IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Check the flag cache with the untagged type
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
    // Blindly do the check for getter flag first and then do the type check
    // We avoid repeated check for getter flag when the function object may be in either
    // inline slots or auxiliary slots
    this->m_lowerer->GenerateFlagInlineCacheCheckForGetterSetter(instrLdFld, opndInlineCache, bailOutLabel);
    this->m_lowerer->GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
    this->m_lowerer->GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);

    // Check the flag cache with the tagged type
    instrLdFld->InsertBefore(labelFlagAux);
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
    this->m_lowerer->GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
    this->m_lowerer->GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);

    instrLdFld->InsertBefore(bailOutLabel);
    instrLdFld->InsertAfter(labelFallThru);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    instrLdFld->UnlinkSrc1();
    this->m_lowerer->GenerateBailOut(instrLdFld);

    return true;
}
  3322. void
  3323. LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
  3324. {
  3325. // Generate
  3326. //
  3327. // MOV r1, type
  3328. // SHR r1, PolymorphicInlineCacheShift
  3329. // AND r1, (size - 1)
  3330. // SHL r1, log2(sizeof(Js::InlineCache))
  3331. // LEA inlineCache, [inlineCache + r1]
  3332. // MOV r1, type
  3333. IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, instrLdSt->m_func);
  3334. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndOffset, opndType, instrLdSt->m_func);
  3335. instrLdSt->InsertBefore(instr);
  3336. IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
  3337. IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
  3338. // instead of generating
  3339. // SHR r1, PolymorphicInlineCacheShift
  3340. // AND r1, (size - 1)
  3341. // SHL r1, log2(sizeof(Js::InlineCache))
  3342. //
  3343. // we can generate:
  3344. // SHR r1, (PolymorphicInlineCacheShift - log2(sizeof(Js::InlineCache))
  3345. // AND r1, (size - 1) << log2(sizeof(Js::InlineCache))
  3346. Assert(rightShiftAmount > leftShiftAmount);
  3347. instr = IR::Instr::New(Js::OpCode::SHR, opndOffset, opndOffset, IR::IntConstOpnd::New(rightShiftAmount - leftShiftAmount, TyUint8, instrLdSt->m_func, true), instrLdSt->m_func);
  3348. instrLdSt->InsertBefore(instr);
  3349. instr = IR::Instr::New(Js::OpCode::AND, opndOffset, opndOffset, IR::IntConstOpnd::New(((__int64)(polymorphicInlineCacheSize - 1) << leftShiftAmount), TyMachReg, instrLdSt->m_func, true), instrLdSt->m_func);
  3350. instrLdSt->InsertBefore(instr);
  3351. // LEA inlineCache, [inlineCache + r1]
  3352. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(opndInlineCache, opndOffset, TyMachPtr, instrLdSt->m_func);
  3353. instr = IR::Instr::New(Js::OpCode::LEA, opndInlineCache, indirOpnd, instrLdSt->m_func);
  3354. instrLdSt->InsertBefore(instr);
  3355. }
// Lower 'assignInstr' to a machine assign and, when the destination may live in
// recycler (GC) memory and the stored value may be a GC pointer, emit a software
// write barrier after the store. Returns the resulting (possibly first-of-sequence)
// instruction.
IR::Instr *
LowererMD::ChangeToWriteBarrierAssign(IR::Instr * assignInstr, const Func* func)
{
#ifdef RECYCLER_WRITE_BARRIER_JIT
    IR::Opnd* dest = assignInstr->GetDst();
    auto threadContextInfo = func->GetTopFunc()->GetThreadContextInfo();
    void* destAddr = nullptr;
    bool isPossibleBarrieredDest = false;
    // Only pointer-sized stores can write a GC reference.
    if (TySize[dest->GetType()] == sizeof(void*))
    {
        if (dest->IsIndirOpnd())
        {
            // Indirect store through a register: address is unknown at JIT time,
            // so conservatively treat it as possibly barriered.
            Assert(!dest->AsIndirOpnd()->HasAddrKind());
            isPossibleBarrieredDest = true;
        }
        else if (dest->IsMemRefOpnd())
        {
            // It looks like all thread-context field accesses use MemRefOpnd.
            // Known non-recycler thread-context addresses are excluded below;
            // any remaining aligned address is assumed to be a recycler address
            // (verified by the assert in debug builds).
            destAddr = (void*)dest->AsMemRefOpnd()->GetMemLoc();
            isPossibleBarrieredDest = destAddr != nullptr
                && ((intptr_t)destAddr % sizeof(void*)) == 0
                && destAddr != (void*)threadContextInfo->GetImplicitCallFlagsAddr()
                && destAddr != (void*)threadContextInfo->GetDisableImplicitFlagsAddr()
                && destAddr != (void*)threadContextInfo->GetBailOutRegisterSaveSpaceAddr();
            if (isPossibleBarrieredDest)
            {
                Assert(Recycler::WBCheckIsRecyclerAddress((char*)destAddr));
            }
        }
    }
#endif
    // Do the actual opcode lowering first; this may rewrite assignInstr's opcode.
    IR::Instr * instr = ChangeToAssignNoBarrierCheck(assignInstr);

    // Now insert write barrier if necessary
#ifdef RECYCLER_WRITE_BARRIER_JIT
    if (isPossibleBarrieredDest
        && assignInstr->m_opcode == Js::OpCode::MOV // ignore SSE instructions like MOVSD
        && assignInstr->GetSrc1()->IsWriteBarrierTriggerableValue())
    {
        instr = LowererMD::GenerateWriteBarrier(assignInstr);
    }
#endif
    return instr;
}
  3399. void
  3400. LowererMD::GenerateWriteBarrierAssign(IR::MemRefOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3401. {
  3402. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3403. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3404. if (opndSrc->IsWriteBarrierTriggerableValue())
  3405. {
  3406. void * address = (void *)opndDst->AsMemRefOpnd()->GetMemLoc();
  3407. #ifdef RECYCLER_WRITE_BARRIER_BYTE
  3408. // WriteBarrier-TODO: need to pass card table address through RPC
  3409. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3410. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyInt8, insertBeforeInstr->m_func);
  3411. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntry, IR::IntConstOpnd::New(1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr->m_func);
  3412. insertBeforeInstr->InsertBefore(movInstr);
  3413. #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
  3414. if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(RecyclerVerifyMark))
  3415. {
  3416. this->LoadHelperArgument(insertBeforeInstr, opndDst);
  3417. IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
  3418. insertBeforeInstr->InsertBefore(instrCall);
  3419. this->ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
  3420. }
  3421. #endif
  3422. #else
  3423. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3424. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyMachPtr, assignInstr->m_func);
  3425. IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntry,
  3426. IR::IntConstOpnd::New(1 << ((uint)address >> 7), TyInt32, assignInstr->m_func), assignInstr->m_func);
  3427. assignInstr->InsertBefore(orInstr);
  3428. #endif
  3429. }
  3430. #endif
  3431. }
  3432. void
  3433. LowererMD::GenerateWriteBarrierAssign(IR::IndirOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3434. {
  3435. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3436. if (opndSrc->IsWriteBarrierTriggerableValue())
  3437. {
  3438. IR::RegOpnd * writeBarrierAddrRegOpnd = IR::RegOpnd::New(TyMachPtr, insertBeforeInstr->m_func);
  3439. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::LEA, writeBarrierAddrRegOpnd, opndDst, insertBeforeInstr->m_func));
  3440. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV,
  3441. IR::IndirOpnd::New(writeBarrierAddrRegOpnd, 0, TyMachReg, insertBeforeInstr->m_func), opndSrc, insertBeforeInstr->m_func);
  3442. insertBeforeInstr->InsertBefore(movInstr);
  3443. GenerateWriteBarrier(movInstr);
  3444. // The mov happens above, and it's slightly faster doing it that way since we've already calculated the address we're writing to
  3445. return;
  3446. }
  3447. #endif
  3448. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3449. return;
  3450. }
  3451. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3452. IR::Instr*
  3453. LowererMD::GenerateWriteBarrier(IR::Instr * assignInstr)
  3454. {
  3455. #if defined(RECYCLER_WRITE_BARRIER_BYTE)
  3456. PHASE_PRINT_TRACE(Js::JitWriteBarrierPhase, assignInstr->m_func, _u("Generating write barrier\n"));
  3457. IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachPtr, assignInstr->m_func);
  3458. IR::Instr * loadIndexInstr = IR::Instr::New(Js::OpCode::LEA, indexOpnd, assignInstr->GetDst(), assignInstr->m_func);
  3459. assignInstr->InsertBefore(loadIndexInstr);
  3460. IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
  3461. IR::IntConstOpnd::New(12 /* 1 << 12 = 4096 */, TyInt8, assignInstr->m_func), assignInstr->m_func);
  3462. assignInstr->InsertAfter(shiftBitInstr);
  3463. // The cardtable address is likely 64 bits already so we have to load it to a register
  3464. // That is, we have to do the following:
  3465. // LEA reg1, targetOfWrite
  3466. // SHR reg1, 12
  3467. // MOV reg2, cardTableAddress
  3468. // MOV [reg1 + reg2], 1
  3469. //
  3470. // Instead of doing this:
  3471. // LEA reg1, targetOfWrite
  3472. // SHR reg1, 12
  3473. // MOV [cardTableAddress + reg2], 1
  3474. //
  3475. //TODO: (leish)(swb) hoist RecyclerWriteBarrierManager::GetAddressOfCardTable()
  3476. IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
  3477. IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
  3478. IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindWriteBarrierCardTable, assignInstr->m_func),
  3479. assignInstr->m_func);
  3480. shiftBitInstr->InsertAfter(cardTableAddrInstr);
  3481. IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd,
  3482. TyInt8, assignInstr->m_func);
  3483. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntryOpnd, IR::IntConstOpnd::New(1, TyInt8, assignInstr->m_func), assignInstr->m_func);
  3484. cardTableAddrInstr->InsertAfter(movInstr);
  3485. return loadIndexInstr;
  3486. #else
  3487. Assert(writeBarrierAddrRegOpnd->IsRegOpnd());
  3488. IR::RegOpnd * shiftBitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
  3489. shiftBitOpnd->SetReg(LowererMDArch::GetRegShiftCount());
  3490. IR::Instr * moveShiftBitOpnd = IR::Instr::New(Js::OpCode::MOV, shiftBitOpnd, writeBarrierAddrRegOpnd, assignInstr->m_func);
  3491. assignInstr->InsertBefore(moveShiftBitOpnd);
  3492. IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, shiftBitOpnd, shiftBitOpnd,
  3493. IR::IntConstOpnd::New(7 /* 1 << 7 = 128 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
  3494. assignInstr->InsertBefore(shiftBitInstr);
  3495. IR::RegOpnd * bitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
  3496. IR::Instr * mov1Instr = IR::Instr::New(Js::OpCode::MOV, bitOpnd,
  3497. IR::IntConstOpnd::New(1, TyInt32, assignInstr->m_func), assignInstr->m_func);
  3498. assignInstr->InsertBefore(mov1Instr);
  3499. IR::Instr * bitInstr = IR::Instr::New(Js::OpCode::SHL, bitOpnd, bitOpnd, shiftBitOpnd, assignInstr->m_func);
  3500. assignInstr->InsertBefore(bitInstr);
  3501. IR::RegOpnd * indexOpnd = shiftBitOpnd;
  3502. IR::Instr * indexInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
  3503. IR::IntConstOpnd::New(5 /* 1 << 5 = 32 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
  3504. assignInstr->InsertBefore(indexInstr);
  3505. IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
  3506. IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
  3507. IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindDynamicMisc, assignInstr->m_func),
  3508. assignInstr->m_func);
  3509. assignInstr->InsertBefore(cardTableAddrInstr);
  3510. IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd, LowererMDArch::GetDefaultIndirScale(),
  3511. TyInt32, assignInstr->m_func);
  3512. IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntryOpnd, cardTableEntryOpnd,
  3513. bitOpnd, assignInstr->m_func);
  3514. assignInstr->InsertBefore(orInstr);
  3515. #endif
  3516. }
  3517. #endif
// Emit the inline-cache-hit store path for StFld: read the cached slot index
// from the local inline cache and store 'opndSrc' directly into the object's
// inline slots (isInlineSlot == true) or auxiliary slot array, then jump to
// 'labelFallThru'. The caller has already emitted the type check that guards
// this path.
void
LowererMD::GenerateStFldFromLocalInlineCache(
    IR::Instr * instrStFld,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndSrc,
    IR::RegOpnd * inlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    IR::Instr * instr;
    IR::Opnd* slotIndexOpnd;
    IR::RegOpnd * opndIndirBase = opndBase;

    if (!isInlineSlot)
    {
        // slotArray = MOV base->slots -- load the slot array
        IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
        IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrStFld->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrStFld->m_func);
        instrStFld->InsertBefore(instr);
        opndIndirBase = opndSlotArray;
    }

    // slotIndex = MOV [&inlineCache->u.local.inlineSlotOffsetOrAuxSlotIndex] -- load the cached slot offset or index
    // (zero-extended from 16 bits so it can be used as an index register)
    IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
    slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrStFld->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrStFld->m_func);
    instrStFld->InsertBefore(instr);

    // [base + slotIndex * (1 << indirScale)]      = MOV src -- store the value directly to the slot
    // [slotArray + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
    // The store goes through the write-barrier helper since the value may be a GC pointer.
    IR::IndirOpnd * storeLocIndirOpnd = IR::IndirOpnd::New(opndIndirBase, opndSlotIndex,
        LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
    GenerateWriteBarrierAssign(storeLocIndirOpnd, opndSrc, instrStFld);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrStFld->m_func);
    instrStFld->InsertBefore(instr);
}
  3553. //----------------------------------------------------------------------------
  3554. //
  3555. // LowererMD::GenerateFastScopedLdFld
  3556. //
  3557. // Make use of the helper to cache the type and slot index used to do a ScopedLdFld
  3558. // when the scope is an array of length 1.
  3559. // Extract the only element from array and do an inline load from the appropriate slot
  3560. // if the type hasn't changed since the last time this ScopedLdFld was executed.
  3561. //
  3562. //----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateFastScopedLdFld(IR::Instr * instrLdScopedFld)
{
    //  CMP [base + offset(length)], 1     -- get the length on array and test if it is 1.
    //  JNE $helper
    //  MOV r1, [base + offset(scopes)]    -- load the first scope
    //  MOV r2, r1->type
    //  CMP r2, [&(inlineCache->u.local.type)] -- check type
    //  JNE $helper
    //  MOV r1, r1->slots                  -- load the slots array
    //  MOV r2, [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
    //  MOV dst, [r1+r2]                   -- load the value from the slot
    //  JMP $fallthru
    // $helper:
    //  dst = CALL PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
    // $fallthru:
    IR::RegOpnd *       opndBase;
    IR::Instr *         instr;
    IR::IndirOpnd *     indirOpnd;
    IR::LabelInstr *    labelHelper;
    IR::Opnd *          opndDst;
    IR::RegOpnd *       inlineCache;
    IR::RegOpnd *       r1;
    IR::LabelInstr *    labelFallThru;

    IR::Opnd *propertySrc = instrLdScopedFld->GetSrc1();
    AssertMsg(propertySrc->IsSymOpnd() && propertySrc->AsSymOpnd()->IsPropertySymOpnd() && propertySrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected property sym operand as src of LdScoped");
    IR::PropertySymOpnd * propertySymOpnd = propertySrc->AsPropertySymOpnd();

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");

    // CMP [base + offset(length)], 1     -- get the length on array and test if it is 1.
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(indirOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
    instrLdScopedFld->InsertBefore(instr);

    // JNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
    instrLdScopedFld->InsertBefore(instr);

    // MOV r1, [base + offset(scopes)]    -- load the first scope
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
    r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
    instrLdScopedFld->InsertBefore(instr);

    // first load the inlineCache type
    inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    Assert(inlineCache != nullptr);

    IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    opndDst = instrLdScopedFld->GetDst();
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // The scope object is a real object, never a tagged number.
    r1->m_sym->m_isNotNumber = true;

    // Load the type
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdScopedFld, r1, opndType, labelHelper);

    // Check the local cache with the tagged type
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrLdScopedFld, opndType, opndTaggedType);
    instrLdScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdScopedFld, propertySymOpnd), this->m_func));
    Lowerer::GenerateLocalInlineCacheCheck(instrLdScopedFld, opndTaggedType, inlineCache, labelHelper);
    // Cache hit: load from the aux slot array (last arg == false).
    Lowerer::GenerateLdFldFromLocalInlineCache(instrLdScopedFld, r1, opndDst, inlineCache, labelFallThru, false);

    // $helper:
    //  dst = CALL PatchGetPropertyScoped(inlineCache, opndBase, propertyId, srcBase, scriptContext)
    // $fallthru:
    instrLdScopedFld->InsertBefore(labelHelper);
    instrLdScopedFld->InsertAfter(labelFallThru);

    // The original instruction remains in place; the caller lowers it into the
    // helper call. Return the previous instruction to continue lowering from.
    return instrLdScopedFld->m_prev;
}
  3630. //----------------------------------------------------------------------------
  3631. //
  3632. // LowererMD::GenerateFastScopedStFld
  3633. //
  3634. // Make use of the helper to cache the type and slot index used to do a ScopedStFld
  3635. // when the scope is an array of length 1.
  3636. // Extract the only element from array and do an inline load from the appropriate slot
  3637. // if the type hasn't changed since the last time this ScopedStFld was executed.
  3638. //
  3639. //----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateFastScopedStFld(IR::Instr * instrStScopedFld)
{
    //  CMP [base + offset(length)], 1     -- get the length on array and test if it is 1.
    //  JNE $helper
    //  MOV r1, [base + offset(scopes)]    -- load the first scope
    //  MOV r2, r1->type
    //  CMP r2, [&(inlineCache->u.local.type)] -- check type
    //  JNE $helper
    //  MOV r1, r1->slots                  -- load the slots array
    //  MOV r2, [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
    //  [r1 + r2*4] = MOV value            -- store the value directly to the slot
    //  JMP $fallthru
    // $helper:
    //  CALL PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
    // $fallthru:
    IR::RegOpnd *       opndBase;
    IR::Instr *         instr;
    IR::IndirOpnd *     indirOpnd;
    IR::LabelInstr *    labelHelper;
    IR::Opnd *          opndDst;
    IR::RegOpnd *       inlineCache;
    IR::RegOpnd *       r1;
    IR::LabelInstr *    labelFallThru;

    IR::Opnd *newValue = instrStScopedFld->GetSrc1();
    // IR::Opnd *defaultInstance = instrStScopedFld->UnlinkSrc2();
    opndDst = instrStScopedFld->GetDst();
    AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected property sym operand as dst of StScoped");
    IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");

    // CMP [base + offset(length)], 1     -- get the length on array and test if it is 1.
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(indirOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
    instrStScopedFld->InsertBefore(instr);

    // JNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
    instrStScopedFld->InsertBefore(instr);

    // MOV r1, [base + offset(scopes)]    -- load the first scope
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
    r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
    instrStScopedFld->InsertBefore(instr);

    // first load the inlineCache type
    inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    Assert(inlineCache != nullptr);

    IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // The scope object is a real object, never a tagged number.
    r1->m_sym->m_isNotNumber = true;

    // Load the type
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrStScopedFld, r1, opndType, labelHelper);

    // Check the local cache with the tagged type
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrStScopedFld, opndType, opndTaggedType);
    instrStScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrStScopedFld, propertySymOpnd), this->m_func));
    Lowerer::GenerateLocalInlineCacheCheck(instrStScopedFld, opndTaggedType, inlineCache, labelHelper);
    // Cache hit: store into the aux slot array (last arg == false).
    GenerateStFldFromLocalInlineCache(instrStScopedFld, r1, newValue, inlineCache, labelFallThru, false);

    // $helper:
    //  CALL PatchSetPropertyScoped(inlineCache, opndBase, propertyId, newValue, defaultInstance, scriptContext)
    // $fallthru:
    instrStScopedFld->InsertBefore(labelHelper);
    instrStScopedFld->InsertAfter(labelFallThru);

    // The original instruction remains in place; the caller lowers it into the
    // helper call. Return the previous instruction to continue lowering from.
    return instrStScopedFld->m_prev;
}
  3708. IR::Opnd *
  3709. LowererMD::CreateStackArgumentsSlotOpnd()
  3710. {
  3711. StackSym *sym = StackSym::New(TyMachReg, this->m_func);
  3712. sym->m_offset = -MachArgsSlotOffset;
  3713. sym->m_allocated = true;
  3714. return IR::SymOpnd::New(sym, TyMachReg, this->m_func);
  3715. }
// Extract the int32 value from a tagged-int var in 'src', inserting the emitted
// instructions before 'assignInstr'. When 'generateTagCheck' is true, also emit
// a check that branches to 'labelFail' if 'src' is not a tagged int.
// Returns the 32-bit register holding the untagged value.
IR::RegOpnd *
LowererMD::GenerateUntagVar(IR::RegOpnd * src, IR::LabelInstr * labelFail, IR::Instr * assignInstr, bool generateTagCheck)
{
    Assert(src->IsVar());

    //  MOV valueOpnd, index
    IR::RegOpnd *valueOpnd = IR::RegOpnd::New(TyInt32, this->m_func);

    //
    // Convert Index to 32 bits.
    //
    IR::Opnd * opnd = src->UseWithNewType(TyMachReg, this->m_func);

#if INT32VAR
    // 64-bit tagging scheme: the value lives in the low 32 bits of the var.
    if (generateTagCheck)
    {
        Assert(!opnd->IsTaggedInt());
        this->GenerateSmIntTest(opnd, assignInstr, labelFail);
    }

    // Moving into r2 clears the tag bits on AMD64.
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, valueOpnd, opnd, this->m_func);
    assignInstr->InsertBefore(instr);
#else
    // 32-bit tagging scheme: value is shifted left with a low tag bit.
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, valueOpnd, opnd, this->m_func);
    assignInstr->InsertBefore(instr);

    // SAR valueOpnd, Js::VarTag_Shift
    instr = IR::Instr::New(Js::OpCode::SAR, valueOpnd, valueOpnd,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    assignInstr->InsertBefore(instr);

    if (generateTagCheck)
    {
        Assert(!opnd->IsTaggedInt());

        // SAR set the carry flag (CF) to 1 if the lower bit is 1
        // JAE will jmp if CF = 0 (i.e. the tag bit was absent -> not a tagged int)
        instr = IR::BranchInstr::New(Js::OpCode::JAE, labelFail, this->m_func);
        assignInstr->InsertBefore(instr);
    }
#endif
    return valueOpnd;
}
// Convert 'indexOpnd' into an int32 register suitable for array indexing.
// Var inputs are untagged (branching to 'notTaggedIntLabel' on failure); unless
// 'skipNegativeCheck' is set, a sign test branching to 'negativeLabel' is also
// emitted. All instructions are inserted before 'insertBeforeInstr'.
IR::RegOpnd *LowererMD::LoadNonnegativeIndex(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notTaggedIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(indexOpnd);
    Assert(indexOpnd->IsVar() || indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
    Assert(indexOpnd->GetType() != TyUint32 || skipNegativeCheck);
    Assert(!indexOpnd->IsVar() || notTaggedIntLabel);
    Assert(skipNegativeCheck || negativeLabel);
    Assert(insertBeforeInstr);

    if(indexOpnd->IsVar())
    {
        // Likely-float indexes take a dedicated conversion path (requires SSE2 on x86).
        if (indexOpnd->GetValueType().IsLikelyFloat()
#ifdef _M_IX86
            && AutoSystemInfo::Data.SSE2Available()
#endif
            )
        {
            return m_lowerer->LoadIndexFromLikelyFloat(indexOpnd, skipNegativeCheck, notTaggedIntLabel, negativeLabel, insertBeforeInstr);
        }

        //     mov  intIndex, index
        //     sar  intIndex, 1
        //     jae  $notTaggedIntOrNegative
        // Tag check is skipped when the operand is already known to be a tagged int.
        indexOpnd = m_lowerer->GenerateUntagVar(indexOpnd, notTaggedIntLabel, insertBeforeInstr, !indexOpnd->IsTaggedInt());
    }

    if(!skipNegativeCheck)
    {
        //     test index, index
        //     js   $notTaggedIntOrNegative
        Lowerer::InsertTestBranch(indexOpnd, indexOpnd, Js::OpCode::JSB, negativeLabel, insertBeforeInstr);
    }
    return indexOpnd;
}
// Inlines fast-path for int Mul/Add or int Mul/Sub. If not int, call MulAdd/MulSub helper
//
// Pattern-matches a Mul_A immediately followed by Add_A/Sub_A that consumes the
// Mul's (single-def, temp, last-use) result, and lowers the pair into an int31
// fast path plus a single combined MulAdd/MulSub helper call. Returns true on
// success (both instructions consumed); false leaves both untouched for normal
// lowering. On success, *pInstrPrev is set so the main lowering loop resumes
// before the removed Mul.
bool LowererMD::TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev)
{
    IR::Instr *instrMul = instrAdd->GetPrevRealInstrOrLabel();
    IR::Opnd *addSrc;
    IR::RegOpnd *addCommonSrcOpnd;

    Assert(instrAdd->m_opcode == Js::OpCode::Add_A || instrAdd->m_opcode == Js::OpCode::Sub_A);

    bool isSub = (instrAdd->m_opcode == Js::OpCode::Sub_A) ? true : false;

    // Mul needs to be a single def reg
    if (instrMul->m_opcode != Js::OpCode::Mul_A || instrMul->GetDst()->IsRegOpnd() == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    if (instrMul->HasBailOutInfo())
    {
        // Bailout will be generated for the Add, but not the Mul.
        // We could handle this, but this path isn't used that much anymore.
        return false;
    }

    IR::RegOpnd *regMulDst = instrMul->GetDst()->AsRegOpnd();

    if (regMulDst->m_sym->m_isSingleDef == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    // Only handle a * b + c, so dst of Mul needs to match left source of Add
    if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc1()))
    {
        addCommonSrcOpnd = instrAdd->GetSrc1()->AsRegOpnd();
        addSrc = instrAdd->GetSrc2();
    }
    else if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc2()))
    {
        addSrc = instrAdd->GetSrc1();
        addCommonSrcOpnd = instrAdd->GetSrc2()->AsRegOpnd();
    }
    else
    {
        return false;
    }

    // Only handle a * b + c where c != a * b
    if (instrAdd->GetSrc1()->IsEqual(instrAdd->GetSrc2()))
    {
        return false;
    }

    // The Mul result must be a dead temp after the Add, otherwise it is still
    // observable and the Mul cannot be removed.
    if (addCommonSrcOpnd->m_isTempLastUse == false)
    {
        return false;
    }

    IR::Opnd *mulSrc1 = instrMul->GetSrc1();
    IR::Opnd *mulSrc2 = instrMul->GetSrc2();

    // Both Mul sources known tagged ints: the generic fast paths already handle
    // this well; don't fuse.
    if (mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsTaggedInt()
        && mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsTaggedInt())
    {
        return false;
    }

    // Save prevInstr for the main lower loop
    *pInstrPrev = instrMul->m_prev;

    // Generate int31 fast-path for Mul, go to MulAdd helper if it fails, or one of the source is marked notInt
    if (!(addSrc->IsRegOpnd() && addSrc->AsRegOpnd()->IsNotInt())
        && !(mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsNotInt())
        && !(mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsNotInt()))
    {
        this->GenerateFastMul(instrMul);

        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::Instr *instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        instrMul->InsertBefore(instr);

        // Generate int31 fast-path for Add
        bool success;
        if (isSub)
        {
            success = this->GenerateFastSub(instrAdd);
        }
        else
        {
            success = this->GenerateFastAdd(instrAdd);
        }

        if (!success)
        {
            // No add/sub fast path was emitted, so the "helper" label is really
            // the main path; don't mark it as a helper block.
            labelHelper->isOpHelper = false;
        }
        // Generate MulAdd helper call
        instrAdd->InsertBefore(labelHelper);
    }

    // Helper arguments are pushed in reverse order:
    // (mulSrc1, mulSrc2, addSrc, scriptContext, tempNumber/0).
    if (instrAdd->dstIsTempNumber)
    {
        m_lowerer->LoadHelperTemp(instrAdd, instrAdd);
    }
    else
    {
        IR::Opnd *tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        this->LoadHelperArgument(instrAdd, tempOpnd);
    }
    this->m_lowerer->LoadScriptContext(instrAdd);

    IR::JnHelperMethod helper;

    // Left/Right helper variants encode which side of the Add the non-Mul
    // operand came from (matters for Sub, which is not commutative).
    if (addSrc == instrAdd->GetSrc2())
    {
        instrAdd->FreeSrc1();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc2();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubRight : IR::HelperOp_MulAddRight;
    }
    else
    {
        instrAdd->FreeSrc2();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc1();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubLeft : IR::HelperOp_MulAddLeft;
    }

    // Arg: mulSrc2
    IR::Opnd *src2 = instrMul->UnlinkSrc2();
    this->LoadHelperArgument(instrAdd, src2);
    // Arg: mulSrc1
    IR::Opnd *src1 = instrMul->UnlinkSrc1();
    this->LoadHelperArgument(instrAdd, src1);

    // The Add becomes the combined helper call; the Mul is now fully consumed.
    this->ChangeToHelperCall(instrAdd, helper);

    instrMul->Remove();

    return true;
}
void
LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // Emits inline fast paths for Math.abs in front of insertInstr:
    //  - a tagged-int path that computes abs branch-free with the CDQ/XOR/SUB
    //    idiom, bailing to labelHelper when the result overflows (abs(INT_MIN)),
    //  - a float path (requires SSE2 on x86) that clears the sign bit of a
    //    JavascriptNumber's double value.
    // Inputs matching neither shape jump to labelHelper.
    //
    // TEST src1, AtomTag
    // JEQ $float
    // MOV EAX, src
    // SAR EAX, AtomTag_Int32
    // CDQ
    // XOR EAX, EDX
    // SUB EAX, EDX
    // SHL EAX, AtomTag_Int32
    // JO $labelHelper
    // INC EAX
    // MOV dst, EAX
    // JMP $done
    // $float
    // CMP [src], JavascriptNumber.vtable
    // JNE $helper
    // MOVSD r1, [src + offsetof(value)]
    // ANDPD r1, absDoubleCst
    // dst = DoubleToVar(r1)
    IR::Instr *instr = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    bool isInt = false;
    bool isNotInt = false;

    // Use any type information the operand already carries to skip dead paths.
    if (src->IsRegOpnd())
    {
        if (src->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
        else if (src->AsRegOpnd()->IsNotInt())
        {
            isNotInt = true;
        }
    }
    else if (src->IsAddrOpnd())
    {
        // Constant tagged-int source: fold abs() at JIT time when the result
        // still fits in a tagged int; otherwise fall through to the emitted paths.
        IR::AddrOpnd *varOpnd = src->AsAddrOpnd();
        Assert(varOpnd->IsVar() && Js::TaggedInt::Is(varOpnd->m_address));
#ifdef _M_X64
        __int64 absValue = ::_abs64(Js::TaggedInt::ToInt32(varOpnd->m_address));
#else
        __int32 absValue = ::abs(Js::TaggedInt::ToInt32(varOpnd->m_address));
#endif
        if (!Js::TaggedInt::IsOverflow(absValue))
        {
            varOpnd->SetAddress(Js::TaggedInt::ToVarUnchecked((__int32)absValue), IR::AddrOpndKindConstantVar);
            instr = IR::Instr::New(Js::OpCode::MOV, dst, varOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
            return;
        }
    }

    // The code below needs the source in a register.
    if (src->IsRegOpnd() == false)
    {
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, this->m_func);
        insertInstr->InsertBefore(instr);
        src = regOpnd;
    }

#ifdef _M_IX86
    // On x86 the float path needs SSE2 (MOVSD / ANDPS).
    bool emitFloatAbs = !isInt && AutoSystemInfo::Data.SSE2Available();
#else
    bool emitFloatAbs = !isInt;
#endif

    if (!isNotInt)
    {
        // Integer fast path (skipped entirely when the source is known non-int).
        if (!isInt)
        {
            // Tag check: non-ints go to the float path if we emit one,
            // otherwise straight to the helper.
            IR::LabelInstr *label = labelHelper;
            if (emitFloatAbs)
            {
                label = labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            }

            GenerateSmIntTest(src, insertInstr, label);
        }

        // MOV EAX, src
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_IX86
        // SAR EAX, AtomTag_Int32 (untag the int on x86)
        instr = IR::Instr::New(Js::OpCode::SAR, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
#endif

        IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
        regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());

        // CDQ (sign-extends EAX into EDX, i.e. EDX = 0 or -1)
        // Note: put EDX on dst to give of def to the EDX lifetime
        instr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

        // XOR EAX, EDX
        instr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

        // SUB EAX, EDX  ((x ^ mask) - mask == abs(x) for mask = x >> 31)
        instr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_X64
        // abs(INT_MIN) overflows a 32 bit integer.
        // JO $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
#endif

#ifdef _M_IX86
        // Retag on x86; the shift overflows for abs(INT_MIN), hence the JO.
        // SHL EAX, AtomTag_Int32
        instr = IR::Instr::New(Js::OpCode::SHL, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        // JO $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // INC EAX (set the tag bit)
        instr = IR::Instr::New(Js::OpCode::INC, regEAX, regEAX, this->m_func);
        insertInstr->InsertBefore(instr);
#endif

        // MOV dst, EAX
        instr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
        insertInstr->InsertBefore(instr);

#ifdef _M_X64
        GenerateInt32ToVarConversion(dst, insertInstr);
#endif
    }

    if (labelFloat)
    {
        // JMP $done (successful int abs skips the float path)
        instr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
        insertInstr->InsertBefore(instr);

        // $float
        insertInstr->InsertBefore(labelFloat);
    }

    if (emitFloatAbs)
    {
#if defined(_M_IX86)
        // CMP [src], JavascriptNumber.vtable
        IR::Opnd *opnd = IR::IndirOpnd::New(src->AsRegOpnd(), (int32)0, TyMachPtr, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opnd);
        instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
        insertInstr->InsertBefore(instr);

        // JNE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);

        // MOVSD r1, [src + offsetof(value)]
        opnd = IR::IndirOpnd::New(src->AsRegOpnd(), Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyMachDouble, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regOpnd, opnd, this->m_func);
        insertInstr->InsertBefore(instr);

        this->GenerateFloatAbs(regOpnd, insertInstr);

        // dst = DoubleToVar(r1)
        SaveDoubleToVar(callInstr->GetDst()->AsRegOpnd(), regOpnd, callInstr, insertInstr);
#elif defined(_M_X64)
        // if (typeof(src) == double)
        IR::RegOpnd *src64 = src->AsRegOpnd();
        GenerateFloatTest(src64, insertInstr, labelHelper);

        // dst64 = MOV src64
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, src64, this->m_func));

        // Unconditionally set the sign bit. This will get XORd away when we remove the tag.
        // dst64 = OR 0x8000000000000000
        insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::OR, dst, dst, IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func), this->m_func));
#endif
    }
    else if(!isInt)
    {
        // The source is not known to be a tagged int, so either it's definitely not an int (isNotInt), or the int version of
        // abs failed the tag check and jumped here. We can't emit the float version of abs (!emitFloatAbs) due to SSE2 not
        // being available, so jump straight to the helper.
        // JMP $helper
        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
  4078. IR::Instr * LowererMD::GenerateFloatAbs(IR::RegOpnd * regOpnd, IR::Instr * insertInstr)
  4079. {
  4080. // ANDPS reg, absDoubleCst
  4081. IR::Opnd * opnd;
  4082. if (regOpnd->IsFloat64())
  4083. {
  4084. opnd = m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueAbsDoubleCst);
  4085. }
  4086. else
  4087. {
  4088. Assert(regOpnd->IsFloat32());
  4089. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsFloatCstAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  4090. }
  4091. // ANDPS has smaller encoding then ANDPD
  4092. IR::Instr * instr = IR::Instr::New(Js::OpCode::ANDPS, regOpnd, regOpnd, opnd, this->m_func);
  4093. insertInstr->InsertBefore(instr);
  4094. Legalize(instr);
  4095. return instr;
  4096. }
  4097. IR::RegOpnd* LowererMD::MaterializeDoubleConstFromInt(intptr_t constAddr, IR::Instr* instr)
  4098. {
  4099. IR::Opnd* constVal = IR::MemRefOpnd::New(constAddr, IRType::TyFloat64, this->m_func);
  4100. IR::RegOpnd * xmmReg = IR::RegOpnd::New(TyFloat64, m_func);
  4101. this->m_lowerer->InsertMove(xmmReg, constVal, instr);
  4102. return xmmReg;
  4103. }
  4104. IR::RegOpnd* LowererMD::MaterializeConstFromBits(int bits, IRType type, IR::Instr* instr)
  4105. {
  4106. IR::Opnd * regBits = IR::RegOpnd::New(TyInt32, m_func);
  4107. this->m_lowerer->InsertMove(regBits, IR::IntConstOpnd::New(bits, TyInt32, m_func), instr);
  4108. IR::RegOpnd * regConst = IR::RegOpnd::New(type, m_func);
  4109. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, regConst, regBits, m_func));
  4110. return regConst;
  4111. }
  4112. IR::Opnd* LowererMD::Subtract2To31(IR::Opnd* src1, IR::Opnd* intMinFP, IRType type, IR::Instr* instr)
  4113. {
  4114. Js::OpCode op = (type == TyFloat32) ? Js::OpCode::SUBSS : Js::OpCode::SUBSD;
  4115. IR::Opnd* adjSrc = IR::RegOpnd::New(type, m_func);
  4116. IR::Instr* sub = IR::Instr::New(op, adjSrc, src1, intMinFP, m_func);
  4117. instr->InsertBefore(sub);
  4118. Legalize(sub);
  4119. return adjSrc;
  4120. }
template <bool Saturate>
IR::Opnd*
LowererMD::GenerateTruncChecks(_In_ IR::Instr* instr, _In_opt_ IR::LabelInstr* doneLabel)
{
    // Emit the range checks for a checked float->int32/uint32 truncation.
    // Execution falls through to a "conversion" label when the (double-widened)
    // source is in range. Out-of-range or NaN inputs either store a clamped
    // result into dst and jump to doneLabel (Saturate == true), or throw
    // VBSERR_Overflow (Saturate == false).
    // Returns the source operand, widened to float64 if it was float32.
    AnalysisAssert(!Saturate || doneLabel);
    IR::Opnd* dst = instr->GetDst();
    Assert(dst->IsInt32() || dst->IsUInt32());

    // Signed saturation needs a dedicated NaN target (NaN -> 0); for unsigned,
    // NaN shares the too-small target below.
    IR::LabelInstr * nanLabel = (Saturate && dst->IsSigned()) ? IR::LabelInstr::New(Js::OpCode::Label, m_func, true) : nullptr;
    IR::LabelInstr * conversion = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * tooSmallLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd * src64 = nullptr;
    if (src1->IsFloat32())
    {
        // Widen so all comparisons below happen in double precision.
        src64 = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(src64, src1, instr);
    }
    else
    {
        src64 = src1;
    }

    // Exclusive lower bound: -1 for uint32, INT_MIN - 1 for int32.
    IR::RegOpnd* limitReg = MaterializeDoubleConstFromInt(dst->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleNegOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMinMinusOneAddr(), instr);

    m_lowerer->InsertCompareBranch(src64, limitReg, Js::OpCode::BrLe_A, tooSmallLabel, instr);

    // Exclusive upper bound: UINT_MAX + 1 for uint32, INT_MAX + 1 for int32.
    limitReg = MaterializeDoubleConstFromInt(dst->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleUintMaxPlusOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMaxPlusOneAddr(), instr);

    m_lowerer->InsertCompareBranch(limitReg, src64, Js::OpCode::BrGt_A, conversion, instr, true /*no NaN check*/);

    if (Saturate)
    {
        // Insert a label to mark this as the start of a helper block, so layout knows to move it
        m_lowerer->InsertLabel(true, instr);
        // NaN case is same as too small case for unsigned, so combine them
        instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, dst->IsSigned() ? nanLabel : tooSmallLabel, m_func));
        // Overflow case
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(dst->IsUnsigned() ? UINT32_MAX : INT32_MAX, dst->GetType(), m_func), instr);
        m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(tooSmallLabel);
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(dst->IsUnsigned() ? 0 : INT32_MIN, dst->GetType(), m_func), instr);
        m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        if (dst->IsSigned())
        {
            // NaN saturates to 0 for signed destinations.
            instr->InsertBefore(nanLabel);
            m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, dst->GetType(), m_func), instr);
            m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);
        }
    }
    else
    {
        instr->InsertBefore(tooSmallLabel);
        m_lowerer->GenerateThrow(IR::IntConstOpnd::New(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), instr);
        //no jump here we aren't coming back
    }

    instr->InsertBefore(conversion);
    return src64;
}
template <bool Saturate>
void
LowererMD::GenerateTruncWithCheck(_In_ IR::Instr * instr)
{
    // Lower a float->int32/uint32 truncation with range checking.
    // GenerateTruncChecks emits the MIN/MAX comparisons (throwing on overflow,
    // or clamping + jumping to doneLabel when Saturate is true) and returns
    // the source widened to double. The original instr is removed at the end.
    Assert(AutoSystemInfo::Data.SSE2Available());

    IR::LabelInstr * doneLabel = Saturate ? IR::LabelInstr::New(Js::OpCode::Label, m_func) : nullptr;
    IR::Opnd* src64 = GenerateTruncChecks<Saturate>(instr, doneLabel); //converts src to double and checks if MIN <= src <= MAX

    IR::Opnd* dst = instr->GetDst();

    if (dst->IsUnsigned())
    {
        // CVTTSD2SI only produces a signed 32-bit result. For inputs >= 2^31,
        // subtract 2^31 in the double domain first, then add it back as an
        // integer (dst starts at 0 or 0x80000000 accordingly).
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, m_func), instr);
        IR::LabelInstr * skipUnsignedPart = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::Opnd* twoTo31 = MaterializeDoubleConstFromInt(m_func->GetThreadContextInfo()->GetDoubleTwoTo31Addr(), instr);
        m_lowerer->InsertCompareBranch(src64, twoTo31, Js::OpCode::BrLt_A, skipUnsignedPart, instr);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SUBPD, src64, src64, twoTo31, m_func));
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0x80000000 /*2^31*/, TyUint32, m_func), instr);
        instr->InsertBefore(skipUnsignedPart);
        IR::Opnd* tmp = IR::RegOpnd::New(TyInt32, m_func);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, tmp, src64, m_func));
        instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, tmp, m_func));
    }
    else
    {
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, dst, src64, m_func));
    }

    if (Saturate)
    {
        // Saturating out-of-range paths emitted by GenerateTruncChecks land here.
        instr->InsertBefore(doneLabel);
    }

    // The instruction has been fully expanded; drop it.
    instr->UnlinkSrc1();
    instr->UnlinkDst();
    instr->Remove();
}
template void LowererMD::GenerateTruncWithCheck<false>(_In_ IR::Instr * instr);
template void LowererMD::GenerateTruncWithCheck<true>(_In_ IR::Instr * instr);
  4213. void
  4214. LowererMD::GenerateCtz(IR::Instr * instr)
  4215. {
  4216. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  4217. Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
  4218. #ifdef _M_IX86
  4219. if (instr->GetSrc1()->IsInt64())
  4220. {
  4221. lowererMDArch.EmitInt64Instr(instr);
  4222. return;
  4223. }
  4224. #endif
  4225. if (AutoSystemInfo::Data.TZCntAvailable())
  4226. {
  4227. instr->m_opcode = Js::OpCode::TZCNT;
  4228. Legalize(instr);
  4229. }
  4230. else
  4231. {
  4232. // dst = BSF src
  4233. // dst = CMOVE dst, 32 // dst is src1 to help reg alloc
  4234. int instrSize = instr->GetSrc1()->GetSize();
  4235. IRType type = instrSize == 8 ? TyInt64 : TyInt32;
  4236. instr->m_opcode = Js::OpCode::BSF;
  4237. Legalize(instr);
  4238. IR::IntConstOpnd * const32 = IR::IntConstOpnd::New(instrSize * 8, type, m_func);
  4239. IR::Instr* cmove = IR::Instr::New(Js::OpCode::CMOVE, instr->GetDst(), instr->GetDst(), const32, this->m_func);
  4240. instr->InsertAfter(cmove);
  4241. Legalize(cmove);
  4242. }
  4243. }
  4244. void
  4245. LowererMD::GeneratePopCnt(IR::Instr * instr)
  4246. {
  4247. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  4248. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
  4249. #ifdef _M_IX86
  4250. if (instr->GetSrc1()->IsInt64())
  4251. {
  4252. lowererMDArch.EmitInt64Instr(instr);
  4253. return;
  4254. }
  4255. #endif
  4256. if (AutoSystemInfo::Data.PopCntAvailable())
  4257. {
  4258. instr->m_opcode = Js::OpCode::POPCNT;
  4259. Legalize(instr);
  4260. }
  4261. else
  4262. {
  4263. int instrSize = instr->GetSrc1()->GetSize();
  4264. LoadHelperArgument(instr, instr->GetSrc1());
  4265. instr->UnlinkSrc1();
  4266. this->ChangeToHelperCall(instr, instrSize == 8 ? IR::HelperPopCnt64 : IR::HelperPopCnt32);
  4267. }
  4268. }
void
LowererMD::GenerateClz(IR::Instr * instr)
{
    // Lower a count-leading-zeros operation. Uses the LZCNT instruction when
    // the CPU supports it; otherwise emulates with BSR (index of the highest
    // set bit, result undefined and ZF set for a zero source).
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
#ifdef _M_IX86
    if (instr->GetSrc1()->IsInt64())
    {
        // 64-bit operands on x86 are expanded by the arch-specific lowerer.
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.LZCntAvailable())
    {
        instr->m_opcode = Js::OpCode::LZCNT;
        Legalize(instr);
    }
    else
    {
        // tmp = BSR src
        // JE $label32          ; taken when src == 0 (BSR sets ZF)
        // dst = SUB 31, tmp
        // dst = SUB 63, tmp; for int64
        // JMP $done
        // label32:
        // dst = mov 32;
        // dst = mov 64; for int64
        // $done
        int instrSize = instr->GetSrc1()->GetSize();
        IRType type = instrSize == 8 ? TyInt64 : TyInt32;
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr->m_next);
        // Redirect the BSR result into a temp; dst is written on both paths below.
        IR::Opnd * dst = instr->UnlinkDst();
        IR::Opnd * tmpOpnd = IR::RegOpnd::New(type, m_func);
        instr->SetDst(tmpOpnd);
        instr->m_opcode = Js::OpCode::BSR;
        Legalize(instr);
        IR::LabelInstr * label32 = Lowerer::InsertLabel(false, doneLabel);
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, label32, m_func);
        label32->InsertBefore(instr);
        // clz = (bitWidth - 1) - bsr(src)
        Lowerer::InsertSub(false, dst, IR::IntConstOpnd::New(instrSize == 8 ? 63 : 31, type, m_func), tmpOpnd, label32);
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, label32);
        // Zero input: clz is the full bit width.
        Lowerer::InsertMove(dst, IR::IntConstOpnd::New(instrSize == 8 ? 64 : 32, type, m_func), doneLabel);
    }
}
  4313. #if !FLOATVAR
void
LowererMD::GenerateNumberAllocation(IR::RegOpnd * opndDst, IR::Instr * instrInsert, bool isHelper)
{
    // Emit an inline bump allocation of a JavascriptNumber from the recycler's
    // number allocator, falling back to the AllocUninitializedNumber helper
    // when the current block is exhausted. opndDst receives the allocated
    // object; isHelper marks the join label as helper code for block layout.
    size_t alignedAllocSize = Js::RecyclerJavascriptNumberAllocator::GetAlignedAllocSize(
        m_func->GetScriptContextInfo()->IsRecyclerVerifyEnabled(),
        m_func->GetScriptContextInfo()->GetRecyclerVerifyPad());

    IR::Opnd * endAddressOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorEndAddress);
    IR::Opnd * freeObjectListOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorFreeObjectList);

    // MOV dst, allocator->freeObjectList
    IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::MOV, opndDst, freeObjectListOpnd, this->m_func);
    instrInsert->InsertBefore(loadMemBlockInstr);

    // LEA nextMemBlock, [dst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd,
        IR::IndirOpnd::New(opndDst, alignedAllocSize, TyMachPtr, this->m_func), this->m_func);
    instrInsert->InsertBefore(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    instrInsert->InsertBefore(checkInstr);

    // JA $helper (the bump would run past the end of the current block)
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JA, helperLabel, this->m_func);
    instrInsert->InsertBefore(branchInstr);

    // MOV allocator->freeObjectList, nextMemBlock (commit the bump)
    IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::MOV, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
    instrInsert->InsertBefore(setFreeObjectListInstr);

    // JMP $done
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    IR::BranchInstr * branchToDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
    instrInsert->InsertBefore(branchToDoneInstr);

    // $helper:
    instrInsert->InsertBefore(helperLabel);

    // PUSH allocator
    this->LoadHelperArgument(instrInsert, m_lowerer->LoadScriptContextValueOpnd(instrInsert, ScriptContextValue::ScriptContextNumberAllocator));

    // dst = Call AllocUninitializedNumber
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::CALL, opndDst,
        IR::HelperCallOpnd::New(IR::HelperAllocUninitializedNumber, this->m_func), this->m_func);
    instrInsert->InsertBefore(instrCall);
    this->lowererMDArch.LowerCall(instrCall, 0);

    // $done:
    instrInsert->InsertBefore(doneLabel);
}
  4358. #endif
  4359. #ifdef _CONTROL_FLOW_GUARD
void
LowererMD::GenerateCFGCheck(IR::Opnd * entryPointOpnd, IR::Instr * insertBeforeInstr)
{
    // Emit a Control Flow Guard validation of an indirect call target.
    // When JIT thunks are enabled and initialized, a fast range check against
    // the thunk segment is emitted first: targets inside the segment are
    // masked to thunk alignment and skip the __guard_check_icall call.
    bool useJITTrampoline = CONFIG_FLAG(UseJITTrampoline);
    IR::LabelInstr * callLabelInstr = nullptr;
    uintptr_t jitThunkStartAddress = NULL;
    if (useJITTrampoline)
    {
#if ENABLE_OOP_NATIVE_CODEGEN
        if (m_func->IsOOPJIT())
        {
            OOPJITThunkEmitter * jitThunkEmitter = m_func->GetOOPThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        else
#endif
        {
            InProcJITThunkEmitter * jitThunkEmitter = m_func->GetInProcThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        if (jitThunkStartAddress)
        {
            uintptr_t endAddressOfSegment = jitThunkStartAddress + InProcJITThunkEmitter::TotalThunkSize;
            Assert(endAddressOfSegment > jitThunkStartAddress);
            // Generate instructions for local Pre-Reserved Segment Range check
            IR::AddrOpnd * endAddressOfSegmentConstOpnd = IR::AddrOpnd::New(endAddressOfSegment, IR::AddrOpndKindDynamicMisc, m_func);
            IR::RegOpnd *resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            callLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            IR::LabelInstr * cfgLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

            // Unsigned compare of (segmentEnd - target) against the segment
            // size covers both "below start" and "at/above end" in one branch.
            // resultOpnd = SUB endAddressOfSegmentConstOpnd, entryPointOpnd
            // CMP resultOpnd, TotalThunkSize
            // JAE $cfgLabel
            // AND entryPointOpnd, ~(ThunkSize-1)
            // JMP $callLabel
            m_lowerer->InsertSub(false, resultOpnd, endAddressOfSegmentConstOpnd, entryPointOpnd, insertBeforeInstr);
            m_lowerer->InsertCompareBranch(resultOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::TotalThunkSize, TyMachReg, m_func, true), Js::OpCode::BrGe_A, true, cfgLabelInstr, insertBeforeInstr);
            m_lowerer->InsertAnd(entryPointOpnd, entryPointOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::ThunkAlignmentMask, TyMachReg, m_func, true), insertBeforeInstr);
            m_lowerer->InsertBranch(Js::OpCode::Br, callLabelInstr, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(cfgLabelInstr);
        }
    }
    //MOV ecx, entryPoint  (the guard check routine takes the target in (e/r)cx)
    IR::RegOpnd * entryPointRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
#if _M_IX86
    entryPointRegOpnd->SetReg(RegECX);
#elif _M_X64
    entryPointRegOpnd->SetReg(RegRCX);
#endif
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr* movInstrEntryPointToRegister = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstrEntryPointToRegister);

    //Generate CheckCFG CALL here
    IR::HelperCallOpnd *cfgCallOpnd = IR::HelperCallOpnd::New(IR::HelperGuardCheckCall, this->m_func);
    IR::Instr* cfgCallInstr = IR::Instr::New(Js::OpCode::CALL, this->m_func);
    this->m_func->SetHasCallsOnSelfAndParents();
#if _M_IX86
    //call[__guard_check_icall_fptr]
    cfgCallInstr->SetSrc1(cfgCallOpnd);
#elif _M_X64
    //mov rax, __guard_check_icall_fptr
    IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, m_func), RegRAX, TyMachPtr, this->m_func);
    IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, cfgCallOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstr);
    //call rax
    cfgCallInstr->SetSrc1(targetOpnd);
#endif
    //CALL cfg(rax)
    insertBeforeInstr->InsertBefore(cfgCallInstr);
    if (jitThunkStartAddress)
    {
        Assert(callLabelInstr);
        if (CONFIG_FLAG(ForceJITCFGCheck))
        {
            // Always generate CFG check to make sure that the address is still valid
            movInstrEntryPointToRegister->InsertBefore(callLabelInstr);
        }
        else
        {
            // In-segment targets jump here, skipping the guard call entirely.
            insertBeforeInstr->InsertBefore(callLabelInstr);
        }
    }
}
  4442. #endif
void
LowererMD::GenerateFastRecyclerAlloc(size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, IR::LabelInstr* allocHelperLabel, IR::LabelInstr* allocDoneLabel)
{
    // Emit an inline recycler bump allocation of allocSize bytes into
    // newObjDst. Jumps to allocHelperLabel when the allocator's current block
    // cannot satisfy the request and to allocDoneLabel on success; the caller
    // provides both labels and the helper-call fallback.
    IR::Opnd * endAddressOpnd;
    IR::Opnd * freeListOpnd;

    ScriptContextInfo* scriptContext = this->m_func->GetScriptContextInfo();
    void* allocatorAddress;
    uint32 endAddressOffset;
    uint32 freeListOffset;
    size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);

    bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
    // Resolve the size-class allocator and its field offsets so the emitted
    // code can reference the free list and end address directly.
    Recycler::GetNormalHeapBlockAllocatorInfoForNativeAllocation((void*)scriptContext->GetRecyclerAddr(), alignedSize,
        allocatorAddress, endAddressOffset, freeListOffset,
        allowNativeCodeBumpAllocation, this->m_func->IsOOPJIT());

    endAddressOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + endAddressOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorEndAddressRef);
    freeListOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + freeListOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorFreeListRef);
    const IR::AutoReuseOpnd autoReuseTempOpnd(freeListOpnd, m_func);

    // MOV newObjDst, allocator->freeObjectList
    Lowerer::InsertMove(newObjDst, freeListOpnd, insertionPointInstr);

    // LEA nextMemBlock, [newObjDst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd* nextMemBlockSrc = IR::IndirOpnd::New(newObjDst, (int32)alignedSize, TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd, nextMemBlockSrc, this->m_func);
    insertionPointInstr->InsertBefore(loadNextMemBlockInstr);

    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    insertionPointInstr->InsertBefore(checkInstr);
    Legalize(checkInstr);

    // JA $allocHelper (the bump would run past the end of the current block)
    IR::BranchInstr * branchToAllocHelperInstr = IR::BranchInstr::New(Js::OpCode::JA, allocHelperLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocHelperInstr);

    // MOV allocator->freeObjectList, nextMemBlock (commit the bump)
    Lowerer::InsertMove(freeListOpnd, nextMemBlockOpnd, insertionPointInstr, false);

    // JMP $allocDone
    IR::BranchInstr * branchToAllocDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, allocDoneLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocDoneInstr);
}
  4482. #ifdef ENABLE_WASM
  4483. void
  4484. LowererMD::GenerateCopysign(IR::Instr * instr)
  4485. {
  4486. #if defined(_M_IX86)
  4487. // We should only generate this if sse2 is available
  4488. Assert(AutoSystemInfo::Data.SSE2Available());
  4489. #endif
  4490. // ANDPS reg0, absDoubleCst
  4491. // ANDPS reg1, sgnBitDoubleCst
  4492. // ORPS reg0, reg1
  4493. // Copy sign from src2 to src1
  4494. IR::Opnd* src1 = instr->GetSrc1();
  4495. IR::Opnd* src2 = instr->GetSrc2();
  4496. Assert(src1->IsFloat32() || src1->IsFloat64());
  4497. GenerateFloatAbs(src1->AsRegOpnd(), instr);
  4498. IR::MemRefOpnd *memRef = IR::MemRefOpnd::New(src2->IsFloat32() ? this->m_func->GetThreadContextInfo()->GetSgnFloatBitCst() : this->m_func->GetThreadContextInfo()->GetSgnDoubleBitCst(),
  4499. src2->GetType(), this->m_func, src2->IsFloat32() ? IR::AddrOpndKindDynamicFloatRef : IR::AddrOpndKindDynamicDoubleRef);
  4500. IR::Instr* t2 = IR::Instr::New(Js::OpCode::ANDPS, instr->GetSrc2(), instr->GetSrc2(), memRef, m_func);
  4501. instr->InsertBefore(t2);
  4502. Legalize(t2);
  4503. instr->m_opcode = Js::OpCode::ORPS;
  4504. Legalize(instr);
  4505. };
  4506. #endif //ENABLE_WASM
  4507. void
  4508. LowererMD::SaveDoubleToVar(IR::RegOpnd * dstOpnd, IR::RegOpnd *opndFloat, IR::Instr *instrOrig, IR::Instr *instrInsert, bool isHelper)
  4509. {
  4510. Assert(opndFloat->GetType() == TyFloat64);
  4511. // Call JSNumber::ToVar to save the float operand to the result of the original (var) instruction
  4512. #if !FLOATVAR
  4513. // We should only generate this if sse2 is available
  4514. Assert(AutoSystemInfo::Data.SSE2Available());
  4515. IR::Opnd * symVTableDst;
  4516. IR::Opnd * symDblDst;
  4517. IR::Opnd * symTypeDst;
  4518. IR::Instr * newInstr;
  4519. IR::Instr * numberInitInsertInstr = nullptr;
  4520. if (instrOrig->dstIsTempNumber)
  4521. {
  4522. // Use the original dst to get the temp number sym
  4523. StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrOrig->GetDst(), instrOrig->dstIsTempNumberTransferred);
  4524. // LEA dst, &tempSym
  4525. IR::SymOpnd * symTempSrc = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
  4526. IR::Instr * loadTempNumberInstr = IR::Instr::New(Js::OpCode::LEA, dstOpnd, symTempSrc, this->m_func);
  4527. instrInsert->InsertBefore(loadTempNumberInstr);
  4528. symVTableDst = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
  4529. symDblDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
  4530. symTypeDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
  4531. if (this->m_lowerer->outerMostLoopLabel == nullptr)
  4532. {
  4533. // If we are not in loop, just insert in place
  4534. numberInitInsertInstr = instrInsert;
  4535. }
  4536. else
  4537. {
  4538. // Otherwise, initialize in the outer most loop top if we haven't initialized it yet.
  4539. numberInitInsertInstr = this->m_lowerer->initializedTempSym->TestAndSet(tempNumberSym->m_id) ?
  4540. nullptr : this->m_lowerer->outerMostLoopLabel;
  4541. }
  4542. }
  4543. else
  4544. {
  4545. this->GenerateNumberAllocation(dstOpnd, instrInsert, isHelper);
  4546. symVTableDst = IR::IndirOpnd::New(dstOpnd, 0, TyMachPtr, this->m_func);
  4547. symDblDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
  4548. symTypeDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
  4549. numberInitInsertInstr = instrInsert;
  4550. }
  4551. if (numberInitInsertInstr)
  4552. {
  4553. // Inline the case where the dst is marked as temp.
  4554. IR::Opnd *jsNumberVTable = m_lowerer->LoadVTableValueOpnd(numberInitInsertInstr, VTableValue::VtableJavascriptNumber);
  4555. // MOV dst->vtable, JavascriptNumber::vtable
  4556. newInstr = IR::Instr::New(Js::OpCode::MOV, symVTableDst, jsNumberVTable, this->m_func);
  4557. numberInitInsertInstr->InsertBefore(newInstr);
  4558. // MOV dst->type, JavascriptNumber_type
  4559. IR::Opnd *typeOpnd = m_lowerer->LoadLibraryValueOpnd(numberInitInsertInstr, LibraryValue::ValueNumberTypeStatic);
  4560. newInstr = IR::Instr::New(Js::OpCode::MOV, symTypeDst, typeOpnd, this->m_func);
  4561. numberInitInsertInstr->InsertBefore(newInstr);
  4562. }
  4563. // MOVSD dst->value, opndFloat ; copy the float result to the temp JavascriptNumber
  4564. newInstr = IR::Instr::New(Js::OpCode::MOVSD, symDblDst, opndFloat, this->m_func);
  4565. instrInsert->InsertBefore(newInstr);
  4566. #else
  4567. // s1 = MOVD opndFloat
  4568. IR::RegOpnd *s1 = IR::RegOpnd::New(TyMachReg, m_func);
  4569. IR::Instr *movd = IR::Instr::New(Js::OpCode::MOVD, s1, opndFloat, m_func);
  4570. instrInsert->InsertBefore(movd);
  4571. if (m_func->GetJITFunctionBody()->IsAsmJsMode())
  4572. {
  4573. // s1 = MOVD src
  4574. // tmp = NOT s1
  4575. // tmp = AND tmp, 0x7FF0000000000000ull
  4576. // test tmp, tmp
  4577. // je helper
  4578. // jmp done
  4579. // helper:
  4580. // tmp2 = AND s1, 0x000FFFFFFFFFFFFFull
  4581. // test tmp2, tmp2
  4582. // je done
  4583. // s1 = JavascriptNumber::k_Nan
  4584. // done:
  4585. IR::RegOpnd *tmp = IR::RegOpnd::New(TyMachReg, m_func);
  4586. IR::Instr * newInstr = IR::Instr::New(Js::OpCode::NOT, tmp, s1, m_func);
  4587. instrInsert->InsertBefore(newInstr);
  4588. LowererMD::MakeDstEquSrc1(newInstr);
  4589. newInstr = IR::Instr::New(Js::OpCode::AND, tmp, tmp, IR::AddrOpnd::New((Js::Var)0x7FF0000000000000, IR::AddrOpndKindConstantVar, m_func, true), m_func);
  4590. instrInsert->InsertBefore(newInstr);
  4591. LowererMD::Legalize(newInstr);
  4592. IR::LabelInstr* helper = Lowerer::InsertLabel(true, instrInsert);
  4593. Lowerer::InsertTestBranch(tmp, tmp, Js::OpCode::BrEq_A, helper, helper);
  4594. IR::LabelInstr* done = Lowerer::InsertLabel(isHelper, instrInsert);
  4595. Lowerer::InsertBranch(Js::OpCode::Br, done, helper);
  4596. IR::RegOpnd *tmp2 = IR::RegOpnd::New(TyMachReg, m_func);
  4597. newInstr = IR::Instr::New(Js::OpCode::AND, tmp2, s1, IR::AddrOpnd::New((Js::Var)0x000FFFFFFFFFFFFFull, IR::AddrOpndKindConstantVar, m_func, true), m_func);
  4598. done->InsertBefore(newInstr);
  4599. LowererMD::Legalize(newInstr);
  4600. Lowerer::InsertTestBranch(tmp2, tmp2, Js::OpCode::BrEq_A, done, done);
  4601. IR::Opnd * opndNaN = IR::AddrOpnd::New((Js::Var)Js::JavascriptNumber::k_Nan, IR::AddrOpndKindConstantVar, m_func, true);
  4602. Lowerer::InsertMove(s1, opndNaN, done);
  4603. }
  4604. // s1 = XOR s1, FloatTag_Value
  4605. // dst = s1
  4606. IR::Instr *setTag = IR::Instr::New(Js::OpCode::XOR,
  4607. s1,
  4608. s1,
  4609. IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value,
  4610. IR::AddrOpndKindConstantVar,
  4611. this->m_func,
  4612. /* dontEncode = */ true),
  4613. this->m_func);
  4614. IR::Instr *movDst = IR::Instr::New(Js::OpCode::MOV, dstOpnd, s1, this->m_func);
  4615. instrInsert->InsertBefore(setTag);
  4616. instrInsert->InsertBefore(movDst);
  4617. LowererMD::Legalize(setTag);
  4618. #endif
  4619. }
void
LowererMD::EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    // Emit the inline fast paths (tagged int / float const / JavascriptNumber).
    // A nullptr return means src was a constant and dst is already loaded inline.
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, insertInstr->HasBailOutInfo());
    if (labelDone == nullptr)
    {
        // We're done
        insertInstr->Remove();
        return;
    }
    // $Done note: insertAfter
    insertInstr->InsertAfter(labelDone);
    if (!insertInstr->HasBailOutInfo())
    {
        // $Done
        insertInstr->Remove();
        return;
    }
    Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
    IR::LabelInstr *labelNoBailOut = nullptr;
    IR::SymOpnd *tempSymOpnd = nullptr;
    if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
    {
        // BailOutPrimitiveButString: attempt the primitive-to-number conversion
        // via a helper first; bail out only if the helper reports failure.
        if (!this->m_func->tempSymDouble)
        {
            // Lazily allocate a shared stack slot to receive the helper's double result.
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
        }
        // LEA r3, tempSymDouble
        IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        tempSymOpnd = IR::SymOpnd::New(this->m_func->tempSymDouble, TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
        // regBoolResult = to_number_fromPrimitive(value, &dst, allowUndef, scriptContext);
        this->m_lowerer->LoadScriptContext(insertInstr);
        IR::IntConstOpnd *allowUndefOpnd;
        if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
        {
            allowUndefOpnd = IR::IntConstOpnd::New(true, TyInt32, this->m_func);
        }
        else
        {
            // NOTE(review): unreachable under the enclosing BailOutPrimitiveButString
            // check -- presumably leftover from when BailOutNumberOnly also took this
            // path; confirm before relying on this branch.
            Assert(insertInstr->GetBailOutKind() == IR::BailOutNumberOnly);
            allowUndefOpnd = IR::IntConstOpnd::New(false, TyInt32, this->m_func);
        }
        // Helper arguments are pushed in reverse order: allowUndef, &result, value.
        this->LoadHelperArgument(insertInstr, allowUndefOpnd);
        this->LoadHelperArgument(insertInstr, reg3Opnd);
        this->LoadHelperArgument(insertInstr, src);
        IR::RegOpnd *regBoolResult = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CALL, regBoolResult, IR::HelperCallOpnd::New(IR::HelperOp_ConvNumber_FromPrimitive, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
        this->lowererMDArch.LowerCall(instr, 0);
        // TEST regBoolResult, regBoolResult
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(regBoolResult);
        instr->SetSrc2(regBoolResult);
        insertInstr->InsertBefore(instr);
        // JNE $noBailOut -- conversion succeeded, skip the bailout below.
        labelNoBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelNoBailOut, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    // Bailout code: repurpose the original FromVar instruction as the bailout point.
    Assert(insertInstr->m_opcode == Js::OpCode::FromVar);
    insertInstr->UnlinkDst();
    insertInstr->FreeSrc1();
    IR::Instr *bailoutInstr = insertInstr;
    insertInstr = bailoutInstr->m_next;
    this->m_lowerer->GenerateBailOut(bailoutInstr);
    // $noBailOut
    if (labelNoBailOut)
    {
        insertInstr->InsertBefore(labelNoBailOut);
        Assert(dst->IsRegOpnd());
        // MOVSD dst, [pResult].f64
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
IR::LabelInstr*
LowererMD::EmitLoadFloatCommon(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, bool needHelperLabel)
{
    // Emits the common inline paths for loading a float dst from a Var src:
    //  - known float constant / immediate src: load the value directly and return
    //    nullptr (no slow path or $Done label is needed);
    //  - otherwise: emit the checked tagged-int/number load via LoadCheckedFloat,
    //    leave the stream positioned at $Helper (when needHelperLabel) so the
    //    caller can append its slow path, and return the $Done label the caller
    //    must insert after that slow path.
    IR::Instr *instr;
    Assert(src->GetType() == TyVar);
    Assert(dst->IsFloat());
    bool isFloatConst = false;
    IR::RegOpnd *regFloatOpnd = nullptr;
    if (src->IsRegOpnd() && src->AsRegOpnd()->m_sym->m_isFltConst)
    {
        IR::RegOpnd *regOpnd = src->AsRegOpnd();
        Assert(regOpnd->m_sym->m_isSingleDef);
        Js::Var value = regOpnd->m_sym->GetFloatConstValueAsVar_PostGlobOpt();
#if FLOATVAR
        // NaN-boxed build: materialize the double into native code data and load it from there.
        void *pDouble = (double*)NativeCodeDataNewNoFixup(this->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_EmitLoadFloatCommon_Double>, Js::JavascriptNumber::GetValue(value));
        IR::Opnd * doubleRef;
        if (!m_func->IsOOPJIT())
        {
            doubleRef = IR::MemRefOpnd::New(pDouble, TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        else
        {
            // OOP JIT: address the double relative to the native-code-data base register,
            // and keep that sym live across back edges since the reference may be in a loop.
            int offset = NativeCodeData::GetDataTotalOffset(pDouble);
            doubleRef = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), offset, TyMachDouble,
#if DBG
                NativeCodeData::GetDataDescription(pDouble, m_func->m_alloc),
#endif
                m_func, true);
            GetLowerer()->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
#else
        // Boxed-number build: the Var points at a JavascriptNumber; read its double field.
        IR::MemRefOpnd *doubleRef = IR::MemRefOpnd::New((BYTE*)value + Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func,
            IR::AddrOpndKindDynamicDoubleRef);
#endif
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regFloatOpnd, doubleRef, this->m_func);
        insertInstr->InsertBefore(instr);
        Legalize(instr);
        isFloatConst = true;
    }
    // Src is constant?
    if (src->IsImmediateOpnd() || src->IsFloatConstOpnd())
    {
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        m_lowerer->LoadFloatFromNonReg(src, regFloatOpnd, insertInstr);
        isFloatConst = true;
    }
    if (isFloatConst)
    {
        if (dst->GetType() == TyFloat32)
        {
            // CVTSD2SS regOpnd32.f32, regOpnd.f64 -- Convert regOpnd from f64 to f32
            IR::RegOpnd *regOpnd32 = regFloatOpnd->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
            instr = IR::Instr::New(Js::OpCode::CVTSD2SS, regOpnd32, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
            // MOVSS dst, regOpnd32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, regOpnd32, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, regOpnd
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        // Constant fully handled inline; signal the caller that no $Done label exists.
        return nullptr;
    }
    Assert(src->IsRegOpnd());
    IR::LabelInstr *labelStore = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    if (needHelperLabel)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    else
    {
        // No helper path requested: failed checks branch straight to $Done.
        labelHelper = labelDone;
    }
    bool const isFloat32 = dst->GetType() == TyFloat32;
    // Use a temporary f64 register when dst is f32 (conversion needed) or not a register.
    IR::RegOpnd *reg2 = ((isFloat32 || !dst->IsRegOpnd()) ? IR::RegOpnd::New(TyMachDouble, this->m_func) : dst->AsRegOpnd());
    // Load the float value in reg2
    this->lowererMDArch.LoadCheckedFloat(src->AsRegOpnd(), reg2, labelStore, labelHelper, insertInstr, needHelperLabel);
    // $Store
    insertInstr->InsertBefore(labelStore);
    if (isFloat32)
    {
        IR::RegOpnd *reg2_32 = reg2->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
        // CVTSD2SS r2_32.f32, r2.f64 -- Convert regOpnd from f64 to f32
        instr = IR::Instr::New(Js::OpCode::CVTSD2SS, reg2_32, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
        // MOVSS dst, r2_32
        instr = IR::Instr::New(Js::OpCode::MOVSS, dst, reg2_32, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    else if (reg2 != dst)
    {
        // MOVSD dst, r2
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    // JMP $Done
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    insertInstr->InsertBefore(instr);
    if (needHelperLabel)
    {
        // $Helper -- the caller's slow path goes between here and $Done.
        insertInstr->InsertBefore(labelHelper);
    }
    return labelDone;
}
void
LowererMD::EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
{
    // Loads a float dst from a Var src with a slow path that calls the full
    // number-conversion helper. instrBailOut/labelBailOut (optional) carry
    // bailout info used when the conversion may trigger implicit calls or when
    // a helper call must be avoided entirely (array-access bailouts).
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, true);
    if (labelDone == nullptr)
    {
        // We're done
        return;
    }
    IR::BailOutKind bailOutKind = instrBailOut && instrBailOut->HasBailOutInfo() ? instrBailOut->GetBailOutKind() : IR::BailOutInvalid;
    if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
    {
        // Bail out instead of making the helper call.
        Assert(labelBailOut);
        m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, insertInstr);
        insertInstr->InsertBefore(labelDone);
        return;
    }
    // The helper writes its result through a pointer; when dst is a register we
    // need a stack slot to receive it, then reload the register afterwards.
    IR::Opnd *memAddress = dst;
    if (dst->IsRegOpnd())
    {
        // Create an f64 stack location to store the result of the helper.
        IR::SymOpnd *symOpnd = IR::SymOpnd::New(StackSym::New(dst->GetType(), this->m_func), dst->GetType(), this->m_func);
        this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(double));
        memAddress = symOpnd;
    }
    // LEA r3, dst
    IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, memAddress, this->m_func);
    insertInstr->InsertBefore(instr);
    // to_number_full(value, &dst, scriptContext);
    // Create dummy binary op to convert into helper
    instr = IR::Instr::New(Js::OpCode::Add_A, this->m_func);
    instr->SetSrc1(src);
    instr->SetSrc2(reg3Opnd);
    insertInstr->InsertBefore(instr);
    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        // The conversion can run script; attach bailout info to the helper call,
        // sharing the bailout record if this instruction owns it.
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }
    IR::JnHelperMethod helper;
    if (dst->GetType() == TyFloat32)
    {
        helper = IR::HelperOp_ConvFloat_Helper;
    }
    else
    {
        helper = IR::HelperOp_ConvNumber_Helper;
    }
    this->m_lowerer->LowerBinaryHelperMem(instr, helper);
    if (dst->IsRegOpnd())
    {
        // Reload the register dst from the stack slot the helper filled in.
        if (dst->GetType() == TyFloat32)
        {
            // MOVSS dst, r32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, [pResult].f64
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    // $Done
    insertInstr->InsertBefore(labelDone);
}
  4888. void
  4889. LowererMD::LowerInt4NegWithBailOut(
  4890. IR::Instr *const instr,
  4891. const IR::BailOutKind bailOutKind,
  4892. IR::LabelInstr *const bailOutLabel,
  4893. IR::LabelInstr *const skipBailOutLabel)
  4894. {
  4895. Assert(instr);
  4896. Assert(instr->m_opcode == Js::OpCode::Neg_I4);
  4897. Assert(!instr->HasBailOutInfo());
  4898. Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  4899. Assert(bailOutLabel);
  4900. Assert(instr->m_next == bailOutLabel);
  4901. Assert(skipBailOutLabel);
  4902. instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
  4903. instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
  4904. // Lower the instruction
  4905. instr->m_opcode = Js::OpCode::NEG;
  4906. Legalize(instr);
  4907. if(bailOutKind & IR::BailOutOnOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
  4908. {
  4909. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
  4910. }
  4911. if(bailOutKind & IR::BailOutOnNegativeZero)
  4912. {
  4913. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, bailOutLabel, instr->m_func));
  4914. }
  4915. // Skip bailout
  4916. bailOutLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
  4917. }
void
LowererMD::LowerInt4AddWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    // Lowers Add_I4 to a machine ADD with an overflow bailout. Resulting layout:
    //     add ...
    //     jno $skipBailOut
    //   $bailOut:            (must immediately follow the add on entry)
    //     <restore dst if it aliased a source>
    //     <bailout code generated by the caller>
    //   $skipBailOut:
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Add_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    // Restore sources overwritten by the instruction in the bailout path
    // (the bailout must observe the operands' pre-op values).
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 & dstEquSrc2)
    {
        // We have:
        //     s1 += s1
        // Which is equivalent to:
        //     s1 <<= 1
        //
        // These overflow a signed 32-bit integer when for the initial s1:
        //     s1 > 0 && (s1 & 0x40000000) - result is negative after overflow
        //     s1 < 0 && !(s1 & 0x40000000) - result is nonnegative after overflow
        //
        // To restore s1 to its value before the operation, we first do an arithmetic right-shift by one bit to undo the
        // left-shift and preserve the sign of the result after overflow. Since the result after overflow always has the
        // opposite sign from the operands (hence the overflow), we just need to invert the sign of the result. The following
        // restores s1 to its value before the instruction:
        //     s1 = (s1 >> 1) ^ 0x80000000
        //
        // Generate:
        //     sar s1, 1
        //     xor s1, 0x80000000
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::SAR,
                dst,
                dst,
                IR::IntConstOpnd::New(1, TyInt8, instr->m_func),
                instr->m_func)
        );
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::XOR,
                dst,
                dst,
                IR::IntConstOpnd::New(INT32_MIN, TyInt32, instr->m_func, true /* dontEncode */),
                instr->m_func)
        );
    }
    else if(dstEquSrc1 | dstEquSrc2)
    {
        // We have:
        //     s1 += s2
        // Or:
        //     s1 = s2 + s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 -= s2
        //
        // Generate:
        //     sub s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        bailOutLabel->InsertAfter(IR::Instr::New(Js::OpCode::SUB, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }
    // Lower the instruction
    ChangeToAdd(instr, true /* needFlags */);
    Legalize(instr);
    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));
    // Fall through to bailOutLabel
}
  5009. void
  5010. LowererMD::LowerInt4SubWithBailOut(
  5011. IR::Instr *const instr,
  5012. const IR::BailOutKind bailOutKind,
  5013. IR::LabelInstr *const bailOutLabel,
  5014. IR::LabelInstr *const skipBailOutLabel)
  5015. {
  5016. Assert(instr);
  5017. Assert(instr->m_opcode == Js::OpCode::Sub_I4);
  5018. Assert(!instr->HasBailOutInfo());
  5019. Assert(
  5020. (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
  5021. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  5022. Assert(bailOutLabel);
  5023. Assert(instr->m_next == bailOutLabel);
  5024. Assert(skipBailOutLabel);
  5025. instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
  5026. instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
  5027. instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
  5028. // Restore sources overwritten by the instruction in the bailout path
  5029. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5030. Assert(dst->IsRegOpnd());
  5031. const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
  5032. if(dstEquSrc1 ^ dstEquSrc2)
  5033. {
  5034. // We have:
  5035. // s1 -= s2
  5036. // Or:
  5037. // s1 = s2 - s1
  5038. //
  5039. // The following restores s1 to its value before the instruction:
  5040. // s1 += s2
  5041. // Or:
  5042. // s1 = s2 - s1
  5043. //
  5044. // Generate:
  5045. // neg s1 - only for second case
  5046. // add s1, s2
  5047. if(dstEquSrc1)
  5048. {
  5049. Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
  5050. }
  5051. else
  5052. {
  5053. Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
  5054. }
  5055. const auto startBailOutInstr = bailOutLabel->m_next;
  5056. Assert(startBailOutInstr);
  5057. if(dstEquSrc2)
  5058. {
  5059. startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func));
  5060. }
  5061. startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
  5062. }
  5063. // Lower the instruction
  5064. ChangeToSub(instr, true /* needFlags */);
  5065. Legalize(instr);
  5066. // Skip bailout on no overflow
  5067. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));
  5068. // Fall through to bailOutLabel
  5069. }
bool
LowererMD::GenerateSimplifiedInt4Mul(
    IR::Instr *const mulInstr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel)
{
    // Strength-reduces Mul_I4 by a small constant into SHL/ADD/NEG/MOV sequences.
    // Returns true when the multiply was replaced (the caller still inserts the
    // final overflow branch), false to fall back to a regular IMUL.
    if (AutoSystemInfo::Data.IsAtomPlatform())
    {
        // On Atom, always optimize unless phase is off
        if (PHASE_OFF(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) ||
            PHASE_OFF(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
            return false;
    }
    else
    {
        // On other platforms, don't optimize unless phase is forced
        if (!PHASE_FORCE(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) &&
            !PHASE_FORCE(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
            return false;
    }
    Assert(mulInstr);
    Assert(mulInstr->m_opcode == Js::OpCode::Mul_I4);
    IR::Instr *instr = mulInstr, *nextInstr;
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    // Only applicable when exactly one operand is a known integer constant.
    if (!src1->IsIntConstOpnd() && !src2->IsIntConstOpnd())
        return false;
    // if two const operands, GlobOpt would have folded the computation
    Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd()));
    Assert(dst->IsRegOpnd());
    const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
    const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
    const auto constSrcValue = constSrc->AsIntConstOpnd()->AsInt32();
    auto nonConstSrcCopy = nonConstSrc;
    Assert(nonConstSrc->IsRegOpnd());
    bool doOVF = bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
    // don't simplify mul by large numbers with OF check
    if (doOVF && (constSrcValue > 3 || constSrcValue < -3))
        return false;
    switch(constSrcValue)
    {
    case -3:
    case 3:
        // x*3 => (x << 1) + x; x*-3 additionally negates. Each step that can
        // overflow gets its own JO to the bailout when doOVF.
        // if dst = src, we need to have a copy of the src for the ADD/SUB
        if (dst->IsEqual(nonConstSrc))
        {
            nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
            // MOV
            Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
        }
        instr->UnlinkSrc1();
        instr->UnlinkSrc2();
        // SHL
        instr->m_opcode = Js::OpCode::SHL;
        instr->SetSrc1(nonConstSrc);
        instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
        constSrc->Free(instr->m_func);
        Legalize(instr);
        // JO
        if (doOVF)
        {
            nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
            instr->InsertAfter(nextInstr);
            instr = nextInstr;
        }
        // ADD
        nextInstr = IR::Instr::New(Js::OpCode::ADD, dst, dst, nonConstSrcCopy, instr->m_func);
        instr->InsertAfter(nextInstr);
        instr = nextInstr;
        Legalize(instr);
        if (constSrcValue == -3)
        {
            // JO
            if (doOVF)
            {
                nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
                instr->InsertAfter(nextInstr);
                instr = nextInstr;
            }
            // NEG
            nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
            instr->InsertAfter(nextInstr);
            instr = nextInstr;
            Legalize(instr);
        }
        // last JO inserted by caller
        return true;
    case -2:
    case 2:
        // x*2 => x << 1; x*-2 additionally negates.
        instr->UnlinkSrc1();
        instr->UnlinkSrc2();
        // SHL
        instr->m_opcode = Js::OpCode::SHL;
        instr->SetSrc1(nonConstSrc);
        instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
        constSrc->Free(instr->m_func);
        Legalize(instr);
        if (constSrcValue == -2)
        {
            // JO
            if (doOVF)
            {
                nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
                instr->InsertAfter(nextInstr);
                instr = nextInstr;
            }
            // NEG
            nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
            instr->InsertAfter(nextInstr);
            instr = nextInstr;
            Legalize(instr);
        }
        // last JO inserted by caller
        return true;
    case -1:
        // x*-1 => neg x
        instr->UnlinkSrc1();
        instr->UnlinkSrc2();
        // NEG
        instr->m_opcode = Js::OpCode::NEG;
        instr->SetSrc1(nonConstSrc);
        constSrc->Free(instr->m_func);
        Legalize(instr);
        // JO inserted by caller
        return true;
    case 0:
        // x*0 => 0
        instr->FreeSrc1();
        instr->FreeSrc2();
        // MOV
        instr->m_opcode = Js::OpCode::MOV;
        instr->SetSrc1(IR::IntConstOpnd::New((IntConstType) 0, TyInt32, instr->m_func));
        Legalize(instr);
        // JO inserted by caller are removed in later phases
        return true;
    case 1:
        // x*1 => x
        instr->UnlinkSrc1();
        instr->UnlinkSrc2();
        // MOV
        instr->m_opcode = Js::OpCode::MOV;
        instr->SetSrc1(nonConstSrc);
        constSrc->Free(instr->m_func);
        Legalize(instr);
        // JO inserted by caller are removed in later phases
        return true;
    default:
        // large numbers with no OF check
        Assert(!doOVF);
        // 2^i
        // -2^i
        if (Math::IsPow2(constSrcValue) || Math::IsPow2(-constSrcValue))
        {
            uint32 shamt = constSrcValue > 0 ? Math::Log2(constSrcValue) : Math::Log2(-constSrcValue);
            instr->UnlinkSrc1();
            instr->UnlinkSrc2();
            // SHL
            instr->m_opcode = Js::OpCode::SHL;
            instr->SetSrc1(nonConstSrc);
            instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
            constSrc->Free(instr->m_func);
            Legalize(instr);
            if (constSrcValue < 0)
            {
                // NEG
                nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
                instr->InsertAfter(nextInstr);
                // NOTE(review): other NEG paths advance instr to nextInstr before
                // legalizing; this legalizes the SHL again instead of the new NEG.
                // Possibly benign since NEG dst,dst is already two-operand form --
                // confirm intent.
                Legalize(instr);
            }
            return true;
        }
        // 2^i + 1
        // 2^i - 1
        if (Math::IsPow2(constSrcValue - 1) || Math::IsPow2(constSrcValue + 1))
        {
            bool plusOne = Math::IsPow2(constSrcValue - 1);
            uint32 shamt = plusOne ? Math::Log2(constSrcValue - 1) : Math::Log2(constSrcValue + 1);
            // if dst = src, we need an unclobbered copy of src for the ADD/SUB
            if (dst->IsEqual(nonConstSrc))
            {
                nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
                // MOV
                Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
            }
            instr->UnlinkSrc1();
            instr->UnlinkSrc2();
            // SHL
            instr->m_opcode = Js::OpCode::SHL;
            instr->SetSrc1(nonConstSrc);
            instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
            constSrc->Free(instr->m_func);
            Legalize(instr);
            // ADD/SUB
            nextInstr = IR::Instr::New(plusOne ? Js::OpCode::ADD : Js::OpCode::SUB, dst, dst, nonConstSrcCopy, instr->m_func);
            instr->InsertAfter(nextInstr);
            instr = nextInstr;
            Legalize(instr);
            return true;
        }
        return false;
    }
}
  5267. void
  5268. LowererMD::LowerInt4MulWithBailOut(
  5269. IR::Instr *const instr,
  5270. const IR::BailOutKind bailOutKind,
  5271. IR::LabelInstr *const bailOutLabel,
  5272. IR::LabelInstr *const skipBailOutLabel)
  5273. {
  5274. Assert(instr);
  5275. Assert(instr->m_opcode == Js::OpCode::Mul_I4);
  5276. Assert(!instr->HasBailOutInfo());
  5277. Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  5278. Assert(bailOutLabel);
  5279. Assert(instr->m_next == bailOutLabel);
  5280. Assert(skipBailOutLabel);
  5281. instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
  5282. instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
  5283. instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
  5284. IR::LabelInstr *checkForNegativeZeroLabel = nullptr;
  5285. if(bailOutKind & IR::BailOutOnNegativeZero)
  5286. {
  5287. // We have:
  5288. // s3 = s1 * s2
  5289. //
  5290. // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
  5291. // bailOut = (s1 < 0 || s2 < 0) (either s1 or s2 has to be zero for the result to be zero, so we don't emit zero checks)
  5292. //
  5293. // Note, however, that if in future we decide to ignore mul overflow in some cases, and overflow occurs with one of the operands as negative,
  5294. // this can lead to bailout. Will handle that case if ever we decide to ignore mul overflow.
  5295. //
  5296. // Generate:
  5297. // $checkForNegativeZeroLabel:
  5298. // test s1, s1
  5299. // js $bailOutLabel
  5300. // test s2, s2
  5301. // jns $skipBailOutLabel
  5302. // (fall through to bail out)
  5303. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5304. Assert(dst->IsRegOpnd());
  5305. Assert(!src1->IsEqual(src2)); // cannot result in -0 if both operands are the same; GlobOpt should have figured that out
  5306. checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
  5307. bailOutLabel->InsertBefore(checkForNegativeZeroLabel);
  5308. if(src1->IsIntConstOpnd() || src2->IsIntConstOpnd())
  5309. {
  5310. Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd())); // if this results in -0, GlobOpt should have avoided type specialization
  5311. const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
  5312. const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
  5313. Assert(nonConstSrc->IsRegOpnd());
  5314. const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5315. newInstr->SetSrc1(nonConstSrc);
  5316. newInstr->SetSrc2(nonConstSrc);
  5317. bailOutLabel->InsertBefore(newInstr);
  5318. const auto constSrcValue = constSrc->AsIntConstOpnd()->GetValue();
  5319. if(constSrcValue == 0)
  5320. {
  5321. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
  5322. }
  5323. else
  5324. {
  5325. Assert(constSrcValue < 0); // cannot result in -0 if one operand is positive; GlobOpt should have figured that out
  5326. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));
  5327. }
  5328. }
  5329. else
  5330. {
  5331. auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5332. newInstr->SetSrc1(src1);
  5333. newInstr->SetSrc2(src1);
  5334. bailOutLabel->InsertBefore(newInstr);
  5335. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JSB, bailOutLabel, instr->m_func));
  5336. newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5337. newInstr->SetSrc1(src2);
  5338. newInstr->SetSrc2(src2);
  5339. bailOutLabel->InsertBefore(newInstr);
  5340. bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
  5341. }
  5342. // Fall through to bailOutLabel
  5343. }
  5344. const bool needsOverflowCheck =
  5345. bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
  5346. AssertMsg(!instr->ShouldCheckForNon32BitOverflow() || (needsOverflowCheck && instr->ShouldCheckForNon32BitOverflow()), "Non 32-bit overflow check required without bailout info");
  5347. bool simplifiedMul = LowererMD::GenerateSimplifiedInt4Mul(instr, bailOutKind, bailOutLabel);
  5348. // Lower the instruction
  5349. if (!simplifiedMul)
  5350. {
  5351. LowererMD::ChangeToIMul(instr, needsOverflowCheck);
  5352. }
  5353. const auto insertBeforeInstr = checkForNegativeZeroLabel ? checkForNegativeZeroLabel : bailOutLabel;
  5354. if(needsOverflowCheck)
  5355. {
  5356. // do we care about int32 or non-int32 overflow ?
  5357. if (!simplifiedMul && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
  5358. LowererMD::EmitNon32BitOvfCheck(instr, insertBeforeInstr, bailOutLabel);
  5359. else
  5360. insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
  5361. }
  5362. if(bailOutKind & IR::BailOutOnNegativeZero)
  5363. {
  5364. // On zero, branch to determine whether the result would be -0
  5365. Assert(checkForNegativeZeroLabel);
  5366. const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
  5367. const auto dst = instr->GetDst();
  5368. newInstr->SetSrc1(dst);
  5369. newInstr->SetSrc2(dst);
  5370. insertBeforeInstr->InsertBefore(newInstr);
  5371. insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, checkForNegativeZeroLabel, instr->m_func));
  5372. }
  5373. // Skip bailout
  5374. insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
  5375. }
void
LowererMD::LowerInt4RemWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    // Lowers Rem_I4 (int32 remainder) when a bailout on a -0 result is required:
    // a zero remainder with a negative dividend corresponds to -0 in JS number
    // semantics, which int32 cannot represent, so we bail out in that case.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnNegativeZero);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel); // bailout path must immediately follow the rem
    Assert(skipBailOutLabel);

    // Normalize dst/src operands to int32 views.
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));

    // May emit a simplified fast path; when it does, that path has already
    // branched to skipBailOutLabel for s1 >= 0 (see the !fastPath check below).
    bool fastPath = m_lowerer->GenerateSimplifiedInt4Rem(instr, skipBailOutLabel);

    // We have:
    //     s3 = s1 % s2
    //
    // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
    //     bailOut = (s3 == 0 && s1 < 0)
    //
    // Generate:
    // $checkForNegativeZeroLabel:
    //     test s3, s3
    //     jne $skipBailOutLabel
    //     test s1, s1
    //     jns $skipBailOutLabel
    //     (fall through to bail out)
    IR::Opnd *dst = instr->GetDst(), *src1 = instr->GetSrc1();
    Assert(dst->IsRegOpnd());

    // test s3, s3 ; jne $skipBailOutLabel -- a nonzero remainder can never be -0
    IR::Instr * newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
    newInstr->SetSrc1(dst);
    newInstr->SetSrc2(dst);
    bailOutLabel->InsertBefore(newInstr);
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));

    // Fast path already checks if s1 >= 0
    if (!fastPath)
    {
        // test s1, s1 ; jns $skipBailOutLabel -- non-negative dividend yields +0
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        bailOutLabel->InsertBefore(newInstr);
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
    }
    // Fall through to bailOutLabel

    // Lower the instruction
    LowererMDArch::EmitInt4Instr(instr);
}
  5427. IR::Instr *
  5428. LowererMD::LoadFloatZero(IR::Opnd * opndDst, IR::Instr * instrInsert)
  5429. {
  5430. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOVSD_ZERO, opndDst, instrInsert->m_func);
  5431. instrInsert->InsertBefore(instr);
  5432. return instr;
  5433. }
template <typename T>
IR::Instr *
LowererMD::LoadFloatValue(IR::Opnd * opndDst, T value, IR::Instr * instrInsert)
{
    // Loads the floating-point constant 'value' into opndDst. The constant is
    // materialized in the function's native code data and referenced either by
    // direct address (in-proc JIT) or by offset from the native-data base (OOP JIT).
    if (value == 0.0 && !Js::JavascriptNumber::IsNegZero(value))
    {
        // zero can be loaded with "XORPS xmm, xmm" rather than needing memory load
        return LoadFloatZero(opndDst, instrInsert);
    }

    IR::Opnd * opnd;
    void* pValue = nullptr;
    const bool isFloat64 = opndDst->IsFloat64();
    IRType irtype = isFloat64 ? TyMachDouble : TyFloat32;

    // Cast the value to the matching opndDst's type because T might not match
    if (isFloat64)
    {
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_LoadFloatValue_Double>, (double)value);
    }
    else
    {
        Assert(opndDst->IsFloat32());
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), FloatType<DataDesc_LowererMD_LoadFloatValue_Float>, (float)value);
    }

    if (!instrInsert->m_func->IsOOPJIT())
    {
        // In-proc JIT: the constant's address is directly addressable.
        opnd = IR::MemRefOpnd::New((void*)pValue, irtype,
            instrInsert->m_func, isFloat64 ? IR::AddrOpndKindDynamicDoubleRef : IR::AddrOpndKindDynamicFloatRef);
    }
    else // OOP JIT
    {
        // Load the native code data base address, then address the constant
        // indirectly at its offset within that data.
        int offset = NativeCodeData::GetDataTotalOffset(pValue);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
        Lowerer::InsertMove(
            addressRegOpnd,
            IR::MemRefOpnd::New(instrInsert->m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, instrInsert->m_func, IR::AddrOpndKindDynamicNativeCodeDataRef),
            instrInsert);
        opnd = IR::IndirOpnd::New(addressRegOpnd, offset, irtype,
#if DBG
            NativeCodeData::GetDataDescription(pValue, instrInsert->m_func->m_alloc),
#endif
            instrInsert->m_func, true);
    }

    // movsd xmm, [reg+offset]
    IR::Instr * instr = IR::Instr::New(LowererMDArch::GetAssignOp(opndDst->GetType()), opndDst, opnd, instrInsert->m_func);
    instrInsert->InsertBefore(instr);
    Legalize(instr);
    return instr;
}

// Explicit instantiations for the two supported constant types.
template IR::Instr * LowererMD::LoadFloatValue<float>(IR::Opnd * opndDst, float value, IR::Instr * instrInsert);
template IR::Instr * LowererMD::LoadFloatValue<double>(IR::Opnd * opndDst, double value, IR::Instr * instrInsert);
IR::Instr *
LowererMD::EnsureAdjacentArgs(IR::Instr * instrArg)
{
    // Ensure that the arg instructions for a given call site are adjacent.
    // This isn't normally desirable for CQ, but it's required by, for instance, the cloner,
    // which must clone a complete call sequence.
    IR::Opnd * opnd = instrArg->GetSrc2();
    IR::Instr * instrNextArg;
    StackSym * sym;

    AssertMsg(opnd, "opnd");

    // Walk the arg chain: each arg's src2 sym is defined by the previous arg
    // instruction in the sequence; sink that def so it sits immediately before
    // its consumer, then continue from it.
    while (opnd->IsSymOpnd())
    {
        sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
        instrNextArg = sym->m_instrDef;
        Assert(instrNextArg);
        instrNextArg->SinkInstrBefore(instrArg);
        instrArg = instrNextArg;
        opnd = instrArg->GetSrc2();
    }

    // The chain terminates at a reg opnd whose def is the StartCall.
    sym = opnd->AsRegOpnd()->m_sym;
    instrNextArg = sym->m_instrDef;
    Assert(instrNextArg && instrNextArg->m_opcode == Js::OpCode::StartCall);

    // The StartCall can be trivially moved down.
    if (instrNextArg->m_next != instrArg)
    {
        instrNextArg->UnlinkStartCallFromBailOutInfo(instrArg);
        instrNextArg->Unlink();
        instrArg->InsertBefore(instrNextArg);
    }

    // Return the instruction now preceding the StartCall (start of the sequence).
    return instrNextArg->m_prev;
}
  5515. #if INT32VAR
//
// Convert an int32 to Var representation (64-bit tagged-pointer scheme).
//
void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
{
    AssertMsg(TySize[opndSrc->GetType()] == MachPtr, "For this to work it should be a 64-bit register");

    // BTS src, VarTag_Shift - set the tag bit in place, turning the int32 into a tagged var
    IR::Instr* instr = IR::Instr::New(Js::OpCode::BTS, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);
}
  5525. //
  5526. // jump to $labelHelper, based on the result of CMP
  5527. //
  5528. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
  5529. {
  5530. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  5531. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  5532. // s1 = MOV src1 - Move to a temporary
  5533. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  5534. insertInstr->InsertBefore(instr);
  5535. if (instrFirst)
  5536. {
  5537. *instrFirst = instr;
  5538. }
  5539. // s1 = SHR s1, VarTag_Shift
  5540. instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  5541. insertInstr->InsertBefore(instr);
  5542. // CMP s1, AtomTag
  5543. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  5544. instr->SetSrc1(opndReg);
  5545. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func, /* dontEncode = */ true));
  5546. insertInstr->InsertBefore(instr);
  5547. if(fContinueLabel)
  5548. {
  5549. // JEQ $labelHelper
  5550. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5551. }
  5552. else
  5553. {
  5554. // JNE $labelHelper
  5555. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  5556. }
  5557. insertInstr->InsertBefore(instr);
  5558. }
  5559. //
  5560. // If lower 32-bits are zero (value is zero), jump to $helper.
  5561. //
  5562. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  5563. {
  5564. // Cast the var to 32 bit integer.
  5565. if(opndSrc->GetSize() != 4)
  5566. {
  5567. opndSrc = opndSrc->UseWithNewType(TyUint32, this->m_func);
  5568. }
  5569. AssertMsg(TySize[opndSrc->GetType()] == 4, "This technique works only on the 32-bit version");
  5570. // TEST src1, src1
  5571. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5572. instr->SetSrc1(opndSrc);
  5573. instr->SetSrc2(opndSrc);
  5574. insertInstr->InsertBefore(instr);
  5575. if(labelHelper != nullptr)
  5576. {
  5577. // JZ $labelHelper
  5578. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5579. insertInstr->InsertBefore(instr);
  5580. }
  5581. }
//
// If top 16 bits are not zero i.e. it is NOT object, jump to $helper.
// Returns true if a runtime test was emitted; false when the operand's tag
// state is statically known and no test is needed.
//
bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");

    if (opndSrc->IsTaggedValue() && fContinueLabel)
    {
        // Insert delete branch opcode to tell the dbChecks not to assert on the helper label we may fall through into
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);
        return false;
    }
    else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
    {
        // Known not to be tagged; the "not an object" branch would never be taken.
        return false;
    }

    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);

    // s1 = MOV src1 - Move to a temporary
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);

    // s1 = SHR s1, VarTag_Shift - isolate the tag bits
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);

    if (fContinueLabel)
    {
        // JEQ $labelTarget - zero tag bits means object; continue there
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        // Mark the fall-through as a helper block for the dbChecks.
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // JNZ $labelTarget - nonzero tag bits means not an object
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    return true;
}
  5622. #else
//
// Convert an int32 value to a Var (32-bit tagged-int scheme).
//
void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
{
    // SHL r1, AtomTag - make room for the tag bit
    IR::Instr * instr = IR::Instr::New(Js::OpCode::SHL, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);

    // INC r1 - set the tag bit
    instr = IR::Instr::New(Js::OpCode::INC, opndSrc, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
}
  5635. //
  5636. // jump to $labelHelper, based on the result of TEST
  5637. //
  5638. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
  5639. {
  5640. if (opndSrc->IsTaggedInt() && !fContinueLabel)
  5641. {
  5642. return;
  5643. }
  5644. else if (opndSrc->IsNotTaggedValue() && fContinueLabel)
  5645. {
  5646. return;
  5647. }
  5648. // TEST src1, AtomTag
  5649. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5650. instr->SetSrc1(opndSrc);
  5651. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
  5652. insertInstr->InsertBefore(instr);
  5653. if (instrFirst)
  5654. {
  5655. *instrFirst = instr;
  5656. }
  5657. if(fContinueLabel)
  5658. {
  5659. // JNE $labelHelper
  5660. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  5661. }
  5662. else
  5663. {
  5664. // JEQ $labelHelper
  5665. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5666. }
  5667. insertInstr->InsertBefore(instr);
  5668. }
//
// If value is zero in tagged int representation, jump to $labelHelper.
//
void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
{
    // A value known not to be tagged can never be a tagged zero.
    if (opndSrc->IsNotTaggedValue())
    {
        return;
    }

    // CMP src1, AtomTag - a tagged zero is exactly the tag itself
    // (see GenerateInt32ToVarConversion: SHL then INC, so 0 encodes to AtomTag)
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndSrc);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func));
    insertInstr->InsertBefore(instr);

    // JEQ $helper
    if(labelHelper != nullptr)
    {
        // JEQ $labelHelper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
  5691. //
  5692. // If not object, jump to $labelHelper.
  5693. //
  5694. bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  5695. {
  5696. if (opndSrc->IsTaggedInt() && fContinueLabel)
  5697. {
  5698. // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
  5699. IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
  5700. insertInstr->InsertBefore(fakeBr);
  5701. return false;
  5702. }
  5703. else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
  5704. {
  5705. return false;
  5706. }
  5707. // TEST src1, AtomTag
  5708. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5709. instr->SetSrc1(opndSrc);
  5710. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
  5711. insertInstr->InsertBefore(instr);
  5712. if (fContinueLabel)
  5713. {
  5714. // JEQ $labelHelper
  5715. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
  5716. insertInstr->InsertBefore(instr);
  5717. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5718. insertInstr->InsertBefore(labelHelper);
  5719. }
  5720. else
  5721. {
  5722. // JNE $labelHelper
  5723. IR::BranchInstr* branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
  5724. insertInstr->InsertBefore(branchInstr);
  5725. InsertObjectPoison(opndSrc, branchInstr, insertInstr, false);
  5726. }
  5727. return true;
  5728. }
  5729. #endif
  5730. #if FLOATVAR
//
// If none of the top 14 bits are set, then the var is not a float value and hence, jump to $labelHelper.
// (A tagged float has at least one of the top 14 bits set; see FloatTag_Value in CheckFloatAndUntag.)
//
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Statically known float: no runtime test needed.
    // (checkForNullInLoopBody is only used by the non-FLOATVAR variant.)
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }

    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");

    // s1 = MOV src1 - Move to a temporary
    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);

    // s1 = SHR s1, 50 - keep only the top 14 bits
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(50, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);

    // JZ $helper - all top 14 bits were zero, so this var is not a tagged float
    instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
// Emits a tagged-float check (unless the value type is statically float) that
// jumps to labelHelper when opndSrc is not a float, then untags the value and
// returns a new double register holding it.
IR::RegOpnd* LowererMD::CheckFloatAndUntag(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper)
{
    IR::Opnd* floatTag = IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value, IR::AddrOpndKindConstantVar, this->m_func, /* dontEncode = */ true);
    IR::RegOpnd* regOpndFloatTag = IR::RegOpnd::New(TyUint64, this->m_func);

    // MOV floatTagReg, FloatTag_Value
    IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, regOpndFloatTag, floatTag, this->m_func);
    insertInstr->InsertBefore(instr);

    if (!opndSrc->GetValueType().IsFloat())
    {
        // TEST s1, floatTagReg - no tag bits set means not a tagged float
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc);
        instr->SetSrc2(regOpndFloatTag);
        insertInstr->InsertBefore(instr);

        // JZ $helper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }

    // untaggedFloat = XOR floatTagReg, s1 // where untaggedFloat == floatTagReg; use floatTagReg temporarily for the untagged float
    IR::RegOpnd* untaggedFloat = regOpndFloatTag;
    instr = IR::Instr::New(Js::OpCode::XOR, untaggedFloat, regOpndFloatTag, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);

    // MOVD xmm, untaggedFloat - reinterpret the raw bits as a double
    IR::RegOpnd *floatReg = IR::RegOpnd::New(TyMachDouble, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVD, floatReg, untaggedFloat, this->m_func);
    insertInstr->InsertBefore(instr);
    return floatReg;
}
  5779. #else
// Non-FLOATVAR variant: floats are boxed JavascriptNumber objects, so the test
// compares the object's vtable against JavascriptNumber's and jumps to
// $labelHelper on mismatch.
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Statically known float: no runtime test needed.
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }

    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");

    if(checkForNullInLoopBody && m_func->IsLoopBody())
    {
        // It's possible that the value was determined dead by the jitted function and was not restored. The jitted loop
        // body may not realize that it's dead and may try to use it. Check for null in loop bodies.
        //     test src1, src1
        //     jz $helper (bail out)
        m_lowerer->InsertCompareBranch(
            opndSrc,
            IR::AddrOpnd::NewNull(m_func),
            Js::OpCode::BrEq_A,
            labelHelper,
            insertInstr);
    }

    // CMP [src], JavascriptNumber::vtable
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, insertInstr->m_func);
    instr->SetSrc1(IR::IndirOpnd::New(opndSrc, 0, TyMachPtr, insertInstr->m_func));
    instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
    insertInstr->InsertBefore(instr);

    // JNZ $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE /* JNZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
  5808. #endif
  5809. #if DBG
  5810. //
  5811. // Helps in debugging of fast paths.
  5812. //
  5813. void LowererMD::GenerateDebugBreak( IR::Instr * insertInstr )
  5814. {
  5815. // int 3
  5816. IR::Instr *int3 = IR::Instr::New(Js::OpCode::INT, insertInstr->m_func);
  5817. int3->SetSrc1(IR::IntConstOpnd::New(3, TyInt32, insertInstr->m_func));
  5818. insertInstr->InsertBefore(int3);
  5819. }
  5820. #endif
// Rewrites a two/three-operand instruction into x86's two-address form where
// dst and src1 are the same operand. When 'verify' is true, only asserts that
// legalization already produced that form instead of rewriting.
template <bool verify>
void
LowererMD::MakeDstEquSrc1(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->IsLowered());
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());

    // Already in two-address form; nothing to do.
    if(instr->GetDst()->IsEqual(instr->GetSrc1()))
    {
        return;
    }

    if (verify)
    {
        AssertMsg(false, "dst and src1 should be the same at this point. Missing Legalization");
        return;
    }

    if(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2()))
    {
        // dst aliases src2. For commutative ops we can simply swap the sources.
        switch(instr->m_opcode)
        {
#ifdef _M_IX86
            case Js::OpCode::ADC:
#endif
            case Js::OpCode::Add_I4:
            case Js::OpCode::Mul_I4:
            case Js::OpCode::Or_I4:
            case Js::OpCode::Xor_I4:
            case Js::OpCode::And_I4:
            case Js::OpCode::ADD:
            case Js::OpCode::IMUL2:
            case Js::OpCode::OR:
            case Js::OpCode::XOR:
            case Js::OpCode::AND:
            case Js::OpCode::ADDSD:
            case Js::OpCode::MULSD:
            case Js::OpCode::ADDSS:
            case Js::OpCode::MULSS:
            case Js::OpCode::ADDPS:
                // For (a = b & a), generate (a = a & b)
                instr->SwapOpnds();
                return;
        }
        // For (a = b - a), generate (c = a; a = b - c) and fall through
        ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
    }

    // For (a = b - c), generate (a = b; a = a - c)
    IR::Instr *const mov = IR::Instr::New(Js::OpCode::Ld_A, instr->GetDst(), instr->UnlinkSrc1(), instr->m_func);
    instr->InsertBefore(mov);
    ChangeToAssign(mov);
    instr->SetSrc1(instr->GetDst());
}
// Lowers an int64 instruction. Only implemented on x86, where int64 values are
// split into register pairs; unreachable on other architectures.
void
LowererMD::EmitInt64Instr(IR::Instr * instr)
{
#ifdef _M_IX86
    lowererMDArch.EmitInt64Instr(instr);
#else
    Assert(UNREACHED);
#endif
}
// Forwards int32 instruction lowering to the architecture-specific lowerer.
void
LowererMD::EmitInt4Instr(IR::Instr *instr)
{
    LowererMDArch::EmitInt4Instr(instr);
}
// Forwards int32/uint32-to-Var boxing to the architecture-specific lowerer.
void
LowererMD::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    lowererMDArch.EmitLoadVar(instrLoad, isFromUint32, isHelper);
}
// Forwards Var-to-int32 conversion to the architecture-specific lowerer;
// returns its result (see LowererMDArch::EmitLoadInt32 for the contract).
bool
LowererMD::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    return lowererMDArch.EmitLoadInt32(instrLoad, conversionFromObjectAllowed, bailOutOnHelper, labelBailOut);
}
// Forwards signed int-to-float conversion to the architecture-specific lowerer.
void
LowererMD::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitIntToFloat(dst, src, instrInsert);
}
// Forwards unsigned int-to-float conversion to the architecture-specific lowerer.
void
LowererMD::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitUIntToFloat(dst, src, instrInsert);
}
// Forwards signed int32-to-int64 widening to the architecture-specific lowerer.
void
LowererMD::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitIntToLong(dst, src, instrInsert);
}
// Forwards unsigned int32-to-int64 widening to the architecture-specific lowerer.
void
LowererMD::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitUIntToLong(dst, src, instrInsert);
}
// Forwards int64-to-int32 truncation to the architecture-specific lowerer.
void
LowererMD::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    this->lowererMDArch.EmitLongToInt(dst, src, instrInsert);
}
// Emits a sign-extension from src1 into dst. src2's size selects the source
// width (1, 2, or 4 bytes); on x86 an int64 dst is produced as a register pair
// via CDQ.
void LowererMD::EmitSignExtend(IR::Instr * instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();
    Assert(dst && src1 && src2);

    // Src2 is used to determine what's the from type size
    Assert(src2->GetSize() < dst->GetSize());
    IRType fromType = src2->GetType();
    Js::OpCode op = Js::OpCode::MOVSX;
    switch (src2->GetSize())
    {
        case 1: break; // default: MOVSX (byte source)
        case 2: op = Js::OpCode::MOVSXW; break;
        case 4:
#if _M_X64
            op = Js::OpCode::MOVSXD;
#else
            // 32->32 on x86 is just a plain move; widening happens via CDQ below.
            op = LowererMDArch::GetAssignOp(fromType);
#endif
            break;
        default:
            Assert(UNREACHED);
    }

#if _M_IX86
    // Special handling of int64 on x86
    if (dst->IsInt64())
    {
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
        Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src1);

        // Sign-extend the low part into EAX, then CDQ fills EDX with the sign bits.
        IR::RegOpnd * eaxReg = IR::RegOpnd::New(RegEAX, TyInt32, m_func);
        IR::RegOpnd * edxReg = IR::RegOpnd::New(RegEDX, TyInt32, m_func);
        instr->InsertBefore(IR::Instr::New(op, eaxReg, srcPair.low->UseWithNewType(fromType, m_func), m_func));
        Legalize(instr->m_prev);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, edxReg, m_func));
        Legalize(instr->m_prev);
        m_lowerer->InsertMove(dstPair.low, eaxReg, instr);
        m_lowerer->InsertMove(dstPair.high, edxReg, instr);
    }
    else
#endif
    {
        instr->InsertBefore(IR::Instr::New(op, dst, src1->UseWithNewType(fromType, m_func), m_func));
        Legalize(instr->m_prev);
    }
}
  5968. void
  5969. LowererMD::EmitFloat32ToFloat64(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5970. {
  5971. // We should only generate this if sse2 is available
  5972. Assert(AutoSystemInfo::Data.SSE2Available());
  5973. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  5974. Assert(src->IsRegOpnd() && src->GetType() == TyFloat32);
  5975. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSS2SD, dst, src, this->m_func));
  5976. }
// Converts an int64/uint64 src to a float/double dst. On x86 (no 64-bit GPRs)
// this is done via a helper call; on x64 it is emitted inline, with a special
// sequence for uint64 values whose MSB is set (CVTSI2SD is signed-only).
void
LowererMD::EmitInt64toFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instr)
{
#ifdef _M_IX86
    // x86: route through a runtime helper chosen by (dst type, src type).
    IR::Opnd *srcOpnd = instr->UnlinkSrc1();
    LoadInt64HelperArgument(instr, srcOpnd);

    IR::Instr* callinstr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instr->InsertBefore(callinstr);
    CompileAssert(sizeof(IRType) == 1);
    const uint16 fromToType = dst->GetType() | (srcOpnd->GetType() << 8);
    IR::JnHelperMethod method = IR::HelperOp_Throw;
    switch (fromToType)
    {
        case TyFloat32 | (TyInt64 << 8) : method = IR::HelperI64TOF32; break;
        case TyFloat32 | (TyUint64 << 8) : method = IR::HelperUI64TOF32; break;
        case TyFloat64 | (TyInt64 << 8) : method = IR::HelperI64TOF64; break;
        case TyFloat64 | (TyUint64 << 8) : method = IR::HelperUI64TOF64; break;
        default:
            Assert(UNREACHED);
    }
    this->ChangeToHelperCall(callinstr, method);
#else
    // x64: convert to double first; narrow to float32 at the end if needed.
    IR::Opnd* origDst = nullptr;
    if (dst->IsFloat32())
    {
        origDst = dst;
        dst = IR::RegOpnd::New(TyFloat64, this->m_func);
    }

    // Insert a new instruction before 'instr' and legalize it.
    const auto insertLegalize = [instr](IR::Instr* newInstr)
    {
        instr->InsertBefore(newInstr);
        Legalize(newInstr);
    };

    if (src->IsUnsigned())
    {
        insertLegalize(IR::Instr::New(Js::OpCode::TEST, nullptr, src, src, m_func));
        IR::LabelInstr* msbSetLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JSB, msbSetLabel, m_func));

        // MSB not set, simple case
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, m_func));
        insertLegalize(msbSetLabel);

        // MSB set: halve the value (folding the dropped LSB back in so rounding
        // is preserved), convert the now-positive value, then double the result.
        IR::RegOpnd* halfOpnd = IR::RegOpnd::New(TyInt64, m_func);
        IR::RegOpnd* lsbOpnd = IR::RegOpnd::New(TyInt64, m_func);
        m_lowerer->InsertMove(halfOpnd, src, instr);
        m_lowerer->InsertMove(lsbOpnd, src, instr);
        insertLegalize(IR::Instr::New(Js::OpCode::SHR, halfOpnd, halfOpnd, IR::IntConstOpnd::New(1, TyInt8, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::AND, lsbOpnd, lsbOpnd, IR::Int64ConstOpnd::New(1, TyInt64, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::OR, halfOpnd, halfOpnd, lsbOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, halfOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::ADDSD, dst, dst, dst, m_func));
        insertLegalize(doneLabel);
    }
    else
    {
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
    }

    if (origDst)
    {
        // Narrow the intermediate double to the requested float32 dst.
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSD2SS, origDst, dst, m_func));
    }
#endif
}
// Emits an overflow check for an IMUL result when overflow is only significant
// beyond instr->ignoreOverflowBitCount bits (> 32). The upper half of the
// product lives in edx/rdx; the check sign-extends from the significant bit
// width and branches to bailOutLabel if that changes the value.
void
LowererMD::EmitNon32BitOvfCheck(IR::Instr *instr, IR::Instr *insertInstr, IR::LabelInstr* bailOutLabel)
{
    AssertMsg(instr->m_opcode == Js::OpCode::IMUL, "IMUL should be used to check for non-32 bit overflow check on x86.");

    IR::RegOpnd *edxSym = IR::RegOpnd::New(TyInt32, instr->m_func);
#ifdef _M_IX86
    edxSym->SetReg(RegEDX);
#else
    edxSym->SetReg(RegRDX);
#endif

    // dummy def for edx to force RegAlloc to generate a lifetime. This is removed later by the Peeps phase.
    IR::Instr *newInstr = IR::Instr::New(Js::OpCode::NOP, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);

    IR::RegOpnd *temp = IR::RegOpnd::New(TyInt32, instr->m_func);
    Assert(instr->ignoreOverflowBitCount > 32);
    // Shift amount that discards the ignored high bits of the upper half.
    uint8 shamt = 64 - instr->ignoreOverflowBitCount;

    // MOV temp, edx
    newInstr = IR::Instr::New(Js::OpCode::MOV, temp, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // SHL temp, shamt
    newInstr = IR::Instr::New(Js::OpCode::SHL, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // SAR temp, shamt - together with SHL, sign-extends from the significant bits
    newInstr = IR::Instr::New(Js::OpCode::SAR, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);

    // CMP temp, edx - differs iff the ignored bits were significant (overflow)
    newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
    newInstr->SetSrc1(temp);
    newInstr->SetSrc2(edxSym);
    insertInstr->InsertBefore(newInstr);

    // JNE $bailOut
    Lowerer::InsertBranch(Js::OpCode::JNE, false, bailOutLabel, insertInstr);
}
void LowererMD::ConvertFloatToInt32(IR::Opnd* intOpnd, IR::Opnd* floatOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone, IR::Instr * instInsert)
{
    // Emits, before instInsert, the inline fast path that truncates floatOpnd into
    // intOpnd. On success control jumps to labelDone; if the CVTT conversion yields
    // the sign-bit sentinel (overflow or NaN) control falls through so the caller
    // can emit a helper call. On x86 with SSE3 a 64-bit FISTTP retry is attempted
    // before giving up.
    UNREFERENCED_PARAMETER(labelHelper); // used on ARM
#if defined(_M_IX86)
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Assert((floatOpnd->IsRegOpnd() && floatOpnd->IsFloat()) || (floatOpnd->IsIndirOpnd() && floatOpnd->GetType() == TyMachDouble));
    Assert(intOpnd->GetType() == TyInt32);

    IR::Instr* instr;

    {
#ifdef _M_X64
        // Convert into a 64-bit temp on x64 so only true int64 overflow produces
        // the sentinel; in-range values that exceed int32 are truncated below.
        IR::Opnd* dstOpnd = IR::RegOpnd::New(TyInt64, m_func);
#else
        IR::Opnd* dstOpnd = intOpnd;
#endif
        // CVTTSD2SI dst, floatOpnd
        instr = IR::Instr::New(floatOpnd->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, dstOpnd, floatOpnd, this->m_func);
        instInsert->InsertBefore(instr);

        // CMP dst, 0x80000000 {0x8000000000000000 on x64} -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(dstOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func, true));
        instInsert->InsertBefore(instr);
        Legalize(instr);

#ifdef _M_X64
        // Truncate to int32 for x64. We still need to go to helper though if we have int64 overflow.
        // MOV_TRUNC intOpnd, tmpOpnd
        instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, intOpnd, dstOpnd, this->m_func);
        instInsert->InsertBefore(instr);
#endif
    }

    // JNE $done -- conversion did not produce the overflow sentinel.
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
    instInsert->InsertBefore(instr);

    // It does overflow - Let's try using FISTTP which uses 64 bits and is relevant only for x86
    // but requires going to memory and should only be used in overflow scenarios
#ifdef _M_IX86
    if (AutoSystemInfo::Data.SSE3Available())
    {
        IR::Opnd* floatStackOpnd;

        // Lazily allocate the per-function 8-byte scratch stack slot used to spill
        // values for the x87 FISTTP sequence.
        StackSym* tempSymDouble = this->m_func->tempSymDouble;
        if (!tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
            tempSymDouble = this->m_func->tempSymDouble;
        }

        // This path works on doubles; widen a float32 source first.
        IR::Opnd * float64Opnd;
        if (floatOpnd->IsFloat32())
        {
            float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
            instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, floatOpnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            float64Opnd = floatOpnd;
        }

        // FLD needs a memory operand; spill register values to the scratch slot.
        if (float64Opnd->IsRegOpnd())
        {
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
            instr = IR::Instr::New(Js::OpCode::MOVSD, floatStackOpnd, float64Opnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            floatStackOpnd = float64Opnd;
        }

        // FLD [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::FLD, floatStackOpnd, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);

        if (!float64Opnd->IsRegOpnd())
        {
            // The source was already in memory; the 64-bit integer store below must
            // still target the scratch slot, not the source location.
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
        }

        // FISTTP qword ptr [tmpDouble] -- truncating 64-bit store; pops the x87 stack.
        instr = IR::Instr::New(Js::OpCode::FISTTP, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);

        // Alias the low 32 bits of the scratch slot as an int32 stack sym.
        StackSym *intSym = StackSym::New(TyInt32, m_func);
        intSym->m_offset = tempSymDouble->m_offset;
        intSym->m_allocated = true;
        IR::Opnd* lowerBitsOpnd = IR::SymOpnd::New(intSym, TyInt32, m_func);

        // MOV dst, dword ptr [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::MOV, intOpnd, lowerBitsOpnd, m_func);
        instInsert->InsertBefore(instr);

        // TEST dst, dst -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(intOpnd);
        instr->SetSrc2(intOpnd);
        instInsert->InsertBefore(instr);

        // Non-zero low 32 bits: take them as the result and finish.
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);

        // CMP [tmpDouble + 4], 0x80000000
        // Low bits were zero: inspect the high half. NOTE(review): a high half of
        // 0x80000000 with zero low bits matches the 64-bit conversion sentinel
        // 0x8000000000000000 (possible FISTTP overflow), so only that pattern falls
        // through to the helper -- confirm against FISTTP's indefinite encoding.
        StackSym* higherBitsSym = StackSym::New(TyInt32, m_func);
        higherBitsSym->m_offset = tempSymDouble->m_offset + 4;
        higherBitsSym->m_allocated = true;
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::SymOpnd::New(higherBitsSym, TyInt32, m_func));
        instr->SetSrc2(IR::IntConstOpnd::New(0x80000000, TyInt32, this->m_func, true));
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);
    }
#endif
}
IR::Instr *
LowererMD::InsertConvertFloat64ToInt32(const RoundMode roundMode, IR::Opnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
{
    // Emits a float64 -> int32 conversion before insertBeforeInstr using the
    // requested rounding mode; returns the conversion instruction, or nullptr for
    // unsupported modes (after asserting).
    Assert(dst);
    Assert(dst->IsInt32());
    Assert(src);
    Assert(src->IsFloat64());
    Assert(insertBeforeInstr);

    // The caller is expected to check for overflow. To have that work be done automatically, use LowererMD::EmitFloatToInt.
    Func *const func = insertBeforeInstr->m_func;
    IR::AutoReuseOpnd autoReuseSrcPlusHalf;
    IR::Instr *instr = nullptr;

    switch (roundMode)
    {
        case RoundModeTowardInteger:
        {
            // Conversion with rounding towards nearest integer is not supported by the architecture. Add 0.5 and do a
            // round-toward-zero conversion instead.
            // NOTE(review): for negative inputs add-0.5-then-truncate differs from
            // true round-to-nearest; callers presumably handle negatives separately
            // (cf. the InlineMathRound lowering) -- confirm.
            IR::RegOpnd *const srcPlusHalf = IR::RegOpnd::New(TyFloat64, func);
            autoReuseSrcPlusHalf.Initialize(srcPlusHalf, func);
            Lowerer::InsertAdd(
                false /* needFlags */,
                srcPlusHalf,
                src,
                IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, func,
                    IR::AddrOpndKindDynamicDoubleRef),
                insertBeforeInstr);

            instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeTowardZero), dst, srcPlusHalf, func);
            insertBeforeInstr->InsertBefore(instr);
            LowererMD::Legalize(instr);
            return instr;
        }
        case RoundModeHalfToEven:
        {
            // Round-half-to-even maps directly onto a machine conversion opcode.
            instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeHalfToEven), dst, src, func);
            insertBeforeInstr->InsertBefore(instr);
            LowererMD::Legalize(instr);
            return instr;
        }
        default:
            AssertMsg(0, "RoundMode not supported.");
            return nullptr;
    }
}
void
LowererMD::EmitFloatToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert, IR::Instr *instrBailOut, IR::LabelInstr * labelBailOut)
{
    // Converts float src into int32 dst before instrInsert: first the inline fast
    // path (ConvertFloatToInt32), then a Conv_ToInt32Core helper call for values
    // the fast path rejects. If instrBailOut bails out on array-access helper
    // calls, branches straight to labelBailOut instead of calling the helper.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    if (instrBailOut && instrBailOut->HasBailOutInfo())
    {
        bailOutKind = instrBailOut->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
        {
            // Bail out instead of calling helper. If this is happening unconditionally, the caller should instead throw a rejit exception.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrInsert);
            return;
        }
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *instr;

    // Fast path: jumps to labelDone on success, falls through into $Helper otherwise.
    ConvertFloatToInt32(dst, src, labelHelper, labelDone, instrInsert);

    // $Helper
    instrInsert->InsertBefore(labelHelper);

    // The helper takes a double argument; widen a float32 source first.
    IR::Opnd * arg = src;
    if (src->IsFloat32())
    {
        arg = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(arg, src, instrInsert);
    }
    instr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instrInsert->InsertBefore(instr);

    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        // The helper call can itself trigger an implicit-call bailout; attach the
        // bailout info to the CALL, sharing it if instrBailOut still owns it.
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // dst = ToInt32Core(src);
    LoadDoubleHelperArgument(instr, arg);
    this->ChangeToHelperCall(instr, IR::HelperConv_ToInt32Core);

    // $Done
    instrInsert->InsertBefore(labelDone);
}
void
LowererMD::EmitLoadVarNoCheck(IR::RegOpnd * dst, IR::RegOpnd * src, IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Boxes the int32/uint32 value in src into a Var in dst (inserted before
    // instrLoad), without the tagged-int fast-path check (the caller has already
    // decided the value needs a number object/double box).
#ifdef _M_IX86
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        // No SSE2: delegate the whole conversion to a runtime helper.
        IR::JnHelperMethod helperMethod;

        // PUSH &floatTemp
        IR::Opnd *tempOpnd;
        if (instrLoad->dstIsTempNumber)
        {
            // Result can live in a stack temp number; use the in-place helper and
            // pass the temp's address as an extra first argument.
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtomInPlace : IR::HelperOp_Int32ToAtomInPlace;

            // Use the original dst to get the temp number sym
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrLoad->GetDst(), instrLoad->dstIsTempNumberTransferred);

            IR::Instr *load = this->m_lowerer->InsertLoadStackAddress(tempNumberSym, instrLoad);
            tempOpnd = load->GetDst();
            this->LoadHelperArgument(instrLoad, tempOpnd);
        }
        else
        {
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtom : IR::HelperOp_Int32ToAtom;
        }

        // PUSH memContext
        this->m_lowerer->LoadScriptContext(instrLoad);

        // PUSH s1
        this->LoadHelperArgument(instrLoad, src);

        // dst = ToVar()
        IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, dst,
            IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
        instrLoad->InsertBefore(instr);
        this->LowerCall(instr, 0);
        return;
    }
#endif

    // SSE2 path: convert the integer to a double, then box the double into dst.
    IR::RegOpnd * floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
    if (isFromUint32)
    {
        this->EmitUIntToFloat(floatReg, src, instrLoad);
    }
    else
    {
        this->EmitIntToFloat(floatReg, src, instrLoad);
    }
    this->SaveDoubleToVar(dst, floatReg, instrLoad, instrLoad, isHelper);
}
  6318. void
  6319. LowererMD::ImmedSrcToReg(IR::Instr * instr, IR::Opnd * newOpnd, int srcNum)
  6320. {
  6321. if (srcNum == 2)
  6322. {
  6323. instr->SetSrc2(newOpnd);
  6324. }
  6325. else
  6326. {
  6327. Assert(srcNum == 1);
  6328. instr->SetSrc1(newOpnd);
  6329. }
  6330. }
  6331. IR::LabelInstr *
  6332. LowererMD::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  6333. {
  6334. return lowererMDArch.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  6335. }
  6336. StackSym *
  6337. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot)
  6338. {
  6339. return GetImplicitParamSlotSym(argSlot, this->m_func);
  6340. }
  6341. StackSym *
  6342. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot, Func * func)
  6343. {
  6344. // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
  6345. // Pass in the EBP+8 to start at the function object, the start of the implicit param slots
  6346. StackSym * stackSym = StackSym::NewImplicitParamSym(argSlot, func);
  6347. func->SetArgOffset(stackSym, (2 + argSlot) * MachPtr);
  6348. func->SetHasImplicitParamLoad();
  6349. return stackSym;
  6350. }
  6351. bool LowererMD::GenerateFastAnd(IR::Instr * instrAnd)
  6352. {
  6353. return this->lowererMDArch.GenerateFastAnd(instrAnd);
  6354. }
  6355. bool LowererMD::GenerateFastDivAndRem(IR::Instr* instrDiv, IR::LabelInstr* bailoutLabel)
  6356. {
  6357. return this->lowererMDArch.GenerateFastDivAndRem(instrDiv, bailoutLabel);
  6358. }
  6359. bool LowererMD::GenerateFastXor(IR::Instr * instrXor)
  6360. {
  6361. return this->lowererMDArch.GenerateFastXor(instrXor);
  6362. }
  6363. bool LowererMD::GenerateFastOr(IR::Instr * instrOr)
  6364. {
  6365. return this->lowererMDArch.GenerateFastOr(instrOr);
  6366. }
  6367. bool LowererMD::GenerateFastNot(IR::Instr * instrNot)
  6368. {
  6369. return this->lowererMDArch.GenerateFastNot(instrNot);
  6370. }
  6371. bool LowererMD::GenerateFastShiftLeft(IR::Instr * instrShift)
  6372. {
  6373. return this->lowererMDArch.GenerateFastShiftLeft(instrShift);
  6374. }
  6375. bool LowererMD::GenerateFastShiftRight(IR::Instr * instrShift)
  6376. {
  6377. return this->lowererMDArch.GenerateFastShiftRight(instrShift);
  6378. }
void LowererMD::GenerateIsJsObjectTest(IR::RegOpnd* instanceReg, IR::Instr* insertInstr, IR::LabelInstr* labelHelper)
{
    // Emits, before insertInstr, a test that branches to labelHelper unless
    // instanceReg holds a pointer to a RecyclableObject whose typeId lies beyond
    // the JavaScript primitive range (i.e. a real JS object).

    // TEST instanceReg, (Js::AtomTag_IntPtr | Js::FloatTag_Value )
    // -- rejects tagged (non-pointer) values.
    GenerateObjectTest(instanceReg, insertInstr, labelHelper);

    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);

    // MOV typeReg, instanceReg + offsetof(RecyclableObject::type)
    insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
        IR::IndirOpnd::New(instanceReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func));
    cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func));
    insertInstr->InsertBefore(cmp);

    // JLE labelHelper -- typeId still in the primitive range: not a JS object.
    insertInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JLE, labelHelper, this->m_func));
}
void
LowererMD::EmitReinterpretPrimitive(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
{
    // Emits a bitwise, size-preserving reinterpretation of src into dst (e.g.
    // float32 <-> int32, float64 <-> int64), inserted before insertBeforeInstr.
    Assert(dst && src);
    Assert(dst->GetSize() == src->GetSize());
    Assert(dst->GetType() != src->GetType());
    if (
        // Additional runtime check to prevent unknown behavior
        (dst->GetSize() != src->GetSize()) ||
        // There is nothing to do in this case
        (dst->GetType() == src->GetType())
    )
    {
        // Release-build safety net for the conditions asserted above: fall back to
        // a plain move instead of emitting a bogus reinterpret.
        Lowerer::InsertMove(dst, src, insertBeforeInstr);
        return;
    }

    // Legalize each new instruction, then insert it before insertBeforeInstr.
    auto LegalizeInsert = [insertBeforeInstr](IR::Instr* instr)
    {
        Legalize(instr);
        insertBeforeInstr->InsertBefore(instr);
    };

    if (dst->GetSize() == 8)
    {
        // 64-bit reinterprets.
#if _M_AMD64
        // A single MOVQ moves the raw 64 bits between GPR and XMM.
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVQ, dst, src, m_func));
#elif LOWER_SPLIT_INT64
        // 32-bit target: int64 values are split across a low/high register pair.
        if (dst->IsInt64())
        {
            // movd xmm2, xmm1
            // movd low_bits, xmm2
            // shufps xmm2, xmm2, 1
            // movd high_bits, xmm2
            Assert(src->IsFloat64());
            Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);

            // shufps modifies the register, we shouldn't change the source here
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            Lowerer::InsertMove(tmpDouble, src, insertBeforeInstr);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.low, tmpDouble, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, tmpDouble, tmpDouble, IR::IntConstOpnd::New(1, TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.high, tmpDouble, m_func));
        }
        else
        {
            // movd xmm0, lowBits;
            // movd xmm1, highBits;
            // shufps xmm0, xmm1, (0 | 2 << 2 | 0 << 4 | 1 << 6);
            // shufps xmm0, xmm0, (0 | 2 << 2 | 3 << 4 | 3 << 6);
            Assert(src->IsInt64());
            Assert(dst->IsFloat64());
            Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src);
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, srcPair.low, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, tmpDouble, srcPair.high, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, tmpDouble, IR::IntConstOpnd::New((0 | 2 << 2 | 0 << 4 | 1 << 6), TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, dst, IR::IntConstOpnd::New((0 | 2 << 2 | 3 << 4 | 3 << 6), TyInt8, m_func, true), m_func));
        }
#endif
    }
    else if (dst->GetSize() == 4)
    {
        // 32bit reinterprets
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, src, m_func));
    }
    else
    {
        Assert(UNREACHED);
    }
}
  6464. void LowererMD::EmitReinterpretFloatToInt(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  6465. {
  6466. Assert(dst->IsInt32() || dst->IsUInt32() || dst->IsInt64());
  6467. Assert(src->IsFloat());
  6468. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  6469. }
  6470. void LowererMD::EmitReinterpretIntToFloat(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  6471. {
  6472. Assert(dst->IsFloat());
  6473. Assert(src->IsInt32() || src->IsUInt32() || src->IsInt64());
  6474. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  6475. }
  6476. IR::Instr *
  6477. LowererMD::LowerToFloat(IR::Instr *instr)
  6478. {
  6479. switch (instr->m_opcode)
  6480. {
  6481. case Js::OpCode::Add_A:
  6482. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  6483. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  6484. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS;
  6485. break;
  6486. case Js::OpCode::Sub_A:
  6487. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  6488. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  6489. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SUBSD : Js::OpCode::SUBSS;
  6490. break;
  6491. case Js::OpCode::Mul_A:
  6492. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  6493. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  6494. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::MULSD : Js::OpCode::MULSS;
  6495. break;
  6496. case Js::OpCode::Div_A:
  6497. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  6498. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  6499. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::DIVSD : Js::OpCode::DIVSS;
  6500. break;
  6501. case Js::OpCode::Neg_A:
  6502. {
  6503. IR::Opnd *opnd;
  6504. instr->m_opcode = Js::OpCode::XORPS;
  6505. if (instr->GetDst()->IsFloat32())
  6506. {
  6507. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegFloatAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6508. }
  6509. else
  6510. {
  6511. Assert(instr->GetDst()->IsFloat64());
  6512. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegDoubleAddr(), TyMachDouble, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6513. }
  6514. instr->SetSrc2(opnd);
  6515. break;
  6516. }
  6517. case Js::OpCode::BrEq_A:
  6518. case Js::OpCode::BrNeq_A:
  6519. case Js::OpCode::BrSrEq_A:
  6520. case Js::OpCode::BrSrNeq_A:
  6521. case Js::OpCode::BrGt_A:
  6522. case Js::OpCode::BrGe_A:
  6523. case Js::OpCode::BrLt_A:
  6524. case Js::OpCode::BrLe_A:
  6525. case Js::OpCode::BrNotEq_A:
  6526. case Js::OpCode::BrNotNeq_A:
  6527. case Js::OpCode::BrSrNotEq_A:
  6528. case Js::OpCode::BrSrNotNeq_A:
  6529. case Js::OpCode::BrNotGt_A:
  6530. case Js::OpCode::BrNotGe_A:
  6531. case Js::OpCode::BrNotLt_A:
  6532. case Js::OpCode::BrNotLe_A:
  6533. return this->LowerFloatCondBranch(instr->AsBranchInstr());
  6534. default:
  6535. Assume(UNREACHED);
  6536. }
  6537. Legalize(instr);
  6538. return instr;
  6539. }
IR::BranchInstr *
LowererMD::LowerFloatCondBranch(IR::BranchInstr *instrBranch, bool ignoreNan)
{
    // Lowers a machine-independent float conditional branch into a
    // (U)COMISD/(U)COMISS compare plus an unsigned Jcc, choosing operand order and
    // condition code so NaN operands behave per IEEE 754 (unless ignoreNan).
    // Replaces instrBranch and returns the new Jcc branch instruction.
    Js::OpCode brOpcode = Js::OpCode::InvalidOpCode;
    Js::OpCode cmpOpcode = Js::OpCode::InvalidOpCode;
    IR::Instr *instr;
    bool swapCmpOpnds = false;
    bool addJP = false;
    IR::LabelInstr *labelNaN = nullptr;

    // Generate float compare that behave correctly for NaN's.
    // These branch on unordered:
    //  JB
    //  JBE
    //  JE
    // These don't branch on unordered:
    //  JA
    //  JAE
    //  JNE
    // Unfortunately, only JA and JAE do what we'd like....
    Func * func = instrBranch->m_func;
    IR::Opnd *src1 = instrBranch->UnlinkSrc1();
    IR::Opnd *src2 = instrBranch->UnlinkSrc2();

    Assert(src1->GetType() == src2->GetType());

    switch (instrBranch->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JEQ;

        if (!ignoreNan)
        {
            // Don't jump on NaN's
            // (unordered compares set ZF, so JEQ alone would wrongly take the branch;
            // the JP below routes NaN to the fall-through label first)
            labelNaN = instrBranch->GetOrCreateContinueLabel();
            addJP = true;
        }
        break;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JNE;

        if (!ignoreNan)
        {
            // Jump on NaN's (NaN != x is true, but JNE does not branch on unordered)
            labelNaN = instrBranch->GetTarget();
            addJP = true;
        }
        break;

    // For the relational branches, only JA/JAE behave correctly on unordered
    // operands; <, <= are expressed by swapping the compare operands.
    case Js::OpCode::BrLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JAE;
        break;
    case Js::OpCode::BrLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JA;
        break;
    case Js::OpCode::BrGe_A:
        brOpcode = Js::OpCode::JAE;
        break;
    case Js::OpCode::BrGt_A:
        brOpcode = Js::OpCode::JA;
        break;

    // The negated relational branches must take the branch on unordered operands,
    // which is exactly what JB/JBE do.
    case Js::OpCode::BrNotLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JB;
        break;
    case Js::OpCode::BrNotLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JBE;
        break;
    case Js::OpCode::BrNotGe_A:
        brOpcode = Js::OpCode::JB;
        break;
    case Js::OpCode::BrNotGt_A:
        brOpcode = Js::OpCode::JBE;
        break;
    default:
        Assume(UNREACHED);
    }

    // if we haven't set cmpOpcode, then we are using COMISD/COMISS
    if (cmpOpcode == Js::OpCode::InvalidOpCode)
    {
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
    }

    if (swapCmpOpnds)
    {
        IR::Opnd *tmp = src1;
        src1 = src2;
        src2 = tmp;
    }

    // VC generates UCOMISD for BrEq/BrNeq, and COMISD for all others, accordingly to IEEE 754.
    // We'll do the same.

    //  COMISD / UCOMISD src1, src2
    IR::Instr *instrCmp = IR::Instr::New(cmpOpcode, func);
    instrCmp->SetSrc1(src1);
    instrCmp->SetSrc2(src2);
    instrBranch->InsertBefore(instrCmp);
    Legalize(instrCmp);

    if (addJP)
    {
        // JP $LabelNaN -- PF is set only on unordered compares, so this routes the
        // NaN case explicitly before the equality branch below.
        instr = IR::BranchInstr::New(Js::OpCode::JP, labelNaN, func);
        instrBranch->InsertBefore(instr);
    }

    //  Jcc $L
    instr = IR::BranchInstr::New(brOpcode, instrBranch->GetTarget(), func);
    instrBranch->InsertBefore(instr);

    instrBranch->Remove();
    return instr->AsBranchInstr();
}
  6653. void LowererMD::HelperCallForAsmMathBuiltin(IR::Instr* instr, IR::JnHelperMethod helperMethodFloat, IR::JnHelperMethod helperMethodDouble)
  6654. {
  6655. Assert(instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::Trunc_A || instr->m_opcode == Js::OpCode::Nearest_A);
  6656. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  6657. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  6658. Assert(!instr->GetSrc2());
  6659. IR::Opnd * argOpnd = instr->UnlinkSrc1();
  6660. IR::JnHelperMethod helperMethod;
  6661. if (argOpnd->IsFloat32())
  6662. {
  6663. helperMethod = helperMethodFloat;
  6664. LoadFloatHelperArgument(instr, argOpnd);
  6665. }
  6666. else
  6667. {
  6668. helperMethod = helperMethodDouble;
  6669. LoadDoubleHelperArgument(instr, argOpnd);
  6670. }
  6671. ChangeToHelperCall(instr, helperMethod);
  6672. }
  6673. void LowererMD::GenerateFastInlineBuiltInCall(IR::Instr* instr, IR::JnHelperMethod helperMethod)
  6674. {
  6675. switch (instr->m_opcode)
  6676. {
  6677. case Js::OpCode::InlineMathSqrt:
  6678. // Sqrt maps directly to the SSE2 instruction.
  6679. // src and dst should already be XMM registers, all we need is just change the opcode.
  6680. Assert(helperMethod == (IR::JnHelperMethod)0);
  6681. Assert(instr->GetSrc2() == nullptr);
  6682. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SQRTSD : Js::OpCode::SQRTSS;
  6683. break;
  6684. case Js::OpCode::InlineMathAbs:
  6685. Assert(helperMethod == (IR::JnHelperMethod)0);
  6686. return GenerateFastInlineBuiltInMathAbs(instr);
  6687. case Js::OpCode::InlineMathPow:
  6688. #ifdef _M_IX86
  6689. if (!instr->GetSrc2()->IsFloat())
  6690. {
  6691. #endif
  6692. this->GenerateFastInlineBuiltInMathPow(instr);
  6693. break;
  6694. #ifdef _M_IX86
  6695. }
  6696. // fallthrough
  6697. #endif
  6698. case Js::OpCode::InlineMathAcos:
  6699. case Js::OpCode::InlineMathAsin:
  6700. case Js::OpCode::InlineMathAtan:
  6701. case Js::OpCode::InlineMathAtan2:
  6702. case Js::OpCode::InlineMathCos:
  6703. case Js::OpCode::InlineMathExp:
  6704. case Js::OpCode::InlineMathLog:
  6705. case Js::OpCode::Expo_A: //** operator reuses InlineMathPow fastpath
  6706. case Js::OpCode::InlineMathSin:
  6707. case Js::OpCode::InlineMathTan:
  6708. {
  6709. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  6710. AssertMsg(instr->GetSrc1()->IsFloat(), "src1 must be float.");
  6711. AssertMsg(!instr->GetSrc2() || instr->GetSrc2()->IsFloat(), "src2 must be float.");
  6712. // Before:
  6713. // dst = <Built-in call> src1, src2
  6714. // After:
  6715. // I386:
  6716. // XMM0 = MOVSD src1
  6717. // CALL helperMethod
  6718. // dst = MOVSD call->dst
  6719. // AMD64:
  6720. // XMM0 = MOVSD src1
  6721. // RAX = MOV helperMethod
  6722. // CALL RAX
  6723. // dst = MOVSD call->dst
  6724. // Src1
  6725. IR::Instr* argOut = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
  6726. IR::RegOpnd* dst1 = IR::RegOpnd::New(nullptr, (RegNum)FIRST_FLOAT_ARG_REG, TyMachDouble, this->m_func);
  6727. dst1->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  6728. argOut->SetDst(dst1);
  6729. argOut->SetSrc1(instr->UnlinkSrc1());
  6730. instr->InsertBefore(argOut);
  6731. // Src2
  6732. if (instr->GetSrc2() != nullptr)
  6733. {
  6734. IR::Instr* argOut2 = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
  6735. IR::RegOpnd* dst2 = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_ARG_REG + 1), TyMachDouble, this->m_func);
  6736. dst2->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  6737. argOut2->SetDst(dst2);
  6738. argOut2->SetSrc1(instr->UnlinkSrc2());
  6739. instr->InsertBefore(argOut2);
  6740. }
  6741. // Call CRT.
  6742. IR::RegOpnd* floatCallDst = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_REG), TyMachDouble, this->m_func); // Dst in XMM0.
  6743. #ifdef _M_IX86
  6744. IR::Instr* floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, this->m_func);
  6745. floatCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
  6746. instr->InsertBefore(floatCall);
  6747. #else
  6748. // s1 = MOV helperAddr
  6749. IR::RegOpnd* s1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  6750. IR::AddrOpnd* helperAddr = IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func);
  6751. IR::Instr* mov = IR::Instr::New(Js::OpCode::MOV, s1, helperAddr, this->m_func);
  6752. instr->InsertBefore(mov);
  6753. // dst(XMM0) = CALL s1
  6754. IR::Instr *floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, s1, this->m_func);
  6755. instr->InsertBefore(floatCall);
  6756. #endif
  6757. instr->m_func->SetHasCallsOnSelfAndParents();
  6758. // Save the result.
  6759. instr->m_opcode = Js::OpCode::MOVSD;
  6760. instr->SetSrc1(floatCall->GetDst());
  6761. break;
  6762. }
  6763. case Js::OpCode::InlineMathFloor:
  6764. case Js::OpCode::InlineMathCeil:
  6765. case Js::OpCode::InlineMathRound:
  6766. #ifdef ENABLE_WASM
  6767. case Js::OpCode::Trunc_A:
  6768. case Js::OpCode::Nearest_A:
  6769. #endif //ENABLE_WASM
  6770. {
  6771. Assert(AutoSystemInfo::Data.SSE4_1Available());
  6772. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsFloat());
  6773. // MOVSD roundedFloat, src
  6774. //
  6775. // if(round)
  6776. // {
  6777. // /* N.B.: the following CMPs are lowered to COMISDs, whose results can only be >, <, or =.
  6778. // In fact, only ">" can be used if NaN has not been handled.
  6779. // */
  6780. // CMP 0.5, roundedFloat
  6781. // JA $ltHalf
  6782. // CMP TwoToFraction, roundedFloat
  6783. // JA $addHalfToRoundSrcLabel
  6784. // J $skipRoundSd (NaN is also handled here)
  6785. // $ltHalf:
  6786. // CMP roundedFloat, -0.5
  6787. // JL $ltNegHalf
  6788. // if (shouldCheckNegZero) {
  6789. // CMP roundedFloat, 0
  6790. // JA $setZero
  6791. // $negZeroTest [Helper]:
  6792. // JB $bailoutLabel
  6793. // isNegZero(src)
  6794. // JE $bailoutLabel
  6795. // J $skipRoundSd
  6796. // } // else: setZero
  6797. // $setZero:
  6798. // MOV roundedFloat, 0
  6799. // J $skipRoundSd
  6800. // $ltNegHalf:
  6801. // CMP roundedFloat, NegTwoToFraction
  6802. // JA $addHalfToRoundSrc
  6803. // J $skipRoundSd
  6804. // $addHalfToRoundSrc:
  6805. // ADDSD roundedFloat, 0.5
  6806. // $skipAddHalf:
  6807. // }
  6808. //
  6809. // if(isNotCeil)
  6810. // {
  6811. // CMP roundedFloat, 0
  6812. // JGE $skipRoundSd
  6813. // }
  6814. // ROUNDSD roundedFloat, roundedFloat, round_mode
  6815. //
  6816. // $skipRoundSd:
  6817. // if(isNotCeil)
  6818. // MOVSD checkNegZeroOpnd, roundedFloat
  6819. // else if (ceil)
  6820. // MOVSD checkNegZeroOpnd, src
  6821. //
  6822. // CMP checkNegZeroOpnd, 0
  6823. // JNE $convertToInt
  6824. //
  6825. // if(instr->ShouldCheckForNegativeZero())
  6826. // {
  6827. // isNegZero CALL IsNegZero(checkNegZeroOpnd)
  6828. // CMP isNegZero, 0
  6829. // JNE $bailoutLabel
  6830. // }
  6831. //
  6832. // $convertToInt:
  6833. // CVT(T)SD2SI dst, roundedFloat //CVTTSD2SI for floor/round and CVTSD2SI for ceil
  6834. // CMP dst 0x80000000
  6835. // JNE $fallthrough
  6836. //
  6837. // if(!sharedBailout)
  6838. // {
  6839. // $bailoutLabel:
  6840. // }
  6841. // GenerateBailout(instr)
  6842. //
  6843. // $fallthrough:
  6844. bool isNotCeil = instr->m_opcode != Js::OpCode::InlineMathCeil;
  6845. // MOVSD roundedFloat, src
  6846. IR::Opnd * src = instr->UnlinkSrc1();
  6847. IR::RegOpnd* roundedFloat = IR::RegOpnd::New(src->GetType(), this->m_func);
  6848. IR::Instr* argOut = IR::Instr::New(LowererMDArch::GetAssignOp(src->GetType()), roundedFloat, src, this->m_func);
  6849. instr->InsertBefore(argOut);
  6850. bool negZeroCheckDone = false;
  6851. IR::LabelInstr * bailoutLabel = nullptr;
  6852. bool sharedBailout = false;
  6853. if (instr->GetDst()->IsInt32())
  6854. {
  6855. sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  6856. bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);
  6857. }
  6858. IR::Opnd * zero;
  6859. if (src->IsFloat64())
  6860. {
  6861. zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6862. }
  6863. else
  6864. {
  6865. Assert(src->IsFloat32());
  6866. zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatZeroAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6867. }
  6868. IR::AutoReuseOpnd autoReuseZero(zero, this->m_func);
  6869. IR::LabelInstr * skipRoundSd = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6870. if(instr->m_opcode == Js::OpCode::InlineMathRound)
  6871. {
  6872. IR::LabelInstr * addHalfToRoundSrcLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6873. IR::LabelInstr * ltHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6874. IR::LabelInstr * setZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6875. IR::LabelInstr * ltNegHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6876. IR::Opnd * pointFive;
  6877. IR::Opnd * negPointFive;
  6878. if (src->IsFloat64())
  6879. {
  6880. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6881. negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegPointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6882. }
  6883. else
  6884. {
  6885. Assert(src->IsFloat32());
  6886. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6887. negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6888. }
  6889. // CMP 0.5, roundedFloat
  6890. // JA $ltHalf
  6891. this->m_lowerer->InsertCompareBranch(pointFive, roundedFloat, Js::OpCode::BrGt_A, ltHalf, instr);
  6892. if (instr->GetDst()->IsInt32())
  6893. {
  6894. // if we are specializing dst to int, we will bailout on overflow so don't need upperbound check
  6895. // Also, we will bailout on NaN, so it doesn't need special handling either
  6896. // J $addHalfToRoundSrcLabel
  6897. this->m_lowerer->InsertBranch(Js::OpCode::Br, addHalfToRoundSrcLabel, instr);
  6898. }
  6899. else
  6900. {
  6901. IR::Opnd * twoToFraction;
  6902. if (src->IsFloat64())
  6903. {
  6904. twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6905. }
  6906. else
  6907. {
  6908. Assert(src->IsFloat32());
  6909. twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6910. }
  6911. // CMP 2^fraction, roundedFloat
  6912. // JA $addHalfToRoundSrcLabel
  6913. this->m_lowerer->InsertCompareBranch(twoToFraction, roundedFloat, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
  6914. // J $skipRoundSd (NaN also handled here)
  6915. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6916. }
  6917. // $ltHalf:
  6918. instr->InsertBefore(ltHalf);
  6919. // CMP roundedFloat, -0.5
  6920. // JL $ltNegHalf
  6921. this->m_lowerer->InsertCompareBranch(roundedFloat, negPointFive, Js::OpCode::BrLt_A, ltNegHalf, instr);
  6922. if (instr->ShouldCheckForNegativeZero())
  6923. {
  6924. // CMP roundedFloat, 0
  6925. // JA $setZero
  6926. this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGt_A, setZero, instr);
  6927. // $negZeroTest [helper]
  6928. m_lowerer->InsertLabel(true, instr);
  6929. // JB $bailoutLabel
  6930. this->m_lowerer->InsertBranch(Js::OpCode::JB, bailoutLabel, instr);
  6931. // if isNegZero(src) J $bailoutLabel else J $skipRoundSd
  6932. NegZeroBranching(src, instr, bailoutLabel, skipRoundSd);
  6933. negZeroCheckDone = true;
  6934. }
  6935. // $setZero:
  6936. instr->InsertBefore(setZero);
  6937. // MOVSD_ZERO roundedFloat
  6938. LoadFloatZero(roundedFloat, instr);
  6939. // J $skipRoundSd
  6940. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6941. // $ltNegHalf:
  6942. instr->InsertBefore(ltNegHalf);
  6943. if (!instr->GetDst()->IsInt32())
  6944. {
  6945. // if we are specializing dst to int, we will bailout on overflow so don't need lowerbound check
  6946. IR::Opnd * negTwoToFraction;
  6947. if (src->IsFloat64())
  6948. {
  6949. negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6950. }
  6951. else
  6952. {
  6953. Assert(src->IsFloat32());
  6954. negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6955. }
  6956. // CMP roundedFloat, negTwoToFraction
  6957. // JA $addHalfToRoundSrcLabel
  6958. this->m_lowerer->InsertCompareBranch(roundedFloat, negTwoToFraction, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
  6959. // J $skipRoundSd
  6960. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6961. }
  6962. if (src->IsFloat64())
  6963. {
  6964. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6965. }
  6966. else
  6967. {
  6968. Assert(src->IsFloat32());
  6969. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6970. }
  6971. // $addHalfToRoundSrcLabel
  6972. instr->InsertBefore(addHalfToRoundSrcLabel);
  6973. // ADDSD roundedFloat, 0.5
  6974. IR::Instr * addInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS, roundedFloat, roundedFloat, pointFive, this->m_func);
  6975. instr->InsertBefore(addInstr);
  6976. Legalize(addInstr);
  6977. }
  6978. if (instr->m_opcode == Js::OpCode::InlineMathFloor && instr->GetDst()->IsInt32())
  6979. {
  6980. this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGe_A, skipRoundSd, instr);
  6981. }
  6982. // ROUNDSD srcCopy, srcCopy, round_mode
  6983. IR::Opnd * roundMode = nullptr;
  6984. switch (instr->m_opcode)
  6985. {
  6986. #ifdef ENABLE_WASM
  6987. case Js::OpCode::Trunc_A:
  6988. roundMode = IR::IntConstOpnd::New(0x03, TyInt32, this->m_func);
  6989. break;
  6990. case Js::OpCode::Nearest_A:
  6991. roundMode = IR::IntConstOpnd::New(0x00, TyInt32, this->m_func);
  6992. break;
  6993. #endif //ENABLE_WASM
  6994. case Js::OpCode::InlineMathRound:
  6995. case Js::OpCode::InlineMathFloor:
  6996. roundMode = IR::IntConstOpnd::New(0x01, TyInt32, this->m_func);
  6997. break;
  6998. case Js::OpCode::InlineMathCeil:
  6999. roundMode = IR::IntConstOpnd::New(0x02, TyInt32, this->m_func);
  7000. break;
  7001. }
  7002. IR::Instr* roundInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ROUNDSD : Js::OpCode::ROUNDSS, roundedFloat, roundedFloat, roundMode, this->m_func);
  7003. instr->InsertBefore(roundInstr);
  7004. if (instr->m_opcode == Js::OpCode::InlineMathRound)
  7005. {
  7006. instr->InsertBefore(skipRoundSd);
  7007. }
  7008. if (instr->GetDst()->IsInt32())
  7009. {
  7010. if (instr->m_opcode == Js::OpCode::InlineMathFloor)
  7011. {
  7012. instr->InsertBefore(skipRoundSd);
  7013. }
  7014. //negZero bailout
  7015. if(instr->ShouldCheckForNegativeZero() && !negZeroCheckDone)
  7016. {
  7017. IR::LabelInstr * convertToInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  7018. IR::Opnd * checkNegZeroOpnd = isNotCeil ? src : roundedFloat;
  7019. this->m_lowerer->InsertCompareBranch(checkNegZeroOpnd, zero, Js::OpCode::BrNeq_A, convertToInt, instr);
  7020. m_lowerer->InsertLabel(true, instr);
  7021. NegZeroBranching(checkNegZeroOpnd, instr, bailoutLabel, convertToInt);
  7022. instr->InsertBefore(convertToInt);
  7023. }
  7024. IR::Opnd * originalDst = instr->UnlinkDst();
  7025. // CVT(T)SD2SI dst, srcCopy
  7026. IR::Instr* convertToIntInstr;
  7027. if (isNotCeil)
  7028. {
  7029. convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, originalDst, roundedFloat, this->m_func);
  7030. }
  7031. else
  7032. {
  7033. convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTSD2SI : Js::OpCode::CVTSS2SI, originalDst, roundedFloat, this->m_func);
  7034. }
  7035. instr->InsertBefore(convertToIntInstr);
  7036. IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  7037. IR::Opnd * intOverflowValue = IR::IntConstOpnd::New(INT32_MIN, IRType::TyInt32, this->m_func, true);
  7038. this->m_lowerer->InsertCompareBranch(originalDst, intOverflowValue, Js::OpCode::BrNeq_A, fallthrough, instr);
  7039. instr->InsertAfter(fallthrough);
  7040. if (!sharedBailout)
  7041. {
  7042. instr->InsertBefore(bailoutLabel);
  7043. }
  7044. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  7045. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  7046. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  7047. }
  7048. else
  7049. {
  7050. IR::Opnd * originalDst = instr->UnlinkDst();
  7051. Assert(originalDst->IsFloat());
  7052. Assert(originalDst->GetType() == roundedFloat->GetType());
  7053. IR::Instr * movInstr = IR::Instr::New(originalDst->IsFloat64() ? Js::OpCode::MOVSD : Js::OpCode::MOVSS, originalDst, roundedFloat, this->m_func);
  7054. instr->InsertBefore(movInstr);
  7055. instr->Remove();
  7056. }
  7057. break;
  7058. }
  7059. case Js::OpCode::InlineMathMin:
  7060. case Js::OpCode::InlineMathMax:
  7061. {
  7062. IR::Opnd* src1 = instr->GetSrc1();
  7063. IR::Opnd* src2 = instr->GetSrc2();
  7064. IR::Opnd* dst = instr->GetDst();
  7065. IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  7066. IR::LabelInstr* labelNaNHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  7067. IR::LabelInstr* labelNegZeroAndNaNCheckHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  7068. IR::Instr* branchInstr;
  7069. bool min = instr->m_opcode == Js::OpCode::InlineMathMin ? true : false;
  7070. bool dstEqualsSrc1 = dst->IsEqual(src1);
  7071. bool dstEqualsSrc2 = dst->IsEqual(src2);
  7072. IR::Opnd * otherSrc = src2;
  7073. IR::Opnd * compareSrc1 = src1;
  7074. IR::Opnd * compareSrc2 = src2;
  7075. if (dstEqualsSrc2)
  7076. {
  7077. otherSrc = src1;
  7078. compareSrc1 = src2;
  7079. compareSrc2 = src1;
  7080. }
  7081. if (!dstEqualsSrc1 && !dstEqualsSrc2)
  7082. {
  7083. //MOV dst, src1;
  7084. this->m_lowerer->InsertMove(dst, src1, instr);
  7085. }
  7086. // CMP src1, src2
  7087. if(dst->IsInt32())
  7088. {
  7089. if(min)
  7090. {
  7091. // JLT $continueLabel
  7092. branchInstr = IR::BranchInstr::New(Js::OpCode::BrLt_I4, doneLabel, compareSrc1, compareSrc2, instr->m_func);
  7093. instr->InsertBefore(branchInstr);
  7094. LowererMDArch::EmitInt4Instr(branchInstr);
  7095. }
  7096. else
  7097. {
  7098. // JGT $continueLabel
  7099. branchInstr = IR::BranchInstr::New(Js::OpCode::BrGt_I4, doneLabel, compareSrc1, compareSrc2, instr->m_func);
  7100. instr->InsertBefore(branchInstr);
  7101. LowererMDArch::EmitInt4Instr(branchInstr);
  7102. }
  7103. // MOV dst, src1
  7104. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7105. }
  7106. else if(dst->IsFloat())
  7107. {
  7108. // COMISD/COMISS src1 (src2), src2 (src1)
  7109. // JA $doneLabel
  7110. // JEQ $labelNegZeroAndNaNCheckHelper
  7111. // MOVSD/MOVSS dst, src2
  7112. // JMP $doneLabel
  7113. //
  7114. // $labelNegZeroAndNaNCheckHelper
  7115. // JP $labelNaNHelper
  7116. // if(min)
  7117. // {
  7118. // if(src2 == -0.0)
  7119. // MOVSD/MOVSS dst, src2
  7120. // }
  7121. // else
  7122. // {
  7123. // if(src1 == -0.0)
  7124. // MOVSD/MOVSS dst, src2
  7125. // }
  7126. // JMP $doneLabel
  7127. //
  7128. // $labelNaNHelper
  7129. // MOVSD/MOVSS dst, NaN
  7130. //
  7131. // $doneLabel
  7132. if(min)
  7133. {
  7134. this->m_lowerer->InsertCompareBranch(compareSrc1, compareSrc2, Js::OpCode::BrLt_A, doneLabel, instr); // Lowering of BrLt_A for floats is done to JA with operands swapped
  7135. }
  7136. else
  7137. {
  7138. this->m_lowerer->InsertCompareBranch(compareSrc1, compareSrc2, Js::OpCode::BrGt_A, doneLabel, instr);
  7139. }
  7140. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, labelNegZeroAndNaNCheckHelper, instr->m_func));
  7141. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7142. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));
  7143. instr->InsertBefore(labelNegZeroAndNaNCheckHelper);
  7144. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, labelNaNHelper, instr->m_func));
  7145. IR::LabelInstr *isNeg0Label = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  7146. NegZeroBranching(min ? compareSrc2 : compareSrc1, instr, isNeg0Label, doneLabel);
  7147. instr->InsertBefore(isNeg0Label);
  7148. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7149. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));
  7150. instr->InsertBefore(labelNaNHelper);
  7151. IR::Opnd * opndNaN = nullptr;
  7152. if (dst->IsFloat32())
  7153. {
  7154. opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), IRType::TyFloat32, this->m_func);
  7155. }
  7156. else
  7157. {
  7158. opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), IRType::TyFloat64, this->m_func);
  7159. }
  7160. this->m_lowerer->InsertMove(dst, opndNaN, instr);
  7161. }
  7162. instr->InsertBefore(doneLabel);
  7163. instr->Remove();
  7164. break;
  7165. }
  7166. default:
  7167. AssertMsg(FALSE, "Unknown inline built-in opcode");
  7168. break;
  7169. }
  7170. }
  7171. void LowererMD::GenerateFastInlineBuiltInMathAbs(IR::Instr* inlineInstr)
  7172. {
  7173. IR::Opnd* src = inlineInstr->GetSrc1()->Copy(this->m_func);
  7174. IR::Opnd* dst = inlineInstr->UnlinkDst();
  7175. Assert(src);
  7176. IR::Instr* tmpInstr;
  7177. IR::Instr* nextInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  7178. IR::Instr* continueInstr = m_lowerer->LowerBailOnIntMin(inlineInstr);
  7179. continueInstr->InsertAfter(nextInstr);
  7180. IRType srcType = src->GetType();
  7181. if (srcType == IRType::TyInt32)
  7182. {
  7183. // Note: if execution gets so far, we always get (untagged) int32 here.
  7184. // Since -x = ~x + 1, abs(x) = x, abs(-x) = -x, sign-extend(x) = 0, sign_extend(-x) = -1, where 0 <= x.
  7185. // Then: abs(x) = sign-extend(x) XOR x - sign-extend(x)
  7186. // Expected input (otherwise bailout):
  7187. // - src1 is (untagged) int, not equal to int_min (abs(int_min) would produce overflow, as there's no corresponding positive int).
  7188. // MOV EAX, src
  7189. IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
  7190. regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
  7191. tmpInstr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
  7192. nextInstr->InsertBefore(tmpInstr);
  7193. IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
  7194. regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());
  7195. // CDQ (sign-extend EAX into EDX, producing 64bit EDX:EAX value)
  7196. // Note: put EDX on dst to give of def to the EDX lifetime
  7197. tmpInstr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
  7198. nextInstr->InsertBefore(tmpInstr);
  7199. // XOR EAX, EDX
  7200. tmpInstr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
  7201. nextInstr->InsertBefore(tmpInstr);
  7202. // SUB EAX, EDX
  7203. tmpInstr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
  7204. nextInstr->InsertBefore(tmpInstr);
  7205. // MOV dst, EAX
  7206. tmpInstr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
  7207. nextInstr->InsertBefore(tmpInstr);
  7208. }
  7209. else if (srcType == IRType::TyFloat64)
  7210. {
  7211. if (!dst->IsRegOpnd())
  7212. {
  7213. // MOVSD tempRegOpnd, src
  7214. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyMachDouble, this->m_func);
  7215. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  7216. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, tempRegOpnd, src, this->m_func);
  7217. nextInstr->InsertBefore(tmpInstr);
  7218. // This saves the result in the same register.
  7219. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  7220. // MOVSD dst, tempRegOpnd
  7221. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempRegOpnd, this->m_func);
  7222. nextInstr->InsertBefore(tmpInstr);
  7223. }
  7224. else
  7225. {
  7226. // MOVSD dst, src
  7227. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, src, this->m_func);
  7228. nextInstr->InsertBefore(tmpInstr);
  7229. // This saves the result in the same register.
  7230. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  7231. }
  7232. }
  7233. else if (srcType == IRType::TyFloat32)
  7234. {
  7235. if (!dst->IsRegOpnd())
  7236. {
  7237. // MOVSS tempRegOpnd, src
  7238. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyFloat32, this->m_func);
  7239. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  7240. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, tempRegOpnd, src, this->m_func);
  7241. nextInstr->InsertBefore(tmpInstr);
  7242. // This saves the result in the same register.
  7243. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  7244. // MOVSS dst, tempRegOpnd
  7245. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, tempRegOpnd, this->m_func);
  7246. nextInstr->InsertBefore(tmpInstr);
  7247. }
  7248. else
  7249. {
  7250. // MOVSS dst, src
  7251. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, src, this->m_func);
  7252. nextInstr->InsertBefore(tmpInstr);
  7253. // This saves the result in the same register.
  7254. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  7255. }
  7256. }
  7257. else
  7258. {
  7259. AssertMsg(FALSE, "GenerateFastInlineBuiltInMathAbs: unexpected type of the src!");
  7260. }
  7261. }
  7262. void LowererMD::GenerateFastInlineBuiltInMathPow(IR::Instr* instr)
  7263. {
  7264. #ifdef _M_IX86
  7265. AssertMsg(!instr->GetSrc2()->IsFloat(), "Math.pow(*, double) needs customized lowering!");
  7266. #endif
  7267. IR::JnHelperMethod directPowHelper = (IR::JnHelperMethod)0;
  7268. IR::Opnd* bailoutOpnd = nullptr;
  7269. if (!instr->GetSrc2()->IsFloat())
  7270. {
  7271. LoadHelperArgument(instr, instr->UnlinkSrc2());
  7272. if (instr->GetSrc1()->IsFloat())
  7273. {
  7274. directPowHelper = IR::HelperDirectMath_PowDoubleInt;
  7275. LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  7276. }
  7277. else
  7278. {
  7279. directPowHelper = IR::HelperDirectMath_PowIntInt;
  7280. LoadHelperArgument(instr, instr->UnlinkSrc1());
  7281. if (!this->m_func->tempSymBool)
  7282. {
  7283. this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
  7284. this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
  7285. }
  7286. IR::SymOpnd* boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
  7287. IR::RegOpnd* boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  7288. this->m_lowerer->InsertLea(boolRefOpnd, boolOpnd, instr);
  7289. LoadHelperArgument(instr, boolRefOpnd);
  7290. bailoutOpnd = boolOpnd;
  7291. }
  7292. }
  7293. #ifndef _M_IX86
  7294. else
  7295. {
  7296. AssertMsg(instr->GetSrc1()->IsFloat(), "Math.Pow(int, double) should not generated by GlobOpt!");
  7297. directPowHelper = IR::HelperDirectMath_Pow;
  7298. LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
  7299. LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  7300. }
  7301. #endif
  7302. ChangeToHelperCall(instr, directPowHelper, nullptr, bailoutOpnd);
  7303. }
IR::Instr *
LowererMD::NegZeroBranching(IR::Opnd* opnd, IR::Instr* instr, IR::LabelInstr* isNeg0Label, IR::LabelInstr* isNotNeg0Label)
{
    // Emits a branch to isNeg0Label when 'opnd' is exactly negative zero
    // (-0.0), and to isNotNeg0Label otherwise. A floating-point compare cannot
    // distinguish -0.0 from +0.0, so the float bits are reinterpreted as an
    // integer and compared against the bit pattern of -0.0.
    // All instructions are inserted before 'instr'; returns the trailing JMP
    // so callers can insert code relative to it.
    Assert(opnd->IsFloat());
    bool is32Bits = opnd->IsFloat32();
    IRType regType = is32Bits ? TyUint32 : TyUint64;
    // Use UInt64 comparison between the opnd to check and negative zero constant.
    // For this we have to convert opnd which is a double to uint64.
    // MOV intOpnd, src
    IR::RegOpnd *intOpnd = IR::RegOpnd::New(regType, this->m_func);
    EmitReinterpretFloatToInt(intOpnd, opnd, instr);
#if LOWER_SPLIT_INT64
    if (!is32Bits)
    {
        // For 64bits comparisons on x86 we need to check 2 registers
        // CMP intOpnd.high, (k_NegZero >> 32).i32
        // BRNEQ isNotNeg0Label
        // CMP intOpnd.low, k_NegZero.i32
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(intOpnd);
        const uint32 high64NegZero = Js::NumberConstants::k_NegZero >> 32;
        const uint32 low64NegZero = Js::NumberConstants::k_NegZero & UINT32_MAX;
        IR::IntConstOpnd *negZeroHighOpnd = IR::IntConstOpnd::New(high64NegZero, TyUint32, m_func);
        IR::IntConstOpnd *negZeroLowOpnd = IR::IntConstOpnd::New(low64NegZero, TyUint32, m_func);
        m_lowerer->InsertCompareBranch(dstPair.high, negZeroHighOpnd, Js::OpCode::BrNeq_A, isNotNeg0Label, instr);
        m_lowerer->InsertCompareBranch(dstPair.low, negZeroLowOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    else
#endif
    {
#if _M_IX86
        // On x86, the 64-bit case is handled by the split-pair path above, so
        // only the float32 constant is needed here.
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(Js::NumberConstants::k_Float32NegZero, regType, m_func);
#else
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(is32Bits ? Js::NumberConstants::k_Float32NegZero : Js::NumberConstants::k_NegZero, regType, m_func);
#endif
        // CMP intOpnd, k_NegZero
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        m_lowerer->InsertCompareBranch(intOpnd, negZeroOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    IR::Instr* jmpNotNegZero = IR::BranchInstr::New(Js::OpCode::JMP, isNotNeg0Label, m_func);
    instr->InsertBefore(jmpNotNegZero);
    return jmpNotNegZero;
}
void
LowererMD::FinalLower()
{
    // Delegate the final (post-register-allocation) lowering pass to the
    // architecture-specific lowerer.
    this->lowererMDArch.FinalLower();
}
  7354. IR::Instr *
  7355. LowererMD::LowerDivI4AndBailOnReminder(IR::Instr * instr, IR::LabelInstr * bailOutLabel)
  7356. {
  7357. // Don't have save the operand for bailout because the lowering of IDIV don't overwrite their values
  7358. // (EDX) = CDQ
  7359. // EAX = numerator
  7360. // (EDX:EAX)= IDIV (EAX), denominator
  7361. // TEST EDX, EDX
  7362. // JNE bailout
  7363. // <Caller insert more checks here>
  7364. // dst = MOV EAX <-- assignInstr
  7365. Assert(instr);
  7366. Assert(instr->m_opcode == Js::OpCode::Div_I4);
  7367. Assert(!instr->HasBailOutInfo());
  7368. EmitInt4Instr(instr);
  7369. Assert(instr->m_opcode == Js::OpCode::IDIV);
  7370. IR::Instr * prev = instr->m_prev;
  7371. Assert(prev->m_opcode == Js::OpCode::CDQ);
  7372. #ifdef _M_IX86
  7373. Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegEDX);
  7374. #else
  7375. Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegRDX);
  7376. #endif
  7377. IR::Opnd * reminderOpnd = prev->GetDst();
  7378. // Insert all check before the assignment to the actual dst.
  7379. IR::Instr * insertBeforeInstr = instr->m_next;
  7380. Assert(insertBeforeInstr->m_opcode == Js::OpCode::MOV);
  7381. #ifdef _M_IX86
  7382. Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegEAX);
  7383. #else
  7384. Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegRAX);
  7385. #endif
  7386. // Jump to bailout if the reminder is not 0 (not int result)
  7387. this->m_lowerer->InsertTestBranch(reminderOpnd, reminderOpnd, Js::OpCode::BrNeq_A, bailOutLabel, insertBeforeInstr);
  7388. return insertBeforeInstr;
  7389. }
void
LowererMD::LowerTypeof(IR::Instr * typeOfInstr)
{
    // Lowers a typeof instruction: for common cases the result string is
    // fetched from the library's typeDisplayStrings table indexed by type id;
    // a null table entry falls back to the OP_Typeof helper call.
    Func * func = typeOfInstr->m_func;
    IR::Opnd * src1 = typeOfInstr->GetSrc1();
    IR::Opnd * dst = typeOfInstr->GetDst();
    Assert(src1->IsRegOpnd() && dst->IsRegOpnd());
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * taggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    // MOV typeDisplayStringsArray, &javascriptLibrary->typeDisplayStrings
    IR::RegOpnd * typeDisplayStringsArrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowerer->InsertMove(typeDisplayStringsArrayOpnd, IR::AddrOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetTypeDisplayStringsOffset(), IR::AddrOpndKindConstantAddress, this->m_func), typeOfInstr);
    // Tagged ints short-circuit to the "number" entry below.
    GenerateObjectTest(src1, typeOfInstr, taggedIntLabel);
    // MOV typeId, TypeIds_Object
    // MOV typeRegOpnd, [src1 + offset(Type)]
    // MOV objTypeId, [typeRegOpnd + offsetof(typeId)]
    // CMP objTypeId, TypeIds_Limit /*external object test*/
    // CMOVB typeId, objTypeId
    // TEST [typeRegOpnd + offsetof(flags)], TypeFlagMask_IsFalsy /*test for falsy*/
    // CMOVNE typeId, TypeIds_Undefined
    // MOV dst, typeDisplayStrings[typeId]
    // TEST dst, dst
    // JE $helper
    // JMP $done
    IR::RegOpnd * typeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    // Default to TypeIds_Object; overwritten below when the object's type id
    // is below TypeIds_Limit (i.e. not an external object).
    m_lowerer->InsertMove(typeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Object, TyUint32, func), typeOfInstr);
    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, func);
    m_lowerer->InsertMove(typeRegOpnd,
        IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func),
        typeOfInstr);
    IR::RegOpnd * objTypeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(objTypeIdOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, func), typeOfInstr);
    m_lowerer->InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Limit, TyUint32, func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVB, typeIdOpnd, objTypeIdOpnd, typeOfInstr);
    // Insert MOV reg, 0 before the TEST because MOV reg, 0 will be peeped to XOR reg, reg and that may affect the zero flags that CMOVE depends on
    IR::RegOpnd* typeIdUndefinedOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdUndefinedOpnd, IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyUint32, func), typeOfInstr);
    // Falsy objects report "undefined".
    IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
    m_lowerer->InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVNE, typeIdOpnd, typeIdUndefinedOpnd, typeOfInstr);
    if (dst->IsEqual(src1))
    {
        // dst aliases src1: hoist src1 into a fresh register before dst is
        // overwritten by the table load below.
        ChangeToAssign(typeOfInstr->HoistSrc1(Js::OpCode::Ld_A));
    }
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, typeIdOpnd, this->GetDefaultIndirScale(), TyMachPtr, func), typeOfInstr);
    // A null table entry means no precomputed display string; go to the helper.
    m_lowerer->InsertTestBranch(dst, dst, Js::OpCode::BrEq_A, helperLabel, typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
    // $taggedInt:
    // MOV dst, typeDisplayStrings[TypeIds_Number]
    // JMP $done
    typeOfInstr->InsertBefore(taggedIntLabel);
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Number * sizeof(Js::Var), TyMachPtr, func), typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
    // $helper
    // CALL OP_TypeOf
    // $done
    typeOfInstr->InsertBefore(helperLabel);
    typeOfInstr->InsertAfter(doneLabel);
    m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
}
  7451. void
  7452. LowererMD::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore)
  7453. {
  7454. if ((isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForStores)) || (!isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForLoads)))
  7455. {
  7456. Js::OpCode opcode;
  7457. if (branchInstr->m_opcode == Js::OpCode::JNE)
  7458. {
  7459. opcode = Js::OpCode::CMOVNE;
  7460. }
  7461. else
  7462. {
  7463. AssertOrFailFastMsg(branchInstr->m_opcode == Js::OpCode::JEQ, "Unexpected branch type in InsertObjectPoison preceeding instruction");
  7464. opcode = Js::OpCode::CMOVE;
  7465. }
  7466. AssertOrFailFast(branchInstr->m_prev->m_opcode == Js::OpCode::CMP || branchInstr->m_prev->m_opcode == Js::OpCode::TEST);
  7467. IR::RegOpnd* regZero = IR::RegOpnd::New(TyMachPtr, insertInstr->m_func);
  7468. Lowerer::InsertMove(regZero, IR::IntConstOpnd::New(0, TyMachPtr, insertInstr->m_func), branchInstr->m_prev);
  7469. InsertCmovCC(opcode, poisonedOpnd, regZero, insertInstr);
  7470. }
  7471. }
  7472. IR::Instr*
  7473. LowererMD::InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc)
  7474. {
  7475. Assert(opCode > Js::OpCode::MDStart);
  7476. Func* func = insertBeforeInstr->m_func;
  7477. IR::Opnd* src2 = nullptr;
  7478. if (!postRegAlloc)
  7479. {
  7480. src2 = src1;
  7481. src1 = dst;
  7482. }
  7483. IR::Instr * instr = IR::Instr::New(opCode, dst, src1, src2, func);
  7484. insertBeforeInstr->InsertBefore(instr);
  7485. LowererMD::Legalize(instr);
  7486. return instr;
  7487. }
IR::BranchInstr*
LowererMD::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
{
    // Thin wrapper: the compare-against-missing-array-item sequence is
    // architecture specific, so delegate to the arch lowerer.
    return this->lowererMDArch.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr);
}