LowerMDShared.cpp 302 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
782788279828082818282828382848285828682878288828982908291829282938294829582968297829882998300830183028303830483058306830783088309831083118312831383148315831683178318831983208321832283238324832583268327832883298330833183328333833483358336833783388339834083418342834383448345834683478348834983508351835283538354835583568357835883598360836183628363836483658366836783688369837083718372837383748375837683778378837983808381838283838384838583868387838883898390839183928393839483958396839783988399840084018402840384048405840684078408840984108411841284138414841584168417841884198420842184228423842484258426842784288429843084318432843384348435843684378438843984408441844284438444844584468447844884498450845184528453845484558456845784588459846084618462846384648465846684678468846984708471847284738474847584768477847884798480848184828483848484858486848784888489849084918492849384948495849684978498849985008501850285038504850585068507850885098510851185128513851485158516851785188519852085218522852385248525852685278528852985308531853285338534853585368537853885398540854185428543854485458546854785488549855085518552855385548555855685578558855985608561856285638564856585668567856885698570857185728573857485758576857785788579858085818582858385848585858685878588858985908591859285938594859585968597859885998600860186028603860486058606860786088609861086118612861386148615861686178618861986208621862286238624862586268627862886298630863186328633863486358636
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Language/JavascriptFunctionArgIndex.h"
// Machine-dependent opcode aliases consumed by the shared (MD-independent)
// lowerer; on this target they map directly to x86/x64 instructions.
const Js::OpCode LowererMD::MDUncondBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDMultiBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDTestOpcode = Js::OpCode::TEST;
const Js::OpCode LowererMD::MDOrOpcode = Js::OpCode::OR;
const Js::OpCode LowererMD::MDXorOpcode = Js::OpCode::XOR;
#if _M_X64
// MOVQ transfers a raw 64-bit GPR bit pattern into an XMM register.
const Js::OpCode LowererMD::MDMovUint64ToFloat64Opcode = Js::OpCode::MOVQ;
#endif
const Js::OpCode LowererMD::MDOverflowBranchOpcode = Js::OpCode::JO;
const Js::OpCode LowererMD::MDNotOverflowBranchOpcode = Js::OpCode::JNO;
const Js::OpCode LowererMD::MDConvertFloat32ToFloat64Opcode = Js::OpCode::CVTSS2SD;
const Js::OpCode LowererMD::MDConvertFloat64ToFloat32Opcode = Js::OpCode::CVTSD2SS;
const Js::OpCode LowererMD::MDCallOpcode = Js::OpCode::CALL;
const Js::OpCode LowererMD::MDImulOpcode = Js::OpCode::IMUL2;
const Js::OpCode LowererMD::MDLea = Js::OpCode::LEA;
// Conditional moves used for speculative-block lowering (both map to CMOVNE).
const Js::OpCode LowererMD::MDSpecBlockNEOpcode = Js::OpCode::CMOVNE;
const Js::OpCode LowererMD::MDSpecBlockFNEOpcode = Js::OpCode::CMOVNE;
// IEEE-754 single-precision bit patterns: 0x4f000000 == 2^31 (2147483648.0f)
// and 0xcf000000 == -2^31 — presumably used for float->int range checks
// elsewhere in this file (not visible in this chunk).
static const int TWO_31_FLOAT = 0x4f000000;
static const int FLOAT_INT_MIN = 0xcf000000;
  26. //
  27. // Static utility fn()
  28. //
  29. bool
  30. LowererMD::IsAssign(IR::Instr *instr)
  31. {
  32. return instr->GetDst() && instr->m_opcode == LowererMDArch::GetAssignOp(instr->GetDst()->GetType());
  33. }
  34. ///----------------------------------------------------------------------------
  35. ///
  36. /// LowererMD::IsCall
  37. ///
  38. ///----------------------------------------------------------------------------
  39. bool
  40. LowererMD::IsCall(IR::Instr *instr)
  41. {
  42. return instr->m_opcode == Js::OpCode::CALL;
  43. }
  44. ///----------------------------------------------------------------------------
  45. ///
  46. /// LowererMD::IsUnconditionalBranch
  47. ///
  48. ///----------------------------------------------------------------------------
  49. bool
  50. LowererMD::IsUnconditionalBranch(const IR::Instr *instr)
  51. {
  52. return (instr->m_opcode == Js::OpCode::JMP);
  53. }
  54. // GenerateMemRef: Return an opnd that can be used to access the given address.
  55. IR::Opnd *
  56. LowererMD::GenerateMemRef(intptr_t addr, IRType type, IR::Instr *instr, bool dontEncode)
  57. {
  58. return IR::MemRefOpnd::New(addr, type, this->m_func);
  59. }
// Store an immediate 'value' into [opnd + offset] before insertBeforeInstr.
// isZeroed indicates the target memory is known to already be zero, which
// lets the callee skip redundant stores.
void
LowererMD::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
#if _M_X64
    // 64-bit: the arch-specific lowerer handles full-width (64-bit) immediates.
    lowererMDArch.GenerateMemInit(opnd, offset, value, insertBeforeInstr, isZeroed);
#else
    // 32-bit: size_t fits in 32 bits, so route through the shared lowerer.
    m_lowerer->GenerateMemInit(opnd, offset, (uint32)value, insertBeforeInstr, isZeroed);
#endif
}
  69. ///----------------------------------------------------------------------------
  70. ///
  71. /// LowererMD::InvertBranch
  72. ///
  73. ///----------------------------------------------------------------------------
  74. void
  75. LowererMD::InvertBranch(IR::BranchInstr *branchInstr)
  76. {
  77. switch (branchInstr->m_opcode)
  78. {
  79. case Js::OpCode::JA:
  80. branchInstr->m_opcode = Js::OpCode::JBE;
  81. break;
  82. case Js::OpCode::JAE:
  83. branchInstr->m_opcode = Js::OpCode::JB;
  84. break;
  85. case Js::OpCode::JB:
  86. branchInstr->m_opcode = Js::OpCode::JAE;
  87. break;
  88. case Js::OpCode::JBE:
  89. branchInstr->m_opcode = Js::OpCode::JA;
  90. break;
  91. case Js::OpCode::JEQ:
  92. branchInstr->m_opcode = Js::OpCode::JNE;
  93. break;
  94. case Js::OpCode::JNE:
  95. branchInstr->m_opcode = Js::OpCode::JEQ;
  96. break;
  97. case Js::OpCode::JGE:
  98. branchInstr->m_opcode = Js::OpCode::JLT;
  99. break;
  100. case Js::OpCode::JGT:
  101. branchInstr->m_opcode = Js::OpCode::JLE;
  102. break;
  103. case Js::OpCode::JLT:
  104. branchInstr->m_opcode = Js::OpCode::JGE;
  105. break;
  106. case Js::OpCode::JLE:
  107. branchInstr->m_opcode = Js::OpCode::JGT;
  108. break;
  109. case Js::OpCode::JO:
  110. branchInstr->m_opcode = Js::OpCode::JNO;
  111. break;
  112. case Js::OpCode::JNO:
  113. branchInstr->m_opcode = Js::OpCode::JO;
  114. break;
  115. case Js::OpCode::JP:
  116. branchInstr->m_opcode = Js::OpCode::JNP;
  117. break;
  118. case Js::OpCode::JNP:
  119. branchInstr->m_opcode = Js::OpCode::JP;
  120. break;
  121. case Js::OpCode::JSB:
  122. branchInstr->m_opcode = Js::OpCode::JNSB;
  123. break;
  124. case Js::OpCode::JNSB:
  125. branchInstr->m_opcode = Js::OpCode::JSB;
  126. break;
  127. default:
  128. AssertMsg(UNREACHED, "JCC missing in InvertBranch()");
  129. }
  130. }
  131. void
  132. LowererMD::ReverseBranch(IR::BranchInstr *branchInstr)
  133. {
  134. switch (branchInstr->m_opcode)
  135. {
  136. case Js::OpCode::JA:
  137. branchInstr->m_opcode = Js::OpCode::JB;
  138. break;
  139. case Js::OpCode::JAE:
  140. branchInstr->m_opcode = Js::OpCode::JBE;
  141. break;
  142. case Js::OpCode::JB:
  143. branchInstr->m_opcode = Js::OpCode::JA;
  144. break;
  145. case Js::OpCode::JBE:
  146. branchInstr->m_opcode = Js::OpCode::JAE;
  147. break;
  148. case Js::OpCode::JGE:
  149. branchInstr->m_opcode = Js::OpCode::JLE;
  150. break;
  151. case Js::OpCode::JGT:
  152. branchInstr->m_opcode = Js::OpCode::JLT;
  153. break;
  154. case Js::OpCode::JLT:
  155. branchInstr->m_opcode = Js::OpCode::JGT;
  156. break;
  157. case Js::OpCode::JLE:
  158. branchInstr->m_opcode = Js::OpCode::JGE;
  159. break;
  160. case Js::OpCode::JEQ:
  161. case Js::OpCode::JNE:
  162. case Js::OpCode::JO:
  163. case Js::OpCode::JNO:
  164. case Js::OpCode::JP:
  165. case Js::OpCode::JNP:
  166. case Js::OpCode::JSB:
  167. case Js::OpCode::JNSB:
  168. break;
  169. default:
  170. AssertMsg(UNREACHED, "JCC missing in ReverseBranch()");
  171. }
  172. }
// Lower a call whose src1 is a helper method and whose src2 heads a linked
// chain of ArgOut_A/ExtendArg_A instructions. Walks the chain, loading each
// argument as a helper-call argument, then emits the CALL. On x86 the CALL is
// materialized first and arg loads are inserted before it; on x64 the args are
// loaded first and the CALL is materialized at the end.
IR::Instr *
LowererMD::LowerCallHelper(IR::Instr *instrCall)
{
    IR::Opnd *argOpnd = instrCall->UnlinkSrc2();
    IR::Instr *prevInstr = nullptr;
    IR::JnHelperMethod helperMethod = instrCall->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
    instrCall->FreeSrc1();
#ifndef _M_X64
    bool callHasDst = instrCall->GetDst() != nullptr;
    prevInstr = ChangeToHelperCall(instrCall, helperMethod);
    if (callHasDst)
    {
        // Step back past the dst move so prevInstr is the CALL itself
        // (validated by the assert below).
        prevInstr = prevInstr->m_prev;
    }
    Assert(prevInstr->GetSrc1()->IsHelperCallOpnd() && prevInstr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper == helperMethod);
#else
    prevInstr = instrCall;
#endif
    while (argOpnd)
    {
        Assert(argOpnd->IsRegOpnd());
        IR::RegOpnd *regArg = argOpnd->AsRegOpnd();
        Assert(regArg->m_sym->m_isSingleDef);
        IR::Instr *instrArg = regArg->m_sym->m_instrDef;
        // ExtendArg_A is only legal for this whitelisted set of helpers
        // (note: && binds tighter than ||, which is the intended grouping here).
        Assert(instrArg->m_opcode == Js::OpCode::ArgOut_A || instrArg->m_opcode == Js::OpCode::ExtendArg_A &&
            (
                helperMethod == IR::JnHelperMethod::HelperOP_InitCachedScope ||
                helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj ||
                helperMethod == IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj ||
                helperMethod == IR::JnHelperMethod::HelperRestify ||
                helperMethod == IR::JnHelperMethod::HelperStPropIdArrFromVar
            ));
        prevInstr = LoadHelperArgument(prevInstr, instrArg->GetSrc1());
        // Follow the chain to the next argument before tearing this one down.
        argOpnd = instrArg->GetSrc2();
        if (prevInstr == instrArg)
        {
            prevInstr = prevInstr->m_prev;
        }
        if (instrArg->m_opcode == Js::OpCode::ArgOut_A)
        {
            // The ArgOut has served its purpose; detach its operands and
            // remove it so the arg value isn't kept alive spuriously.
            instrArg->UnlinkSrc1();
            if (argOpnd)
            {
                instrArg->UnlinkSrc2();
            }
            regArg->Free(this->m_func);
            instrArg->Remove();
        }
        else if (instrArg->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // ExtendArg defs stay in place; keep their source live across
            // loop back-edges so later lowering can still read them.
            if (instrArg->GetSrc1()->IsRegOpnd())
            {
                m_lowerer->addToLiveOnBackEdgeSyms->Set(instrArg->GetSrc1()->AsRegOpnd()->GetStackSym()->m_id);
            }
        }
    }
    // Most helpers take the script context as an implicit trailing argument;
    // the two home-object helpers do not.
    switch (helperMethod)
    {
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScFuncHomeObj:
    case IR::JnHelperMethod::HelperScrFunc_OP_NewScGenFuncHomeObj:
        break;
    default:
        prevInstr = m_lowerer->LoadScriptContext(prevInstr);
        break;
    }
#ifdef _M_X64
    // Args were collected in reverse; flip them, then emit the CALL.
    FlipHelperCallArgsOrder();
    ChangeToHelperCall(instrCall, helperMethod);
#else
    this->lowererMDArch.ResetHelperArgsCount();
#endif
    // There might be ToVar in between the ArgOut, need to continue lower from the call still
    return instrCall;
}
  247. //
  248. // forwarding functions
  249. //
  250. IR::Instr *
  251. LowererMD::LowerCall(IR::Instr * callInstr, Js::ArgSlot argCount)
  252. {
  253. return this->lowererMDArch.LowerCall(callInstr, argCount);
  254. }
  255. IR::Instr *
  256. LowererMD::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
  257. {
  258. return this->lowererMDArch.LowerCallI(callInstr, callFlags, isHelper, insertBeforeInstrForCFG);
  259. }
  260. IR::Instr *
  261. LowererMD::LowerAsmJsCallI(IR::Instr * callInstr)
  262. {
  263. #if DBG
  264. if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
  265. {
  266. this->GenerateDebugBreak(callInstr->m_next);
  267. }
  268. #endif
  269. return this->lowererMDArch.LowerAsmJsCallI(callInstr);
  270. }
  271. IR::Instr *
  272. LowererMD::LowerAsmJsCallE(IR::Instr * callInstr)
  273. {
  274. #if DBG
  275. if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
  276. {
  277. this->GenerateDebugBreak(callInstr->m_next);
  278. }
  279. #endif
  280. return this->lowererMDArch.LowerAsmJsCallE(callInstr);
  281. }
  282. IR::Instr *
  283. LowererMD::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
  284. {
  285. return this->lowererMDArch.LowerWasmArrayBoundsCheck(instr, addrOpnd);
  286. }
  287. void LowererMD::LowerAtomicStore(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
  288. {
  289. return this->lowererMDArch.LowerAtomicStore(dst, src1, insertBeforeInstr);
  290. }
  291. void LowererMD::LowerAtomicLoad(IR::Opnd * dst, IR::Opnd * src1, IR::Instr * insertBeforeInstr)
  292. {
  293. return this->lowererMDArch.LowerAtomicLoad(dst, src1, insertBeforeInstr);
  294. }
  295. IR::Instr *
  296. LowererMD::LowerAsmJsLdElemHelper(IR::Instr * callInstr)
  297. {
  298. return this->lowererMDArch.LowerAsmJsLdElemHelper(callInstr);
  299. }
  300. IR::Instr *
  301. LowererMD::LowerAsmJsStElemHelper(IR::Instr * callInstr)
  302. {
  303. return this->lowererMDArch.LowerAsmJsStElemHelper(callInstr);
  304. }
  305. IR::Instr *
  306. LowererMD::LoadInt64HelperArgument(IR::Instr * instr, IR::Opnd* opnd)
  307. {
  308. return this->lowererMDArch.LoadInt64HelperArgument(instr, opnd);
  309. }
  310. IR::Instr *
  311. LowererMD::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  312. {
  313. return this->lowererMDArch.LoadHelperArgument(instr, opndArg);
  314. }
  315. IR::Instr *
  316. LowererMD::LoadDoubleHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  317. {
  318. return this->lowererMDArch.LoadDoubleHelperArgument(instr, opndArg);
  319. }
  320. IR::Instr *
  321. LowererMD::LoadFloatHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  322. {
  323. return this->lowererMDArch.LoadFloatHelperArgument(instr, opndArg);
  324. }
  325. IR::Instr *
  326. LowererMD::LowerEntryInstr(IR::EntryInstr * entryInstr)
  327. {
  328. return this->lowererMDArch.LowerEntryInstr(entryInstr);
  329. }
  330. IR::Instr *
  331. LowererMD::LowerExitInstr(IR::ExitInstr * exitInstr)
  332. {
  333. return this->lowererMDArch.LowerExitInstr(exitInstr);
  334. }
  335. IR::Instr *
  336. LowererMD::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  337. {
  338. return this->lowererMDArch.LowerExitInstrAsmJs(exitInstr);
  339. }
  340. IR::Instr *
  341. LowererMD::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  342. {
  343. return this->lowererMDArch.LoadNewScObjFirstArg(instr, dst, extraArgs);
  344. }
// Lower a try-entry instruction into a call to the given EH helper. Arguments
// are loaded back-to-front before the try label: try address, handler address,
// frame pointer, [x64: spill size, args size,] optional hasBailedOutOffset,
// and the script context. The helper returns a continuation address, which we
// jump to indirectly.
IR::Instr *
LowererMD::LowerTry(IR::Instr *tryInstr, IR::JnHelperMethod helperMethod)
{
    // Mark the entry to the try
    IR::Instr *instr = tryInstr->GetNextRealInstrOrLabel();
    AssertMsg(instr->IsLabelInstr(), "No label at the entry to a try?");
    IR::LabelInstr *tryAddr = instr->AsLabelInstr();
    // Arg 5: ScriptContext
    this->m_lowerer->LoadScriptContext(tryAddr);
    if (tryInstr->m_opcode == Js::OpCode::TryCatch || (this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout)))
    {
        // Arg 4 : hasBailedOutOffset
        IR::Opnd * hasBailedOutOffset = IR::IntConstOpnd::New(this->m_func->m_hasBailedOutSym->m_offset, TyInt32, this->m_func);
        this->LoadHelperArgument(tryAddr, hasBailedOutOffset);
    }
#ifdef _M_X64
    // Arg: args size (resolved late via the LdArgSize pseudo-op)
    IR::RegOpnd *argsSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdArgSize, argsSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, argsSizeOpnd);
    // Arg: spill size (resolved late via the LdSpillSize pseudo-op)
    IR::RegOpnd *spillSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdSpillSize, spillSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, spillSizeOpnd);
#endif
    // Arg 3: frame pointer
    IR::RegOpnd *ebpOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegBlockPointer(), TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, ebpOpnd);
    // Arg 2: handler address
    IR::LabelInstr *helperAddr = tryInstr->AsBranchInstr()->GetTarget();
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(helperAddr, this->m_func));
    // Arg 1: try address
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(tryAddr, this->m_func));
    // Call the helper; its result (the continuation address) lands in the
    // return register.
    IR::RegOpnd *continuationAddr =
        IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr *callInstr = IR::Instr::New(
        Js::OpCode::Call, continuationAddr, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    tryAddr->InsertBefore(callInstr);
    this->LowerCall(callInstr, 0);
#ifdef _M_X64
    {
        // Emit some instruction to separate the CALL from the JMP following it. The OS stack unwinder
        // mistakes the JMP for the start of the epilog otherwise.
        IR::Instr *nop = IR::Instr::New(Js::OpCode::NOP, m_func);
        tryAddr->InsertBefore(nop);
    }
#endif
    // Jump to the continuation address supplied by the helper
    IR::BranchInstr *branchInstr = IR::MultiBranchInstr::New(Js::OpCode::JMP, continuationAddr, this->m_func);
    tryAddr->InsertBefore(branchInstr);
    return tryInstr->m_prev;
}
  398. IR::Instr *
  399. LowererMD::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  400. {
  401. return lowererMDArch.LowerEHRegionReturn(insertBeforeInstr, targetOpnd);
  402. }
// Lower the end of a finally region that completed normally: return a null
// continuation address to the EH helper so execution resumes at the point
// determined by the try or the exception handler.
IR::Instr *
LowererMD::LowerLeaveNull(IR::Instr *finallyEndInstr)
{
    IR::Instr *instrPrev = finallyEndInstr->m_prev;
    IR::Instr *instr = nullptr;
    // Return a null continuation address to the helper: execution will resume at the point determined by the try
    // or the exception handler.
    // XOR eax, eax — zero the return register.
    IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg,this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::XOR, retReg, this->m_func);
    IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr->SetSrc1(eaxOpnd);
    instr->SetSrc2(eaxOpnd);
    finallyEndInstr->InsertBefore(instr);
#if _M_X64
    {
        // amd64_ReturnFromCallWithFakeFrame expects to find the spill size and args size
        // in REG_EH_SPILL_SIZE and REG_EH_ARGS_SIZE.
        // MOV REG_EH_SPILL_SIZE, spillSize
        IR::Instr *movR8 = IR::Instr::New(Js::OpCode::LdSpillSize,
            IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR8);
        // MOV REG_EH_ARGS_SIZE, argsSize
        IR::Instr *movR9 = IR::Instr::New(Js::OpCode::LdArgSize,
            IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR9);
        // Push the fake-frame return thunk's address so the RET below lands in
        // amd64_ReturnFromCallWithFakeFrame.
        IR::Opnd *targetOpnd = IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
        IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
            targetOpnd,
            IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movTarget);
        IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
        push->SetSrc1(targetOpnd);
        finallyEndInstr->InsertBefore(push);
    }
#endif
    // RET 0 with the zeroed return register as its value.
    IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::RET, this->m_func);
    instr->SetSrc1(intSrc);
    instr->SetSrc2(retReg);
    finallyEndInstr->InsertBefore(instr);
    finallyEndInstr->Remove();
    return instrPrev;
}
  449. ///----------------------------------------------------------------------------
  450. ///
  451. /// LowererMD::Init
  452. ///
  453. ///----------------------------------------------------------------------------
  454. void
  455. LowererMD::Init(Lowerer *lowerer)
  456. {
  457. m_lowerer = lowerer;
  458. this->lowererMDArch.Init(this);
  459. #ifdef ENABLE_WASM_SIMD
  460. Simd128InitOpcodeMap();
  461. #endif
  462. }
  463. ///----------------------------------------------------------------------------
  464. ///
  465. /// LowererMD::LoadInputParamCount
  466. ///
  467. /// Load the passed-in parameter count from the appropriate EBP slot.
  468. ///
  469. ///----------------------------------------------------------------------------
// Load the passed-in argument count from the frame's callinfo into a new
// register: copies the callinfo word, masks off the call-flag bits (the count
// occupies the low 24 bits), then applies 'adjust' (e.g. -1 to exclude "this").
// Returns the last inserted instruction.
IR::Instr *
LowererMD::LoadInputParamCount(IR::Instr * instrInsert, int adjust, bool needFlags)
{
    IR::Instr * instr;
    IR::RegOpnd * dstOpnd;
    IR::SymOpnd * srcOpnd;
    srcOpnd = Lowerer::LoadCallInfo(instrInsert);
    dstOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, dstOpnd, srcOpnd, this->m_func);
    instrInsert->InsertBefore(instr);
    // Copy the callinfo before masking off the param count
    // Guard: the count field must be exactly 24 bits wide for the mask below.
    Assert(Js::CallInfo::ksizeofCount == 24);
    // Mask off call flags from callinfo
    instr = IR::Instr::New(Js::OpCode::AND, dstOpnd, dstOpnd,
        IR::IntConstOpnd::New(0x00FFFFFF, TyMachReg, this->m_func, true), this->m_func);
    instrInsert->InsertBefore(instr);
    // SUB dst, -adjust == ADD dst, adjust.
    // NOTE(review): needFlags is not consulted — InsertSub always requests
    // flags here; confirm callers are fine with flags being set regardless.
    instr = m_lowerer->InsertSub(true, dstOpnd, dstOpnd, IR::IntConstOpnd::New(-adjust, TyMachReg, this->m_func), instrInsert);
    return instr;
}
// Rewrite 'instr' to load a pointer to the first user argument (past "this").
// In a loop body the args come from the interpreter frame that was passed in;
// otherwise the arch-specific lowerer reads them off the JIT frame.
IR::Instr *
LowererMD::LoadStackArgPtr(IR::Instr * instr)
{
    if (this->m_func->IsLoopBody())
    {
        // Get the first user param from the interpreter frame instance that was passed in.
        // These args don't include the func object and callinfo; we just need to advance past "this".
        // t1 = MOV [prm1 + m_inParams]
        // dst = LEA &[t1 + sizeof(var)]
        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInParams();
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Instr *instrLdParams = IR::Instr::New(Js::OpCode::MOV, tmpOpnd, indirOpnd, this->m_func);
        instr->InsertBefore(instrLdParams);
        // Skip one Var slot ("this") and take the address of the next slot.
        indirOpnd = IR::IndirOpnd::New(tmpOpnd, sizeof(Js::Var), TyMachReg, this->m_func);
        instr->SetSrc1(indirOpnd);
        instr->m_opcode = Js::OpCode::LEA;
        return instr->m_prev;
    }
    else
    {
        return this->lowererMDArch.LoadStackArgPtr(instr);
    }
}
// Rewrite 'instr' into a MOV that loads the arguments object: from the
// interpreter frame when jitting a loop body, otherwise from the function's
// stack-arguments slot.
IR::Instr *
LowererMD::LoadArgumentsFromFrame(IR::Instr * instr)
{
    if (this->m_func->IsLoopBody())
    {
        // Get the arguments ptr from the interpreter frame instance that was passed in.
        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        int32 offset = (int32)Js::InterpreterStackFrame::GetOffsetOfArguments();
        instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyMachReg, this->m_func));
    }
    else
    {
        instr->SetSrc1(this->CreateStackArgumentsSlotOpnd());
    }
    instr->m_opcode = Js::OpCode::MOV;
    return instr->m_prev;
}
// Rewrite 'instr' into a MOV that loads the argument count as a 32-bit int.
// In a loop body the count comes from the interpreter frame's in-slots count;
// otherwise it is read from the callinfo slot in the JIT frame.
IR::Instr *
LowererMD::LoadArgumentCount(IR::Instr * instr)
{
    if (this->m_func->IsLoopBody())
    {
        // Pull the arg count from the interpreter frame instance that was passed in.
        // (The callinfo in the loop body's frame just shows the single parameter, the interpreter frame.)
        Assert(this->m_func->m_loopParamSym);
        IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
        size_t offset = Js::InterpreterStackFrame::GetOffsetOfInSlotsCount();
        instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, (int32)offset, TyInt32, this->m_func));
    }
    else
    {
        // Address the callinfo slot relative to the frame pointer.
        StackSym *sym = StackSym::New(TyVar, this->m_func);
        this->m_func->SetArgOffset(sym, (Js::JavascriptFunctionArgIndex_CallInfo - Js::JavascriptFunctionArgIndex_Frame) * sizeof(Js::Var));
        instr->SetSrc1(IR::SymOpnd::New(sym, TyMachReg, this->m_func));
    }
    instr->m_opcode = Js::OpCode::MOV;
    return instr->m_prev;
}
  555. IR::Instr *
  556. LowererMD::LoadHeapArguments(IR::Instr * instrArgs)
  557. {
  558. return this->lowererMDArch.LoadHeapArguments(instrArgs);
  559. }
  560. IR::Instr *
  561. LowererMD::LoadHeapArgsCached(IR::Instr * instrArgs)
  562. {
  563. return this->lowererMDArch.LoadHeapArgsCached(instrArgs);
  564. }
  565. ///----------------------------------------------------------------------------
  566. ///
  567. /// LowererMD::ChangeToHelperCall
  568. ///
  569. /// Change the current instruction to a call to the given helper.
  570. ///
  571. ///----------------------------------------------------------------------------
// Convert 'callInstr' into a CALL to the given helper. If the instruction
// carries bailout info, the bailout is split off into its own instruction
// (before or around the call depending on the bailout kind) and lowered after
// the call — helper arguments must be fully staged before another helper call
// can be started on AMD64. Returns the instruction to continue lowering from.
IR::Instr *
LowererMD::ChangeToHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod, IR::LabelInstr *labelBailOut,
    IR::Opnd *opndBailOutArg, IR::PropertySymOpnd *propSymOpnd, bool isHelperContinuation)
{
#if DBG
    this->m_lowerer->ReconcileWithLowererStateOnHelperCall(callInstr, helperMethod);
#endif
    IR::Instr * bailOutInstr = callInstr;
    if (callInstr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = callInstr->GetBailOutKind();
        if (bailOutKind == IR::BailOutOnNotPrimitive ||
            bailOutKind == IR::BailOutOnPowIntIntOverflow)
        {
            // These kinds bail out *after* the helper runs, based on its
            // result: clone the call onto a fresh instruction and turn the
            // original (which keeps the bailout info) into the bail-on check
            // reading opndBailOutArg.
            callInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
            bailOutInstr->TransferTo(callInstr);
            bailOutInstr->InsertBefore(callInstr);
            bailOutInstr->m_opcode = bailOutKind == IR::BailOutOnNotPrimitive
                                        ? Js::OpCode::BailOnNotPrimitive
                                        : Js::OpCode::BailOnPowIntIntOverflow;
            bailOutInstr->SetSrc1(opndBailOutArg);
        }
        else
        {
            // Other kinds guard against implicit calls inside the helper.
            bailOutInstr = this->m_lowerer->SplitBailOnImplicitCall(callInstr);
        }
    }
    callInstr->m_opcode = Js::OpCode::CALL;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->lowererMDArch.GetHelperArgsCount(), m_func);
    if (helperCallOpnd->IsDiagHelperCallOpnd())
    {
        // Load arguments for the wrapper.
        this->LoadHelperArgument(callInstr, IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKindDynamicMisc, m_func));
        this->m_lowerer->LoadScriptContext(callInstr);
    }
    callInstr->SetSrc1(helperCallOpnd);
    IR::Instr * instrRet = this->lowererMDArch.LowerCall(callInstr, 0);
    if (bailOutInstr != callInstr)
    {
        // The bailout needs to be lowered after we lower the helper call because the helper argument
        // has already been loaded. We need to drain them on AMD64 before starting another helper call
        if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotObject)
        {
            this->m_lowerer->LowerBailOnNotObject(bailOutInstr, nullptr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotPrimitive ||
                 bailOutInstr->m_opcode == Js::OpCode::BailOnPowIntIntOverflow)
        {
            this->m_lowerer->LowerBailOnTrue(bailOutInstr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOut)
        {
            this->m_lowerer->GenerateBailOut(bailOutInstr, nullptr, labelBailOut);
        }
        else
        {
            this->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr, nullptr, labelBailOut, propSymOpnd, isHelperContinuation);
        }
    }
#if DBG
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(instrRet->m_next);
    }
#endif
    return instrRet;
}
  639. IR::Instr* LowererMD::ChangeToHelperCallMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  640. {
  641. this->m_lowerer->LoadScriptContext(instr);
  642. return this->ChangeToHelperCall(instr, helperMethod);
  643. }
  644. ///----------------------------------------------------------------------------
  645. ///
  646. /// LowererMD::ChangeToAssign
  647. ///
  648. /// Change to a MOV.
  649. ///
  650. ///----------------------------------------------------------------------------
  651. IR::Instr *
  652. LowererMD::ChangeToAssignNoBarrierCheck(IR::Instr * instr)
  653. {
  654. return ChangeToAssign(instr, instr->GetDst()->GetType());
  655. }
  656. IR::Instr *
  657. LowererMD::ChangeToAssign(IR::Instr * instr)
  658. {
  659. return ChangeToWriteBarrierAssign(instr, instr->m_func);
  660. }
// Turn 'instr' into the machine move appropriate for 'type' and legalize its
// operand forms. Returns the (possibly unchanged) instruction.
IR::Instr *
LowererMD::ChangeToAssign(IR::Instr * instr, IRType type)
{
    // Only a BailOutExpectingString bailout may survive conversion to a raw move.
    Assert(!instr->HasBailOutInfo() || instr->GetBailOutKind() == IR::BailOutExpectingString);
#if _M_IX86
    // On 32-bit, int64 values live in register pairs and need a dedicated split.
    if (IRType_IsInt64(type))
    {
        return LowererMDArch::ChangeToAssignInt64(instr);
    }
#endif
    instr->m_opcode = LowererMDArch::GetAssignOp(type);
    // Fix up any operand combinations the machine encoding cannot accept.
    Legalize(instr);
    return instr;
}
  675. ///----------------------------------------------------------------------------
  676. ///
  677. /// LowererMD::LowerRet
  678. ///
  679. /// Lower Ret to "MOV EAX, src"
  680. /// The real RET is inserted at the exit of the function when emitting the
  681. /// epilog.
  682. ///
  683. ///----------------------------------------------------------------------------
// Lower Ret to "MOV <return reg>, src". The real RET is emitted with the
// epilog. For asm.js/Wasm the return register is chosen from the declared
// return type; on 32-bit targets an Int64 result is split into an EAX/EDX
// register pair. Loop bodies skip the asm.js path because their "return value"
// is the bytecode offset.
IR::Instr *
LowererMD::LowerRet(IR::Instr * retInstr)
{
    IR::RegOpnd * retReg = nullptr;
    bool needsRetReg = true;
#ifdef ASMJS_PLAT
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody()) // for loop body ret is the bytecodeoffset
    {
        Js::AsmJsRetType::Which asmType = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType();
        IRType regType = TyInt32;
        switch (asmType)
        {
        case Js::AsmJsRetType::Double:
            regType = TyFloat64;
            break;
        case Js::AsmJsRetType::Float:
            regType = TyFloat32;
            break;
        case Js::AsmJsRetType::Int64:
        {
            regType = TyInt64;
#if LOWER_SPLIT_INT64
            // 32-bit: the 64-bit result is returned as a low/high pair; the
            // low half goes in src1 (return reg) and the high half in EDX.
            regType = TyInt32;
            {
                IR::Opnd* lowOpnd = nullptr;
                IR::Opnd* highOpnd = nullptr;
                if (retInstr->GetSrc1()->IsRegOpnd())
                {
                    Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(retInstr->GetSrc1()->AsRegOpnd());
                    lowOpnd = srcPair.low;
                    highOpnd = srcPair.high;
                }
                else if (retInstr->GetSrc1()->IsImmediateOpnd())
                {
                    // Split the constant into its 32-bit halves.
                    int64 value = retInstr->GetSrc1()->GetImmediateValue(m_func);
                    lowOpnd = IR::IntConstOpnd::New(value & UINT_MAX, regType, m_func);
                    highOpnd = IR::IntConstOpnd::New(value >> 32, regType, m_func);
                }
                else
                {
                    Assert(UNREACHED);
                }
                retInstr->UnlinkSrc1();
                retInstr->SetSrc1(lowOpnd);
                // Mov high bits to edx
                IR::RegOpnd* regEdx = IR::RegOpnd::New(regType, this->m_func);
                regEdx->SetReg(RegEDX);
                Lowerer::InsertMove(regEdx, highOpnd, retInstr);
                retInstr->SetSrc2(regEdx);
            }
#endif
            break;
        }
        case Js::AsmJsRetType::Void:
            // Nothing to return; skip the return-register move entirely.
            needsRetReg = false;
            break;
        case Js::AsmJsRetType::Signed:
            regType = TyInt32;
            break;
#ifdef ENABLE_WASM_SIMD
        // SIMD returns: pick the matching 128-bit lane type.
        case Js::AsmJsRetType::Float32x4:
            regType = TySimd128F4;
            break;
        case Js::AsmJsRetType::Int32x4:
            regType = TySimd128I4;
            break;
        case Js::AsmJsRetType::Float64x2:
            regType = TySimd128D2;
            break;
        case Js::AsmJsRetType::Int64x2:
            regType = TySimd128I2;
            break;
        case Js::AsmJsRetType::Int16x8:
            regType = TySimd128I8;
            break;
        case Js::AsmJsRetType::Int8x16:
            regType = TySimd128I16;
            break;
        case Js::AsmJsRetType::Uint32x4:
            regType = TySimd128U4;
            break;
        case Js::AsmJsRetType::Uint16x8:
            regType = TySimd128U8;
            break;
        case Js::AsmJsRetType::Uint8x16:
            regType = TySimd128U16;
            break;
        case Js::AsmJsRetType::Bool32x4:
            regType = TySimd128B4;
            break;
        case Js::AsmJsRetType::Bool16x8:
            regType = TySimd128B8;
            break;
        case Js::AsmJsRetType::Bool8x16:
            regType = TySimd128B16;
            break;
#endif
        default:
            Assert(UNREACHED);
        }
        if (needsRetReg)
        {
            retReg = IR::RegOpnd::New(regType, m_func);
            retReg->SetReg(lowererMDArch.GetRegReturnAsmJs(regType));
        }
    }
    else
#endif
    {
        // Non-asm.js (or loop body): the Var result goes in the standard
        // machine return register.
        retReg = IR::RegOpnd::New(TyMachReg, m_func);
        retReg->SetReg(lowererMDArch.GetRegReturn(TyMachReg));
    }
    if (needsRetReg)
    {
        // MOV retReg, src — keep retReg as the Ret's src so it stays live
        // until the epilog's RET.
        Lowerer::InsertMove(retReg, retInstr->UnlinkSrc1(), retInstr);
        retInstr->SetSrc1(retReg);
    }
    return retInstr;
}
  803. ///----------------------------------------------------------------------------
  804. ///
  805. /// LowererMD::LowerCondBranch
  806. ///
  807. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerCondBranch(IR::Instr * instr)
{
    // Lower an HIR conditional branch in place: insert the flag-setting
    // instruction (TEST/CMP/COMISD/COMISS) before it and rewrite the branch
    // opcode to the matching machine Jcc.
    // Returns the inserted compare/test instruction (nullptr if none).
    AssertMsg(instr->GetSrc1() != nullptr, "Expected src opnds on conditional branch");
    Assert(!instr->HasBailOutInfo());
    IR::Opnd * opndSrc1 = instr->UnlinkSrc1();
    IR::Instr * instrPrev = nullptr;

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrNotNull_A:
    case Js::OpCode::BrOnObject_A:
    case Js::OpCode::BrOnClassConstructor:
    case Js::OpCode::BrOnBaseConstructorKind:
        // Single-operand boolean branches: TEST src, src then JNE/JEQ.
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(opndSrc1);
        instr->InsertBefore(instrPrev);

        // Only BrFalse_A branches on zero; all the others branch on non-zero.
        if (instr->m_opcode != Js::OpCode::BrFalse_A)
        {
            instr->m_opcode = Js::OpCode::JNE;
        }
        else
        {
            instr->m_opcode = Js::OpCode::JEQ;
        }
        break;

    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        AssertMsg(0, "BrOnEmpty opcodes should not be passed to MD lowerer");
        break;

    default:
        // Two-operand relational branches.
        IR::Opnd * opndSrc2 = instr->UnlinkSrc2();
        AssertMsg(opndSrc2 != nullptr, "Expected 2 src's on non-boolean branch");

        if (opndSrc1->IsFloat())
        {
            // Float compare: COMISD (double) or COMISS (single).
            Assert(opndSrc1->GetType() == opndSrc2->GetType());
            instrPrev = IR::Instr::New(opndSrc1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS, m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
        }
        else
        {
            // This check assumes src1 is a variable.
            if (opndSrc2->IsIntConstOpnd() && opndSrc2->AsIntConstOpnd()->GetValue() == 0)
            {
                // Compare against zero: TEST src1, src1 is shorter than CMP src1, 0.
                instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
                instrPrev->SetSrc1(opndSrc1);
                instrPrev->SetSrc2(opndSrc1);
                instr->InsertBefore(instrPrev);
                opndSrc2->Free(this->m_func);
            }
            else
            {
                instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);

                //
                // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
                // relevant only on AMD64.
                //
                opndSrc1 = instrPrev->SetSrc1(opndSrc1);
                opndSrc2 = instrPrev->SetSrc2(opndSrc2);
                instr->InsertBefore(instrPrev);
                // CMP operand combinations are constrained; legalize the new instr.
                LowererMD::Legalize(instrPrev);
            }
        }
        instr->m_opcode = LowererMD::MDBranchOpcode(instr->m_opcode);
        break;
    }
    return instrPrev;
}
  882. ///----------------------------------------------------------------------------
  883. ///
  884. /// LowererMD::MDBranchOpcode
  885. ///
  886. /// Map HIR branch opcode to machine-dependent equivalent.
  887. ///
  888. ///----------------------------------------------------------------------------
  889. Js::OpCode
  890. LowererMD::MDBranchOpcode(Js::OpCode opcode)
  891. {
  892. switch (opcode)
  893. {
  894. case Js::OpCode::BrSrEq_A:
  895. case Js::OpCode::BrEq_A:
  896. case Js::OpCode::BrSrNotNeq_A:
  897. case Js::OpCode::BrNotNeq_A:
  898. case Js::OpCode::BrAddr_A:
  899. return Js::OpCode::JEQ;
  900. case Js::OpCode::BrSrNeq_A:
  901. case Js::OpCode::BrNeq_A:
  902. case Js::OpCode::BrSrNotEq_A:
  903. case Js::OpCode::BrNotEq_A:
  904. case Js::OpCode::BrNotAddr_A:
  905. return Js::OpCode::JNE;
  906. case Js::OpCode::BrLt_A:
  907. case Js::OpCode::BrNotGe_A:
  908. return Js::OpCode::JLT;
  909. case Js::OpCode::BrLe_A:
  910. case Js::OpCode::BrNotGt_A:
  911. return Js::OpCode::JLE;
  912. case Js::OpCode::BrGt_A:
  913. case Js::OpCode::BrNotLe_A:
  914. return Js::OpCode::JGT;
  915. case Js::OpCode::BrGe_A:
  916. case Js::OpCode::BrNotLt_A:
  917. return Js::OpCode::JGE;
  918. default:
  919. AssertMsg(0, "Branch opcode has no MD mapping");
  920. return opcode;
  921. }
  922. }
  923. Js::OpCode
  924. LowererMD::MDConvertFloat64ToInt32Opcode(const RoundMode roundMode)
  925. {
  926. switch (roundMode)
  927. {
  928. case RoundModeTowardZero:
  929. return Js::OpCode::CVTTSD2SI;
  930. case RoundModeTowardInteger:
  931. return Js::OpCode::Nop;
  932. case RoundModeHalfToEven:
  933. return Js::OpCode::CVTSD2SI;
  934. default:
  935. AssertMsg(0, "RoundMode has no MD mapping.");
  936. return Js::OpCode::Nop;
  937. }
  938. }
  939. Js::OpCode
  940. LowererMD::MDUnsignedBranchOpcode(Js::OpCode opcode)
  941. {
  942. switch (opcode)
  943. {
  944. case Js::OpCode::BrEq_A:
  945. case Js::OpCode::BrSrEq_A:
  946. case Js::OpCode::BrSrNotNeq_A:
  947. case Js::OpCode::BrNotNeq_A:
  948. case Js::OpCode::BrAddr_A:
  949. return Js::OpCode::JEQ;
  950. case Js::OpCode::BrNeq_A:
  951. case Js::OpCode::BrSrNeq_A:
  952. case Js::OpCode::BrSrNotEq_A:
  953. case Js::OpCode::BrNotEq_A:
  954. case Js::OpCode::BrNotAddr_A:
  955. return Js::OpCode::JNE;
  956. case Js::OpCode::BrLt_A:
  957. case Js::OpCode::BrNotGe_A:
  958. return Js::OpCode::JB;
  959. case Js::OpCode::BrLe_A:
  960. case Js::OpCode::BrNotGt_A:
  961. return Js::OpCode::JBE;
  962. case Js::OpCode::BrGt_A:
  963. case Js::OpCode::BrNotLe_A:
  964. return Js::OpCode::JA;
  965. case Js::OpCode::BrGe_A:
  966. case Js::OpCode::BrNotLt_A:
  967. return Js::OpCode::JAE;
  968. default:
  969. AssertMsg(0, "Branch opcode has no MD mapping");
  970. return opcode;
  971. }
  972. }
  973. Js::OpCode LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode opcode)
  974. {
  975. Assert(opcode == Js::OpCode::BrLt_A || opcode == Js::OpCode::BrGe_A);
  976. return opcode == Js::OpCode::BrLt_A ? Js::OpCode::JSB : Js::OpCode::JNSB;
  977. }
// Rewrite a generic add instruction in place into its MD form:
// ADDSD (double), ADDSS (float32), or ADD (integer), with an INC peephole.
// needFlags: caller relies on the EFLAGS result, so the INC substitution
// (INC and ADD update flags differently) is suppressed.
void LowererMD::ChangeToAdd(IR::Instr *const instr, const bool needFlags)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    if(instr->GetDst()->IsFloat64())
    {
        Assert(instr->GetSrc1()->IsFloat64());
        Assert(instr->GetSrc2()->IsFloat64());
        Assert(!needFlags);
        instr->m_opcode = Js::OpCode::ADDSD;
        return;
    }
    else if (instr->GetDst()->IsFloat32())
    {
        Assert(instr->GetSrc1()->IsFloat32());
        Assert(instr->GetSrc2()->IsFloat32());
        Assert(!needFlags);
        instr->m_opcode = Js::OpCode::ADDSS;
        return;
    }

    // Integer add: legalize first so operand forms are valid before we
    // consider the INC rewrite below.
    instr->m_opcode = Js::OpCode::ADD;
    Legalize(instr);
    if (!needFlags)
    {
        // Prefer INC for add by one
        // (valid for either constant position, as long as dst aliases the
        // non-constant operand so the single-operand INC form applies)
        if ((instr->GetDst()->IsEqual(instr->GetSrc1()) &&
            instr->GetSrc2()->IsIntConstOpnd() &&
            instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1) ||
            (instr->GetDst()->IsEqual(instr->GetSrc2()) &&
            instr->GetSrc1()->IsIntConstOpnd() &&
            instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 1))
        {
            if (instr->GetSrc1()->IsIntConstOpnd())
            {
                // Swap the operands, such that we would create (dst = INC src2)
                instr->SwapOpnds();
            }
            instr->FreeSrc2();
            instr->m_opcode = Js::OpCode::INC;
        }
    }
}
// Rewrite a generic subtract instruction in place into its MD form:
// SUBSD (double) or SUB (integer), with a DEC peephole for "x -= 1".
// NOTE(review): unlike ChangeToAdd there is no Float32/SUBSS case here --
// presumably float32 subtracts never reach this path; confirm with callers.
// NOTE(review): the DEC substitution is not gated on !needFlags the way the
// INC substitution in ChangeToAdd is -- verify callers never need the carry
// flag from a sub-by-one.
void LowererMD::ChangeToSub(IR::Instr *const instr, const bool needFlags)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    if(instr->GetDst()->IsFloat64())
    {
        Assert(instr->GetSrc1()->IsFloat64());
        Assert(instr->GetSrc2()->IsFloat64());
        Assert(!needFlags);
        instr->m_opcode = Js::OpCode::SUBSD;
        return;
    }

    // Prefer DEC for sub by one
    // (dst must alias src1 since SUB is not commutative)
    if(instr->GetDst()->IsEqual(instr->GetSrc1()) &&
        instr->GetSrc2()->IsIntConstOpnd() &&
        instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1)
    {
        instr->FreeSrc2();
        instr->m_opcode = Js::OpCode::DEC;
        return;
    }

    instr->m_opcode = Js::OpCode::SUB;
}
// Rewrite a generic shift/rotate instruction in place into its MD form
// (SHL/SAR/SHR/ROL/ROR), masking a constant shift count to the bits the
// hardware uses. needFlags is unused on this target.
void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());

    switch(instr->m_opcode)
    {
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shl_I4:
            instr->m_opcode = Js::OpCode::SHL;
            break;

        case Js::OpCode::Shr_A:
        case Js::OpCode::Shr_I4:
            // Shr is the arithmetic (sign-preserving) shift right.
            instr->m_opcode = Js::OpCode::SAR;
            break;

        case Js::OpCode::ShrU_A:
        case Js::OpCode::ShrU_I4:
            // ShrU is the logical (zero-filling) shift right.
            instr->m_opcode = Js::OpCode::SHR;
            break;

        case Js::OpCode::Rol_I4:
            instr->m_opcode = Js::OpCode::ROL;
            break;

        case Js::OpCode::Ror_I4:
            instr->m_opcode = Js::OpCode::ROR;
            break;

        default:
            Assert(false);
            __assume(false);
    }

    if(instr->GetSrc2()->IsIntConstOpnd() && !instr->GetSrc1()->IsInt64())
    {
        // Mask the constant count to the bits the hardware honors:
        // 0-63 for 8-byte operands, 0-31 otherwise.
        IntConstType value = instr->GetSrc2()->AsIntConstOpnd()->GetValue();
        value &= TySize[instr->GetDst()->GetType()] == 8 ? 63 : 31;
        instr->GetSrc2()->AsIntConstOpnd()->SetValue(value);
    }
}
// Rewrite a multiply into an MD multiply. For non-32-bit overflow checks the
// single-operand IMUL form is required (result pinned to eax/edx), so the
// operands are marshaled through eax; otherwise the two-operand IMUL2 form
// is emitted via EmitInt4Instr.
void LowererMD::ChangeToIMul(IR::Instr *const instr, bool hasOverflowCheck)
{
    // If non-32 bit overflow check is needed, we have to use the IMUL form.
    if (hasOverflowCheck && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
    {
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, instr->m_func);
        IR::Opnd *temp2 = nullptr;
        // MOV eax, src1
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, regEAX, instr->GetSrc1(), instr->m_func));

        if (instr->GetSrc2()->IsImmediateOpnd())
        {
            // IMUL cannot take an immediate src; load it into a register first.
            // MOV reg, imm
            temp2 = IR::RegOpnd::New(TyInt32, instr->m_func);

            IR::Opnd * src2 = instr->GetSrc2();
            // Propagate the original operand's dontEncode flag to the new
            // constant so encoder-level address blinding stays consistent.
            bool dontEncode = false;
            if (src2->IsHelperCallOpnd())
            {
                dontEncode = true;
            }
            else if (src2->IsIntConstOpnd() || src2->IsAddrOpnd())
            {
                dontEncode = src2->IsIntConstOpnd() ? src2->AsIntConstOpnd()->m_dontEncode : src2->AsAddrOpnd()->m_dontEncode;
            }
            else if (src2->IsInt64ConstOpnd())
            {
                dontEncode = false;
            }
            else
            {
                AssertMsg(false, "Unexpected immediate opnd");
                throw Js::OperationAbortedException();
            }

            instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, temp2,
                IR::IntConstOpnd::New((IntConstType)instr->GetSrc2()->GetImmediateValue(instr->m_func), TyInt32, instr->m_func, dontEncode),
                instr->m_func));
        }

        // eax = IMUL eax, reg
        instr->m_opcode = Js::OpCode::IMUL;
        instr->ReplaceSrc1(regEAX);

        if (temp2 != nullptr)
            instr->ReplaceSrc2(temp2);

        // Copy the original dst before retargeting the IMUL to eax, then
        // move the product back out.
        auto *dst = instr->GetDst()->Copy(instr->m_func);
        instr->ReplaceDst(regEAX);
        // MOV dst, eax
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, dst, regEAX, instr->m_func));
    }
    else
        EmitInt4Instr(instr); // IMUL2
}
  1135. const uint16
  1136. LowererMD::GetFormalParamOffset()
  1137. {
  1138. //In x86\x64 formal params were offset from EBP by the EBP chain, return address, and the 2 non-user params
  1139. return 4;
  1140. }
  1141. ///----------------------------------------------------------------------------
  1142. ///
  1143. /// LowererMD::ForceDstToReg
  1144. ///
  1145. ///----------------------------------------------------------------------------
  1146. void
  1147. LowererMD::ForceDstToReg(IR::Instr *instr)
  1148. {
  1149. IR::Opnd * dst = instr->GetDst();
  1150. if (dst->IsRegOpnd())
  1151. {
  1152. return;
  1153. }
  1154. if(dst->IsFloat64())
  1155. {
  1156. instr->SinkDst(Js::OpCode::MOVSD);
  1157. return;
  1158. }
  1159. instr->SinkDst(Js::OpCode::MOV);
  1160. }
// Per-opcode legality descriptor: bitmasks (LegalForms) of the operand forms
// the encoder accepts for an instruction's dst and its up to two srcs.
struct LegalInstrForms
{
    const LegalForms dst, src[2];
};
namespace LegalInstrFormsImpl
{
    // Named legal-form combinations referenced from MdOpCodes.h. Naming
    // convention: LEGAL_<dst>[_<src1>[_<src2>]] with R = register, M = memory,
    // I = 32-bit immediate, N = none, and an O prefix meaning the operand is
    // optional (LF_Optional).
    LegalInstrForms LEGAL_NONE      = { L_None, { L_None, L_None } };
    LegalInstrForms LEGAL_CUSTOM    = { LF_Custom, { LF_Custom, LF_Custom } };
    LegalInstrForms LEGAL_CALL      = { LF_Optional | L_Reg, { L_Reg | L_Mem | L_Ptr, L_None } };

    LegalInstrForms LEGAL_R         = { L_Reg, { L_None, L_None } };
    LegalInstrForms LEGAL_M         = { L_Mem, { L_None, L_None } };
    LegalInstrForms LEGAL_RM        = { L_Reg | L_Mem, { L_None, L_None } };

    LegalInstrForms LEGAL_N_I       = { L_None, { L_Imm32, L_None } };
    LegalInstrForms LEGAL_N_RMI     = { L_None, { L_Reg | L_Mem | L_Imm32, L_None } };

    LegalInstrForms LEGAL_R_R       = { L_Reg, { L_Reg, L_None } };
    LegalInstrForms LEGAL_R_M       = { L_Reg, { L_Mem, L_None } };
    LegalInstrForms LEGAL_M_M       = { L_Mem, { L_Mem, L_None } };
    LegalInstrForms LEGAL_R_OR      = { L_Reg, { LF_Optional | L_Reg, L_None } };
    LegalInstrForms LEGAL_R_RM      = { L_Reg, { L_Reg | L_Mem, L_None } };
    LegalInstrForms LEGAL_R_RMI     = { L_Reg, { L_Reg | L_Mem | L_Imm32, L_None } };
    LegalInstrForms LEGAL_RM_RM     = { L_Reg | L_Mem, { L_Reg | L_Mem, L_None } };

    LegalInstrForms LEGAL_N_R_R     = { L_None, { L_Reg, L_Reg } };
    LegalInstrForms LEGAL_N_I_OR    = { L_None, { L_Imm32, LF_Optional | L_Reg } };
    LegalInstrForms LEGAL_N_R_RM    = { L_None, { L_Reg, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_N_RM_RI   = { L_None, { L_Reg | L_Mem, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_N_RM_RMI  = { L_None, { L_Reg | L_Mem, L_Reg | L_Mem | L_Imm32 } };
    LegalInstrForms LEGAL_R_R_RM    = { L_Reg, { L_Reg, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_R_R_RI    = { L_Reg, { L_Reg, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_R_R_RMI   = { L_Reg, { L_Reg, L_Reg | L_Mem | L_Imm32 } };
    LegalInstrForms LEGAL_RM_R_I    = { L_Reg | L_Mem, { L_Reg, L_Imm32 } };
    LegalInstrForms LEGAL_R_RM_I    = { L_Reg, { L_Reg | L_Mem, L_Imm32 } };
    LegalInstrForms LEGAL_RM_RM_RM  = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Mem } };
    LegalInstrForms LEGAL_RM_RM_RI  = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Imm32 } };
    LegalInstrForms LEGAL_RM_RM_RMI = { L_Reg | L_Mem, { L_Reg | L_Mem, L_Reg | L_Mem | L_Imm32 } };
};
// One LegalInstrForms entry per MD opcode, generated in MdOpCodes.h
// declaration order; Legalize indexes it with (opcode - (MDStart + 1)).
LegalInstrForms AllLegalInstrForms[] = {
#define MACRO(name, jnLayout, attrib, byte2, form, opByte, dope, leadIn, legal, ...) LegalInstrFormsImpl::legal,
#include "MdOpCodes.h"
#undef MACRO
};
// Legalize the operand forms of one MD instruction so the encoder can emit
// it, inserting loads/hoists/swaps as needed. With verify=true nothing is
// mutated; asserts fire wherever legalization would have been required.
// fPostRegAlloc is not referenced in this body.
template <bool verify>
void
LowererMD::Legalize(IR::Instr *const instr, bool fPostRegAlloc)
{
    Assert(instr);
    Assert(!instr->isInlineeEntryInstr
        || (instr->m_opcode == Js::OpCode::MOV && instr->GetSrc1()->IsIntConstOpnd()));

    const bool isMDOpCode = instr->m_opcode > Js::OpCode::MDStart;
    Assert(isMDOpCode || Lowerer::ValidOpcodeAfterLower(instr, instr->m_func));
    // MD opcodes index the table generated from MdOpCodes.h.
    const LegalInstrForms legalInstrForms = isMDOpCode ? AllLegalInstrForms[instr->m_opcode - (Js::OpCode::MDStart + 1)] : LegalInstrFormsImpl::LEGAL_NONE;
    LegalForms dstForms = legalInstrForms.dst;
    LegalForms src1Forms = legalInstrForms.src[0];
    LegalForms src2Forms = legalInstrForms.src[1];
    bool hasSwitchCase = true;
    bool isCustomForm = (dstForms & LF_Custom) != 0;;
    switch(instr->m_opcode)
    {
        // Branches carry their target in the branch instr itself; no operand
        // legalization is needed.
        case Js::OpCode::JA:
        case Js::OpCode::JAE:
        case Js::OpCode::JB:
        case Js::OpCode::JBE:
        case Js::OpCode::JEQ:
        case Js::OpCode::JNE:
        case Js::OpCode::JLT:
        case Js::OpCode::JLE:
        case Js::OpCode::JGT:
        case Js::OpCode::JGE:
        case Js::OpCode::JNO:
        case Js::OpCode::JO:
        case Js::OpCode::JP:
        case Js::OpCode::JNP:
        case Js::OpCode::JNSB:
        case Js::OpCode::JSB:
        case Js::OpCode::JMP:
            Assert(instr->IsBranchInstr());
            break;

        case Js::OpCode::MOV:
        {
            Assert(instr->GetSrc2() == nullptr);

            IR::Opnd *const dst = instr->GetDst();
            const IRType dstType = dst->GetType();
            IR::Opnd *const src = instr->GetSrc1();
            const IRType srcType = src->GetType();
            if(TySize[dstType] > TySize[srcType])
            {
                // Widening move: must become an explicit sign/zero-extension
                // (or the constant must be widened in place).
                if (verify)
                {
                    return;
                }
            #if DBG
                switch(dstType)
                {
                    case TyInt32:
                    case TyUint32:
                #ifdef _M_X64
                    case TyInt64:
                    case TyUint64:
                #endif
                    case TyVar:
                        break;

                    default:
                        Assert(false);
                }
            #endif

                IR::IntConstOpnd *const intConstantSrc = src->IsIntConstOpnd() ? src->AsIntConstOpnd() : nullptr;
                // Widen a constant src in place instead of emitting an extend.
                const auto UpdateIntConstantSrc = [&](const size_t extendedValue)
                {
                    Assert(intConstantSrc);

                #ifdef _M_X64
                    if(TySize[dstType] > sizeof(IntConstType))
                    {
                        // Value no longer fits IntConstType: carry it as an address opnd.
                        instr->ReplaceSrc1(
                            IR::AddrOpnd::New(
                                reinterpret_cast<void *>(extendedValue),
                                IR::AddrOpndKindConstantVar,
                                instr->m_func,
                                intConstantSrc->m_dontEncode));
                    }
                    else
                #endif
                    {
                        intConstantSrc->SetType(dstType);
                        intConstantSrc->SetValue(static_cast<IntConstType>(extendedValue));
                    }
                };

                switch(srcType)
                {
                    case TyInt8:
                        if(intConstantSrc)
                        {
                            UpdateIntConstantSrc(static_cast<int8>(intConstantSrc->GetValue())); // sign-extend
                            break;
                        }
                        instr->m_opcode = Js::OpCode::MOVSX;
                        break;

                    case TyUint8:
                        if(intConstantSrc)
                        {
                            UpdateIntConstantSrc(static_cast<uint8>(intConstantSrc->GetValue())); // zero-extend
                            break;
                        }
                        instr->m_opcode = Js::OpCode::MOVZX;
                        break;

                    case TyInt16:
                        if(intConstantSrc)
                        {
                            UpdateIntConstantSrc(static_cast<int16>(intConstantSrc->GetValue())); // sign-extend
                            break;
                        }
                        instr->m_opcode = Js::OpCode::MOVSXW;
                        break;

                    case TyUint16:
                        if(intConstantSrc)
                        {
                            UpdateIntConstantSrc(static_cast<uint16>(intConstantSrc->GetValue())); // zero-extend
                            break;
                        }
                        instr->m_opcode = Js::OpCode::MOVZXW;
                        break;

                #ifdef _M_X64
                    case TyInt32:
                        if(intConstantSrc)
                        {
                            UpdateIntConstantSrc(static_cast<int32>(intConstantSrc->GetValue())); // sign-extend
                            break;
                        }
                        instr->m_opcode = Js::OpCode::MOVSXD;
                        break;

                    case TyUint32:
                        if(intConstantSrc)
                        {
                            UpdateIntConstantSrc(static_cast<uint32>(intConstantSrc->GetValue())); // zero-extend
                            break;
                        }
                        switch(dst->GetKind())
                        {
                            case IR::OpndKindReg:
                                // (mov r0.u32, r1.u32) clears the upper 32 bits of r0
                                dst->SetType(TyUint32);
                                instr->m_opcode = Js::OpCode::MOV_TRUNC;
                                break;

                            case IR::OpndKindSym:
                            case IR::OpndKindIndir:
                            case IR::OpndKindMemRef:
                                // Even if the src is a reg, we don't know if the upper 32 bits are zero. Copy the value to a
                                // reg first to zero-extend it to 64 bits, and then copy the 64-bit value to the original dst.
                                instr->HoistSrc1(Js::OpCode::MOV_TRUNC);
                                instr->GetSrc1()->SetType(dstType);
                                break;

                            default:
                                Assert(false);
                                __assume(false);
                        }
                        break;
                #endif

                    default:
                        Assert(false);
                        __assume(false);
                }
            }
            else if (TySize[dstType] < TySize[srcType])
            {
                // Narrowing move: just shrink the src type to match.
                instr->GetSrc1()->SetType(dst->GetType());
            }

            if(instr->m_opcode == Js::OpCode::MOV)
            {
                // Allow 64 bit values in x64 as well
                src1Forms = L_Reg | L_Mem | L_Ptr;
            #if _M_X64
                if (dst->IsMemoryOpnd())
                {
                    // Only allow <= 32 bit values
                    src1Forms = L_Reg | L_Imm32;
                }
            #endif
                LegalizeOpnds<verify>(
                    instr,
                    L_Reg | L_Mem,
                    src1Forms,
                    L_None);
            }
            else
            {
                // Opcode became an extend (MOVSX/MOVZX/...): reg dst only.
                LegalizeOpnds<verify>(
                    instr,
                    L_Reg,
                    L_Reg | L_Mem,
                    L_None);
            }
            break;
        }

        case Js::OpCode::CMOVA:
        case Js::OpCode::CMOVAE:
        case Js::OpCode::CMOVB:
        case Js::OpCode::CMOVBE:
        case Js::OpCode::CMOVE:
        case Js::OpCode::CMOVG:
        case Js::OpCode::CMOVGE:
        case Js::OpCode::CMOVL:
        case Js::OpCode::CMOVLE:
        case Js::OpCode::CMOVNE:
        case Js::OpCode::CMOVNO:
        case Js::OpCode::CMOVNP:
        case Js::OpCode::CMOVNS:
        case Js::OpCode::CMOVO:
        case Js::OpCode::CMOVP:
        case Js::OpCode::CMOVS:
            if (instr->GetSrc2())
            {
                Assert(instr->GetDst()->GetSize() == instr->GetSrc2()->GetSize());
                Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
                // 0 shouldn't be the src2 of a CMOVcc.
                // CMOVcc doesn't support moving a constant and the legalizer will hoist the load of the constant
                // to a register. If the constant was 0, Peeps will turn it into a XOR which, in turn, may change
                // the zero flags and hence the result of CMOVcc. If you do want to CMOVcc 0, you should load 0
                // into a register before the instruction whose result the CMOVcc depends on.
                Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() != 0);

                // sometimes we have fake src1 to help reg alloc
                LegalizeOpnds<verify>(
                    instr,
                    L_Reg,
                    L_Reg,
                    L_Reg | L_Mem);
            }
            else
            {
                Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
                LegalizeOpnds<verify>(
                    instr,
                    L_Reg,
                    L_Reg | L_Mem,
                    L_None);
            }
            break;

        case Js::OpCode::MOVSD:
        case Js::OpCode::MOVSS:
            Assert(instr->GetDst()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD? TyFloat64 : TyFloat32) || instr->GetDst()->IsSimd128());
            Assert(instr->GetSrc1()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD ? TyFloat64 : TyFloat32) || instr->GetSrc1()->IsSimd128());
            goto LegalizeDefault;

        case Js::OpCode::NOP:
        {
            Assert(!instr->GetSrc2());
#if _M_IX86
            RegNum edx = RegEDX;
#else
            RegNum edx = RegRDX;
#endif
            // Special case handled by peeps
            Assert(!instr->GetDst() || (instr->GetDst()->IsRegOpnd() && instr->GetDst()->AsRegOpnd()->GetReg() == edx));
            break;
        }

        case Js::OpCode::MOVSX:
        case Js::OpCode::MOVSXW:
            Assert(instr->GetDst()->GetSize() == 4 || instr->GetDst()->GetSize() == 8);
            Assert(instr->m_opcode != Js::OpCode::MOVSX || instr->GetSrc1()->GetSize() == 1);
            Assert(instr->m_opcode != Js::OpCode::MOVSXW || instr->GetSrc1()->GetSize() == 2);
            goto LegalizeDefault;

        case Js::OpCode::LOCKCMPXCHG8B:
        case Js::OpCode::CMPXCHG8B:
        {
            // CMPXCHG8B pins its operands to fixed registers; before FinalLower
            // the dst/src2 list opnds must name exactly those registers.
            const auto getRegMask = [](IR::Opnd* opnd)
            {
                Assert(opnd->IsListOpnd());
                return opnd->AsListOpnd()->Reduce(
                    [](int i, IR::Opnd* opnd) {
                        Assert(opnd->IsRegOpnd());
                        return 1 << opnd->AsRegOpnd()->GetReg();
                    },
                    [](int i, uint32 regmask, uint32 allReg)
                    {
                        AssertMsg((allReg & regmask) == 0, "Should not have the same register twice");
                        return allReg | regmask;
                    }, 0);
            };
#if _M_IX86
            const uint32 dstMask = (1 << RegEAX | 1 << RegEDX);
            const uint32 srcMask = (1 << RegEAX | 1 << RegEBX | 1 << RegECX | 1 << RegEDX);
#else
            const uint32 dstMask = (1 << RegRAX | 1 << RegRDX);
            const uint32 srcMask = (1 << RegRAX | 1 << RegRBX | 1 << RegRCX | 1 << RegRDX);
#endif
            AssertMsg(!instr->m_func->isPostFinalLower || !instr->GetDst(), "After FinalLower, there should not be a dst");
            AssertMsg(instr->m_func->isPostFinalLower || getRegMask(instr->GetDst()) == dstMask,
                "Before FinalLower, instr should have eax,edx as dst");
            AssertMsg(!instr->m_func->isPostFinalLower || !instr->GetSrc2(), "After FinalLower, there should not be a src2");
            AssertMsg(instr->m_func->isPostFinalLower || getRegMask(instr->GetSrc2()) == srcMask,
                "Before FinalLower, instr should have eax,edx,ecx,ebx as src2");

            LegalizeSrc<verify>(
                instr,
                instr->GetSrc1(),
                L_Mem);
            break;
        }

        case Js::OpCode::TEST:
            // TEST is symmetric: put the immediate in src2 and the memory
            // operand in src1 to match the encodable forms.
            if((instr->GetSrc1()->IsImmediateOpnd() && !instr->GetSrc2()->IsImmediateOpnd()) ||
                (instr->GetSrc2()->IsMemoryOpnd() && !instr->GetSrc1()->IsMemoryOpnd()))
            {
                if (verify)
                {
                    AssertMsg(false, "Invalid Js::OpCode::TEST opnd order. Missing legalization");
                    return;
                }
                instr->SwapOpnds();
            }
            goto LegalizeDefault;

        case Js::OpCode::SHL:
        case Js::OpCode::SHR:
        case Js::OpCode::SAR:
        case Js::OpCode::ROL:
        case Js::OpCode::ROR:
            // A variable shift count must live in the hardware shift-count
            // register (cl) and be typed as a byte.
            if (verify)
            {
                Assert(instr->GetSrc2()->IsIntConstOpnd()
                    || instr->GetSrc2()->AsRegOpnd()->GetReg() == LowererMDArch::GetRegShiftCount());
            }
            else
            {
                if(!instr->GetSrc2()->IsIntConstOpnd())
                {
                    IR::Instr *const newInstr = instr->HoistSrc2(Js::OpCode::MOV);
                    newInstr->GetDst()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
                    instr->GetSrc2()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
                }
                instr->GetSrc2()->SetType(TyUint8);
            }
            goto LegalizeDefault;

        case Js::OpCode::TZCNT:
            Assert(AutoSystemInfo::Data.TZCntAvailable());
            goto LegalizeDefault;
        case Js::OpCode::LZCNT:
            Assert(AutoSystemInfo::Data.LZCntAvailable());
            goto LegalizeDefault;

        case Js::OpCode::ROUNDSD:
        case Js::OpCode::ROUNDSS:
            Assert(AutoSystemInfo::Data.SSE4_1Available());
            goto LegalizeDefault;

        default:
LegalizeDefault:
            if (isMDOpCode)
            {
                // Table-driven path: apply the forms from AllLegalInstrForms.
                AssertMsg(!isCustomForm, "Custom legal forms should have a case in the switch statement");
                hasSwitchCase = false;

                if (EncoderMD::IsOPEQ(instr))
                {
                    // Two-operand x86 form: dst and src1 must be the same operand.
                    MakeDstEquSrc1<verify>(instr);
                    Assert((dstForms & L_FormMask) == (src1Forms & L_FormMask));
                }
                LegalizeOpnds<verify>(
                    instr,
                    dstForms,
                    src1Forms,
                    src2Forms);
            }
            break;
    }

#if DBG
    // Asserting general rules
    // There should be at most 1 memory opnd in an instruction
    if (instr->GetDst() && instr->GetDst()->IsMemoryOpnd())
    {
        // All memref address need to fit in a dword
        Assert(!instr->GetDst()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetDst()->AsMemRefOpnd()->GetMemLoc()));
        if (instr->GetSrc1())
        {
            Assert(instr->GetSrc1()->IsEqual(instr->GetDst()) || !instr->GetSrc1()->IsMemoryOpnd());
            if (instr->GetSrc2())
            {
                Assert(!instr->GetSrc2()->IsMemoryOpnd());
            }
        }
    }
    else if (instr->GetSrc1() && instr->GetSrc1()->IsMemoryOpnd())
    {
        // All memref address need to fit in a dword
        Assert(!instr->GetSrc1()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetSrc1()->AsMemRefOpnd()->GetMemLoc()));
        Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsMemoryOpnd());
    }
    else if (instr->GetSrc2() && instr->GetSrc2()->IsMemRefOpnd())
    {
        // All memref address need to fit in a dword
        Assert(Math::FitsInDWord((size_t)instr->GetSrc2()->AsMemRefOpnd()->GetMemLoc()));
    }

    // Non-MOV (second operand) immediate need to fit in DWORD for AMD64
    Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsImmediateOpnd()
        || (TySize[instr->GetSrc2()->GetType()] != 8) || Math::FitsInDWord(instr->GetSrc2()->GetImmediateValue(instr->m_func)));
#endif
}
  1588. template <bool verify>
  1589. void LowererMD::LegalizeOpnds(IR::Instr *const instr, const LegalForms dstForms, LegalForms src1Forms, LegalForms src2Forms)
  1590. {
  1591. Assert(instr);
  1592. Assert(dstForms & LF_Optional || !instr->GetDst() == !dstForms);
  1593. Assert(src1Forms & LF_Optional || !instr->GetSrc1() == !src1Forms);
  1594. Assert(src2Forms & LF_Optional || !instr->GetSrc2() == !src2Forms);
  1595. Assert(src1Forms || !src2Forms);
  1596. const auto NormalizeForms = [](LegalForms forms) -> LegalForms
  1597. {
  1598. #ifdef _M_X64
  1599. if(forms & L_Ptr)
  1600. {
  1601. forms |= L_Imm32;
  1602. }
  1603. #else
  1604. if(forms & (L_Imm32 | L_Ptr))
  1605. {
  1606. forms |= L_Imm32 | L_Ptr;
  1607. }
  1608. #endif
  1609. // Remove Legal Flags
  1610. forms &= L_FormMask;
  1611. return forms;
  1612. };
  1613. if(dstForms && instr->GetDst())
  1614. {
  1615. LegalizeDst<verify>(instr, NormalizeForms(dstForms));
  1616. }
  1617. if(!src1Forms || !instr->GetSrc1())
  1618. {
  1619. return;
  1620. }
  1621. bool hasMemOpnd = instr->GetDst() && instr->GetDst()->IsMemoryOpnd();
  1622. // Allow src1 to be a mem opnd if dst & src1 must be the same
  1623. if (hasMemOpnd && src1Forms & L_Mem && !EncoderMD::IsOPEQ(instr))
  1624. {
  1625. src1Forms ^= L_Mem;
  1626. }
  1627. LegalizeSrc<verify>(instr, instr->GetSrc1(), NormalizeForms(src1Forms));
  1628. hasMemOpnd |= instr->GetSrc1()->IsMemoryOpnd();
  1629. // If dst or src1 is a mem opnd, mem2 cannot be a mem opnd
  1630. if(hasMemOpnd && src2Forms & L_Mem)
  1631. {
  1632. src2Forms ^= L_Mem;
  1633. }
  1634. if(src2Forms && instr->GetSrc2())
  1635. {
  1636. LegalizeSrc<verify>(instr, instr->GetSrc2(), NormalizeForms(src2Forms));
  1637. }
  1638. }
// Legalize the dst operand against its allowed forms. If the dst form is not
// legal, retarget the instruction to a fresh register and store that register
// into the original dst afterwards (also rewiring srcs that alias the dst).
template <bool verify>
void LowererMD::LegalizeDst(IR::Instr *const instr, const LegalForms forms)
{
    Assert(instr);
    Assert(forms);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst);
#ifndef _M_X64
    AssertMsg(!dst->IsInt64(), "Int64 supported only on x64");
#endif
    switch(dst->GetKind())
    {
        case IR::OpndKindReg:
            Assert(forms & L_Reg);
            return;

        case IR::OpndKindMemRef:
        {
            IR::MemRefOpnd *const memRefOpnd = dst->AsMemRefOpnd();
            if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
            {
                if (verify)
                {
                    AssertMsg(false, "Memory reference not legal in dst opnd. Missing legalization");
                    return;
                }
                // Load the out-of-range address into a register, turning the
                // memref into an indir dst.
                dst = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
            }
            // fall through
        }

        case IR::OpndKindSym:
        case IR::OpndKindIndir:
            if(forms & L_Mem)
            {
                return;
            }
            break;

        default:
            Assert(false);
            __assume(false);
    }

    if (verify)
    {
        AssertMsg(false, "Dst opnd not legal. Missing legalization");
        return;
    }

    // Use a reg dst, then store that reg into the original dst
    Assert(forms & L_Reg);
    const IRType irType = dst->GetType();
    IR::RegOpnd *const regOpnd = IR::RegOpnd::New(irType, instr->m_func);
    regOpnd->SetValueType(dst->GetValueType());
    instr->UnlinkDst();
    instr->SetDst(regOpnd);
    instr->InsertAfter(IR::Instr::New(GetStoreOp(irType), dst, regOpnd, instr->m_func));

    // If the original dst is the same as one of the srcs, hoist a src into the same reg and replace the same srcs with the reg
    const bool equalsSrc1 = instr->GetSrc1() && dst->IsEqual(instr->GetSrc1());
    const bool equalsSrc2 = instr->GetSrc2() && dst->IsEqual(instr->GetSrc2());
    if(!(equalsSrc1 || equalsSrc2))
    {
        return;
    }
    const Js::OpCode loadOpCode = GetLoadOp(irType);
    if(equalsSrc1)
    {
        instr->HoistSrc1(loadOpCode, RegNOREG, regOpnd->m_sym);
        if(equalsSrc2)
        {
            instr->ReplaceSrc2(regOpnd);
        }
    }
    else
    {
        instr->HoistSrc2(loadOpCode, RegNOREG, regOpnd->m_sym);
    }
}
  1713. bool LowererMD::HoistLargeConstant(IR::IndirOpnd *indirOpnd, IR::Opnd *src, IR::Instr *instr) {
  1714. if (indirOpnd != nullptr)
  1715. {
  1716. if (indirOpnd->GetOffset() == 0)
  1717. {
  1718. instr->ReplaceSrc(src, indirOpnd->GetBaseOpnd());
  1719. }
  1720. else
  1721. {
  1722. // Hoist the address load as LEA [reg + offset]
  1723. // with the reg = MOV <some address within 32-bit range at the start of the function
  1724. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
  1725. Lowerer::InsertLea(regOpnd, indirOpnd, instr);
  1726. instr->ReplaceSrc(src, regOpnd);
  1727. }
  1728. return true;
  1729. }
  1730. return false;
  1731. }
// Ensures the given src operand of "instr" is in one of the allowed "forms".
// When verify == true this only asserts legality (catching missing
// legalization); when verify == false, illegal operands are rewritten —
// typically hoisted into a register or replaced with a constant-pool indir.
template <bool verify>
void LowererMD::LegalizeSrc(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms)
{
    Assert(instr);
    Assert(src);
    Assert(src == instr->GetSrc1() || src == instr->GetSrc2());
    Assert(forms);
#ifndef _M_X64
    AssertMsg(!src->IsInt64() || src->IsMemoryOpnd(), "Int64 supported only on x64");
#endif
    switch(src->GetKind())
    {
        case IR::OpndKindReg:
            Assert(forms & L_Reg);
            return;

        case IR::OpndKindIntConst:
            if(forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                // On x64, a 64-bit constant is only encodable as an immediate
                // when it fits in a sign-extended DWORD.
                IR::IntConstOpnd * intOpnd = src->AsIntConstOpnd();
                if ((TySize[intOpnd->GetType()] != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord(intOpnd->GetValue())))
                {
                    if (forms & L_Imm32)
                    {
                        // the constant fits in 32-bit, no need to hoist
                        return;
                    }
                    break;
                }
                if (verify)
                {
                    AssertMsg(false, "IntConstOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                // The actual value for inlinee entry instr isn't determined until encoder
                // So it need to be hoisted conventionally.
                if (!instr->isInlineeEntryInstr)
                {
                    Assert(forms & L_Reg);
                    // Reference the value through the function's constant-address
                    // pool instead of encoding a 64-bit immediate.
                    IR::IntConstOpnd * newIntOpnd = intOpnd->Copy(instr->m_func)->AsIntConstOpnd();
                    IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd(intOpnd->GetValue(), newIntOpnd, IR::AddrOpndKindConstantAddress, TyMachPtr, Js::OpCode::MOV);
                    if (HoistLargeConstant(indirOpnd, src, instr))
                    {
                        return;
                    }
                }
            }
#endif
            break;

        case IR::OpndKindFloatConst:
            break; // assume for now that it always needs to be hoisted

        case IR::OpndKindInt64Const:
            if (forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::Int64ConstOpnd * int64Opnd = src->AsInt64ConstOpnd();
                if ((forms & L_Imm32) && ((src->GetSize() != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord(int64Opnd->GetValue()))))
                {
                    // the immediate fits in 32-bit, no need to hoist
                    return;
                }
                if (verify)
                {
                    AssertMsg(false, "Int64ConstOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                Assert(forms & L_Reg);
                // MOV reg, imm64 is always encodable; load then substitute the reg.
                IR::Opnd* regOpnd = IR::RegOpnd::New(src->GetType(), instr->m_func);
                IR::Instr* moveToReg = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, instr->m_func);
                instr->InsertBefore(moveToReg);
                instr->ReplaceSrc(src, regOpnd);
                return;
            }
#endif
            break;

        case IR::OpndKindAddr:
            if (forms & L_Ptr)
            {
                return;
            }
#ifdef _M_X64
            {
                IR::AddrOpnd * addrOpnd = src->AsAddrOpnd();
                if ((forms & L_Imm32) && ((TySize[addrOpnd->GetType()] != 8) ||
                    (!instr->isInlineeEntryInstr && Math::FitsInDWord((size_t)addrOpnd->m_address))))
                {
                    // the address fits in 32-bit, no need to hoist
                    return;
                }
                if (verify)
                {
                    AssertMsg(false, "AddrOpnd doesn't fit in 32 bits. Missing legalization");
                    return;
                }
                Assert(!instr->isInlineeEntryInstr);
                Assert(forms & L_Reg);
                // TODO: michhol, remove cast after making m_address intptr
                IR::AddrOpnd * newAddrOpnd = addrOpnd->Copy(instr->m_func)->AsAddrOpnd();
                IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd((intptr_t)addrOpnd->m_address, newAddrOpnd, addrOpnd->GetAddrOpndKind(), TyMachPtr, Js::OpCode::MOV);
                if (HoistLargeConstant(indirOpnd, src, instr))
                {
                    return;
                }
            }
#endif
            break;

        case IR::OpndKindMemRef:
        {
            IR::MemRefOpnd *const memRefOpnd = src->AsMemRefOpnd();
            if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
            {
                if (verify)
                {
                    AssertMsg(false, "Memory reference not legal in src opnd. Missing legalization");
                    return;
                }
                // Load the address into a register, turning the MemRef into an indir.
                src = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
            }
            // fall through
        }

        case IR::OpndKindSym:
        case IR::OpndKindIndir:
            if(forms & L_Mem)
            {
                return;
            }
            break;

        case IR::OpndKindHelperCall:
        case IR::OpndKindLabel:
            Assert(!instr->isInlineeEntryInstr);
            Assert(forms & L_Ptr);
            return;

        default:
            Assert(false);
            __assume(false);
    }

    if (verify)
    {
        AssertMsg(false, "Src opnd not legal. Missing legalization");
        return;
    }

    // Hoist the src into a reg
    Assert(forms & L_Reg);
    Assert(!(instr->GetDst() && instr->GetDst()->IsEqual(src)));
    const Js::OpCode loadOpCode = GetLoadOp(src->GetType());
    if(src == instr->GetSrc2())
    {
        instr->HoistSrc2(loadOpCode);
        return;
    }
    // If src2 is the same operand, make it reuse the hoisted register too.
    const bool equalsSrc2 = instr->GetSrc2() && src->IsEqual(instr->GetSrc2());
    IR::Instr * hoistInstr = instr->HoistSrc1(loadOpCode);
    if(equalsSrc2)
    {
        instr->ReplaceSrc2(hoistInstr->GetDst());
    }
    // The inlinee-entry marker travels with the instruction that now carries
    // the constant, so the encoder patches the hoisted load instead.
    hoistInstr->isInlineeEntryInstr = instr->isInlineeEntryInstr;
    instr->isInlineeEntryInstr = false;
}
// Explicit instantiations of the legalization templates: the non-verifying
// (<false>) variants always exist; the verifying (<true>) variants, which only
// assert legality, are built in DBG builds.
template void LowererMD::Legalize<false>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<false>(IR::Instr *const instr, const LegalForms dstForms, const LegalForms src1Forms, LegalForms src2Forms);
template void LowererMD::LegalizeDst<false>(IR::Instr *const instr, const LegalForms forms);
template void LowererMD::LegalizeSrc<false>(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms);
template void LowererMD::MakeDstEquSrc1<false>(IR::Instr *const instr);
#if DBG
template void LowererMD::Legalize<true>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<true>(IR::Instr *const instr, const LegalForms dstForms, const LegalForms src1Forms, LegalForms src2Forms);
template void LowererMD::LegalizeDst<true>(IR::Instr *const instr, const LegalForms forms);
template void LowererMD::LegalizeSrc<true>(IR::Instr *const instr, IR::Opnd *src, const LegalForms forms);
template void LowererMD::MakeDstEquSrc1<true>(IR::Instr *const instr);
#endif
  1911. IR::Instr *
  1912. LowererMD::LoadFunctionObjectOpnd(IR::Instr *instr, IR::Opnd *&functionObjOpnd)
  1913. {
  1914. IR::Opnd * src1 = instr->GetSrc1();
  1915. IR::Instr * instrPrev = instr->m_prev;
  1916. if (src1 == nullptr)
  1917. {
  1918. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  1919. StackSym *paramSym = StackSym::New(TyMachPtr, m_func);
  1920. IR::SymOpnd *paramOpnd = IR::SymOpnd::New(paramSym, TyMachPtr, m_func);
  1921. this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
  1922. IR::Instr * mov1 = IR::Instr::New(Js::OpCode::MOV, regOpnd, paramOpnd, m_func);
  1923. instr->InsertBefore(mov1);
  1924. functionObjOpnd = mov1->GetDst()->AsRegOpnd();
  1925. instrPrev = mov1;
  1926. instr->m_func->SetHasImplicitParamLoad();
  1927. }
  1928. else
  1929. {
  1930. // Inlinee, use the function object opnd on the instruction
  1931. functionObjOpnd = instr->UnlinkSrc1();
  1932. if (!functionObjOpnd->IsRegOpnd())
  1933. {
  1934. Assert(functionObjOpnd->IsAddrOpnd());
  1935. }
  1936. }
  1937. return instrPrev;
  1938. }
// Emits an inline fast path for dividing a tagged-int var by a power-of-2
// constant. The first arm handles values exactly divisible by src2; the
// second ($divbyhalf) handles odd multiples of src2/2 via a helper call that
// finishes the division (producing a non-integer result); everything else
// falls to $helper (the caller emits the slow path after this returns).
void
LowererMD::GenerateFastDivByPow2(IR::Instr *instr)
{
    //
    // Given:
    // dst = Div_A src1, src2
    // where src2 == power of 2
    //
    // Generate:
    // MOV s1, src1
    // AND s1, 0xFFFF000000000000 | (src2Value-1) ----- test for tagged int and divisibility by src2Value [int32]
    // AND s1, 0x00000001 | ((src2Value-1)<<1) [int31]
    // CMP s1, AtomTag_IntPtr
    // JNE $divbyhalf
    // MOV s1, src1
    // SAR s1, log2(src2Value) ------ perform the divide
    // OR s1, 1
    // MOV dst, s1
    // JMP $done
    // $divbyhalf:
    // AND s1, 0xFFFF000000000000 | (src2Value-1>>1) ----- test for tagged int and divisibility by src2Value /2 [int32]
    // AND s1, 0x00000001 | ((src2Value-1)) [int31]
    // CMP s1, AtomTag_IntPtr
    // JNE $helper
    // MOV s1, src1
    // SAR s1, log2(src2Value) [int32]
    // SAR s1, log2(src2Value) + 1 ------ removes the tag and divides [int31]
    // PUSH s1
    // PUSH 0xXXXXXXXX (ScriptContext)
    // CALL Op_FinishOddDivByPow2
    // MOV dst, eax
    // JMP $done
    // $helper:
    // ...
    // $done:
    //

    // Skip the fast path entirely when src1 is known not to be an int.
    if (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->IsNotInt())
        return;

    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
    IR::LabelInstr *divbyhalf = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyVar, m_func);

    AnalysisAssert(src2);
    Assert(src2->IsVar() && Js::TaggedInt::Is(src2->m_address) && (Math::IsPow2(Js::TaggedInt::ToInt32(src2->m_address))));
    int32 src2Value = Js::TaggedInt::ToInt32(src2->m_address);

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

#if INT32VAR
    // dontEncode as src2 is a power of 2.
    // The mask checks both the int tag (high bits) and divisibility (low bits) at once.
    IR::Opnd *constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | (src2Value - 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    IR::Opnd *constant = IR::IntConstOpnd::New((0x00000001 | ((src2Value - 1) << 1)), TyInt32, m_func);
#endif

    // AND s1, constant
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // CMP s1, AtomTag_IntPtr
    // After the AND, s1 == AtomTag_IntPtr iff src1 is a tagged int divisible by src2Value.
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE $divbyhalf
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, divbyhalf, m_func));

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

    s1 = s1->UseWithNewType(TyInt32, m_func)->AsRegOpnd();

    // SAR s1, log2(src2Value) -- performs the divide on the tagged value
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func), m_func));

    if(s1->GetSize() != MachPtr)
    {
        s1 = s1->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
    }

#if INT32VAR
    GenerateInt32ToVarConversion(s1, instr);
#else
    // OR s1, 1 -- restore the int31 tag bit
    instr->InsertBefore(IR::Instr::New(Js::OpCode::OR, s1, s1, IR::IntConstOpnd::New(1, TyInt32, m_func), m_func));
#endif

    // MOV dst, s1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, s1, m_func));

    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // $divbyhalf:
    instr->InsertBefore(divbyhalf);

#if INT32VAR
    // Mask now tests divisibility by src2Value/2 (odd multiples of half the divisor).
    constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | ((src2Value-1) >> 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    constant = IR::IntConstOpnd::New((0x00000001 | (src2Value-1)), TyInt32, m_func);
#endif

    // AND s1, constant
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // CMP s1, AtomTag_IntPtr
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE $helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, helper, m_func));

    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));

    s1 = s1->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();

#if INT32VAR
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func);
#else
    // One extra shift removes the int31 tag along with the divide.
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value) + 1, TyInt32, m_func);
#endif

    // SAR s1, shiftOpnd
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, shiftOpnd, m_func));

    // PUSH s1
    // PUSH ScriptContext
    // CALL Op_FinishOddDivByPow2 -- computes the final (non-integer) quotient
    {
        IR::JnHelperMethod helperMethod;

        if (instr->dstIsTempNumber)
        {
            // Pass the address of a stack-allocated temp number so the helper
            // can write the result in place.
            IR::Opnd *tempOpnd;
            helperMethod = IR::HelperOp_FinishOddDivByPow2InPlace;
            Assert(dst->IsRegOpnd());
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);

            IR::Instr *load = this->m_lowerer->InsertLoadStackAddress(tempNumberSym, instr);
            tempOpnd = load->GetDst();
            this->lowererMDArch.LoadHelperArgument(instr, tempOpnd);
        }
        else
        {
            helperMethod = IR::HelperOp_FinishOddDivByPow2;
        }

        m_lowerer->LoadScriptContext(instr);

        lowererMDArch.LoadHelperArgument(instr, s1);

        IR::Instr *call = IR::Instr::New(Js::OpCode::Call, dst, IR::HelperCallOpnd::New(helperMethod, m_func), m_func);
        instr->InsertBefore(call);
        lowererMDArch.LowerCall(call, 0);
    }

    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // $helper:
    instr->InsertBefore(helper);

    // $done:
    instr->InsertAfter(done);
}
  2095. ///----------------------------------------------------------------------------
  2096. ///
  2097. /// LowererMD::GenerateFastCmSrEqConst
  2098. ///
  2099. ///----------------------------------------------------------------------------
// Lowers a strict-equality compare where src2 is a known constant register
// (e.g. null/true/false): a single pointer CMP decides the result, and the
// original instruction is rewritten in place into the "true" MOV.
// Always returns true (the compare is fully lowered here).
bool
LowererMD::GenerateFastCmSrEqConst(IR::Instr *instr)
{
    //
    // Given:
    // s1 = CmSrEq_A s2, s3
    // where either s2 or s3 is 'null', 'true' or 'false'
    //
    // Generate:
    //
    // CMP s2, s3
    // JEQ $mov_true
    // MOV s1, Library.GetFalse()
    // JMP $done
    // $mov_true:
    // MOV s1, Library.GetTrue()
    // $done:
    //

    Assert(m_lowerer->IsConstRegOpnd(instr->GetSrc2()->AsRegOpnd()));

    IR::Opnd *opnd = instr->GetSrc1();
    IR::RegOpnd *opndReg = instr->GetSrc2()->AsRegOpnd();
    IR::LabelInstr *labelMovTrue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // src1 must be in a register for the CMP below.
    if (!opnd->IsRegOpnd())
    {
        IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
        IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, lhsReg, opnd, m_func);
        instr->InsertBefore(mov);

        opnd = lhsReg;
    }

    Assert(opnd->IsRegOpnd());

    // CMP s2, s3
    // JEQ $mov_true
    this->m_lowerer->InsertCompareBranch(opnd, opndReg->m_sym->GetConstOpnd(), Js::OpCode::BrEq_A, labelMovTrue, instr);

    // MOV s1, 'false'
    IR::Instr *instrMov = IR::Instr::New(Js::OpCode::MOV,
        instr->GetDst(),
        m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
        m_func);
    instr->InsertBefore(instrMov);

    // JMP $done
    IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    instr->InsertBefore(jmp);

    // $mov_true:
    instr->InsertBefore(labelMovTrue);

    // MOV s1, 'true'
    // Reuse the original instruction as the true-path MOV: swap its opcode,
    // drop both srcs, and load the library 'true' value as the new src1.
    instr->m_opcode = Js::OpCode::MOV;
    instr->UnlinkSrc1();
    instr->UnlinkSrc2();
    instr->SetSrc1(m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue));
    // The rewritten MOV cannot bail out; any bailout info on the compare is dead.
    instr->ClearBailOutInfo();
    Legalize(instr);

    // $done:
    instr->InsertAfter(labelDone);

    return true;
}
  2156. ///----------------------------------------------------------------------------
  2157. ///
  2158. /// LowererMD::GenerateFastCmXxTaggedInt
  2159. ///
  2160. ///----------------------------------------------------------------------------
// Emits an inline compare for the case where both sources are (or may be)
// tagged ints, producing the library true/false var without a helper call.
// Returns true when the compare was fully lowered (both srcs proven tagged),
// false when a $helper path remains for the caller to fill in.
bool LowererMD::GenerateFastCmXxTaggedInt(IR::Instr *instr, bool isInHelper /* = false */)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints (i.e., are vars with the low bit set).
    //
    // Given:
    //
    // Cmxx_A dst, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    // MOV r1, src1
    // if (==, !=, !== or ===)
    // SUB r1, src2
    // NEG r1 // Sets CF if r1 != 0
    // SBB r1, r1 // CF == 1 ? r1 = -1 : r1 = 0
    // else
    // MOV r2, 0
    // CMP r1, src2
    // SETcc r2
    // DEC r2
    // set r1 to r2
    // AND r1, (notEqualResult - equalResult)
    // ADD r1, equalResult
    // MOV dst, r1
    // JMP $fallthru
    // $helper:
    // (caller will generate normal helper call sequence)
    // $fallthru:

    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Opnd * dst = instr->GetDst();

    IR::RegOpnd * r1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func, isInHelper);

    Assert(src1 && src2 && dst);

    // Not tagged ints? Bail out of the fast path entirely.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }

    bool isNeqOp = instr->m_opcode == Js::OpCode::CmSrNeq_A || instr->m_opcode == Js::OpCode::CmNeq_A;

    // The branchless tail maps "equal mask" (-1/0) onto the true/false vars.
    intptr_t notEqualResult = isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();
    intptr_t equalResult = !isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();

    // Tagged ints?
    bool isTaggedInts = false;
    if (src1->IsTaggedInt())
    {
        if (src2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        // Emit the combined "both tagged?" test; jumps to $helper on failure.
        this->GenerateSmIntPairTest(instr, src1, src2, helper);
    }

    // MOV r1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, m_func));

    Js::OpCode setCC_Opcode = Js::OpCode::Nop;

    switch(instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmEq_A:
        break;

    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmNeq_A:
        break;

    case Js::OpCode::CmGe_A:
        setCC_Opcode = Js::OpCode::SETGE;
        break;

    case Js::OpCode::CmGt_A:
        setCC_Opcode = Js::OpCode::SETG;
        break;

    case Js::OpCode::CmLe_A:
        setCC_Opcode = Js::OpCode::SETLE;
        break;

    case Js::OpCode::CmLt_A:
        setCC_Opcode = Js::OpCode::SETL;
        break;

    default:
        Assume(UNREACHED);
    }

    if (setCC_Opcode == Js::OpCode::Nop)
    {
        // Equality form: compute r1 = (src1 != src2) ? -1 : 0 without a branch.
        // SUB r1, src2
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, r1, r1, src2, m_func);
        instr->InsertBefore(subInstr);
        Legalize(subInstr); // src2 may need legalizing

        // NEG r1 -- sets CF iff r1 != 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, r1, r1, m_func));

        // SBB r1, r1 -- CF == 1 ? r1 = -1 : r1 = 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SBB, r1, r1, r1, m_func));
    }
    else
    {
        // Relational form: SETcc a 0/1 byte, then DEC to get 0/-1
        // (note inverted sense: condition true -> r2 = 0, false -> -1).
        IR::Instr *instrNew;
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyMachPtr, this->m_func);

        // MOV r2, 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r2, IR::IntConstOpnd::New(0, TyMachReg, this->m_func), m_func));

        // CMP r1, src2 -- compare the 32-bit int payloads
        IR::Opnd *r1_32 = r1->UseWithNewType(TyInt32, this->m_func);
        IR::Opnd *src2_32 =src2->UseWithNewType(TyInt32, this->m_func);
        instrNew = IR::Instr::New(Js::OpCode::CMP, m_func);
        instrNew->SetSrc1(r1_32);
        instrNew->SetSrc2(src2_32);
        instr->InsertBefore(instrNew);

        // SETcc r2
        IR::RegOpnd *r2_i8 = (IR::RegOpnd*) r2->UseWithNewType(TyInt8, this->m_func);
        instrNew = IR::Instr::New(setCC_Opcode, r2_i8, r2_i8, m_func);
        instr->InsertBefore(instrNew);

        // DEC r2
        instr->InsertBefore(IR::Instr::New(Js::OpCode::DEC, r2, r2, m_func));

        // r1 <- r2
        r1 = r2;
    }

    // r1 is now a mask (0 or -1); select between the two result vars:
    // AND r1, (notEqualResult - equalResult)
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, r1, r1, m_func);
        andInstr->SetSrc2(IR::AddrOpnd::New((void*)((size_t)notEqualResult - (size_t)equalResult), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func));
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }

    // ADD r1, equalResult
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::ADD, r1, r1, m_func);
        add->SetSrc2(IR::AddrOpnd::New(equalResult, IR::AddrOpndKind::AddrOpndKindDynamicVar, this->m_func));
        instr->InsertBefore(add);
        Legalize(add);
    }

    // MOV dst, r1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, r1, m_func));

    if (isTaggedInts)
    {
        // Both srcs proven tagged: no helper path needed; the compare is gone.
        instr->Remove();
        return true;
    }

    // JMP $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, fallthru, m_func));

    instr->InsertBefore(helper);
    instr->InsertAfter(fallthru);

    return false;
}
  2309. void LowererMD::GenerateFastCmXxR8(IR::Instr *instr)
  2310. {
  2311. GenerateFastCmXx(instr);
  2312. }
  2313. void LowererMD::GenerateFastCmXxI4(IR::Instr *instr)
  2314. {
  2315. GenerateFastCmXx(instr);
  2316. }
// Fully lowers a typed compare (float, int32, or int64 sources) into a
// CMP/COMIS* followed by SETcc (int dst) or CMOVcc (var dst), replacing the
// original instruction.
void LowererMD::GenerateFastCmXx(IR::Instr *instr)
{
    // For float src:
    // dst = MOV 0/1
    // (U)COMISD src1, src2
    // JP $done
    // dst.i8 = SetCC dst.i8
    // $done:

    // for int src:
    // CMP src1, src2
    // dst = MOV 0 / false
    // dst.i8 = SetCC dst.i8 / CMOCcc true

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * tmp = dst;
    bool isIntDst = dst->AsRegOpnd()->m_sym->IsInt32();
    bool isFloatSrc = src1->IsFloat();
    bool isInt64Src = src1->IsInt64();
    Assert(!isFloatSrc || src2->IsFloat());
    Assert(!isFloatSrc || isIntDst);
    Assert(!isInt64Src || src2->IsInt64());
    Assert(!isInt64Src || isIntDst);
    Assert(!isFloatSrc || AutoSystemInfo::Data.SSE2Available());
    IR::Opnd *opnd;
    IR::Instr *newInstr;

    Assert(src1->IsRegOpnd());
#if LOWER_SPLIT_INT64
    // On 32-bit targets an int64 is split into a high/low register pair;
    // compare highs first, then (below) lows.
    Int64RegPair src1Pair, src2Pair;
    if (isInt64Src)
    {
        src1Pair = this->m_func->FindOrCreateInt64Pair(src1);
        src2Pair = this->m_func->FindOrCreateInt64Pair(src2);

        src1 = src1Pair.high;
        src2 = src2Pair.high;
    }
#endif

    // For floats, "done" is a label the JP (unordered/NaN) can skip to;
    // otherwise new instructions are inserted directly before instr.
    IR::Instr * done;
    if (isFloatSrc)
    {
        done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        instr->InsertBefore(done);
    }
    else
    {
        done = instr;
    }

    if (isIntDst)
    {
        // reg = MOV 0 will get peeped to XOR reg, reg which sets the flags.
        // Put the MOV before the CMP, but use a tmp if dst == src1/src2
        if (dst->IsEqual(src1) || dst->IsEqual(src2))
        {
            tmp = IR::RegOpnd::New(dst->GetType(), this->m_func);
        }

        // dst = MOV 0
        // For float CmNeq, NaN must compare unequal: seed with 1 so the
        // JP-skipped SETcc leaves "true".
        if (isFloatSrc && instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            opnd = IR::IntConstOpnd::New(1, TyInt32, this->m_func);
        }
        else
        {
            opnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        }
        m_lowerer->InsertMove(tmp, opnd, done);
    }

    Js::OpCode cmpOp;
    if (isFloatSrc)
    {
        // Equality uses the unordered compare (no spurious exception on NaN);
        // relational compares use the ordered form.
        if (instr->m_opcode == Js::OpCode::CmEq_A || instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        }
        else
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
        }
    }
    else
    {
        cmpOp = Js::OpCode::CMP;
    }
    // CMP src1, src2
    newInstr = IR::Instr::New(cmpOp, this->m_func);
    newInstr->SetSrc1(src1);
    newInstr->SetSrc2(src2);
    done->InsertBefore(newInstr);
    LowererMD::Legalize(newInstr);

    if (isFloatSrc)
    {
        // JP $done -- PF set means unordered (NaN); skip the SETcc.
        newInstr = IR::BranchInstr::New(Js::OpCode::JP, done->AsLabelInstr(), this->m_func);
        done->InsertBefore(newInstr);
    }

    if (!isIntDst)
    {
        // Var dst: preload 'false'; CMOVcc below overwrites with 'true' on success.
        opnd = this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
        Lowerer::InsertMove(tmp, opnd, done);
    }

    Js::OpCode useCC;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmEq_A:
        useCC = isIntDst ? Js::OpCode::SETE : Js::OpCode::CMOVE;
        break;

    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmNeq_A:
        useCC = isIntDst ? Js::OpCode::SETNE : Js::OpCode::CMOVNE;
        break;

    case Js::OpCode::CmGe_I4:
        useCC = isIntDst ? Js::OpCode::SETGE : Js::OpCode::CMOVGE;
        break;

    case Js::OpCode::CmGt_I4:
        useCC = isIntDst ? Js::OpCode::SETG : Js::OpCode::CMOVG;
        break;

    case Js::OpCode::CmLe_I4:
        useCC = isIntDst ? Js::OpCode::SETLE : Js::OpCode::CMOVLE;
        break;

    case Js::OpCode::CmLt_I4:
        useCC = isIntDst ? Js::OpCode::SETL : Js::OpCode::CMOVL;
        break;

    // Unsigned I4 forms and float relational forms share the unsigned
    // condition codes (COMIS* sets CF/ZF like an unsigned compare).
    case Js::OpCode::CmUnGe_I4:
    case Js::OpCode::CmGe_A:
        useCC = isIntDst ? Js::OpCode::SETAE : Js::OpCode::CMOVAE;
        break;

    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmGt_A:
        useCC = isIntDst ? Js::OpCode::SETA : Js::OpCode::CMOVA;
        break;

    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmLe_A:
        useCC = isIntDst ? Js::OpCode::SETBE : Js::OpCode::CMOVBE;
        break;

    case Js::OpCode::CmUnLt_I4:
    case Js::OpCode::CmLt_A:
        useCC = isIntDst ? Js::OpCode::SETB : Js::OpCode::CMOVB;
        break;

    default:
        useCC = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    if (isIntDst)
    {
        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(useCC, tmp_i8, tmp_i8, this->m_func);
    }
    else
    {
        // regTrue = MOV true
        IR::Opnd *regTrue = IR::RegOpnd::New(TyMachPtr, this->m_func);
        Lowerer::InsertMove(regTrue, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), done);

        // tmp = CMOVcc tmp, regTrue
        newInstr = IR::Instr::New(useCC, tmp, tmp, regTrue, this->m_func);
    }
    done->InsertBefore(newInstr);

#ifndef _M_X64
    if (isInt64Src)
    {
        // High halves equal: the result is decided by the low halves.
        // JNE skips the low compare when the highs already differ.
        IR::LabelInstr* skipLow = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        newInstr = IR::BranchInstr::New(Js::OpCode::JNE, skipLow, this->m_func);
        done->InsertBefore(newInstr);

        newInstr = IR::Instr::New(cmpOp, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src2Pair.low);
        done->InsertBefore(newInstr);

        Js::OpCode lowUseCC = useCC;
        // Need to do an unsigned compare for the lower part
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmGe_I4: lowUseCC = Js::OpCode::SETAE; break;
        case Js::OpCode::CmGt_I4: lowUseCC = Js::OpCode::SETA; break;
        case Js::OpCode::CmLe_I4: lowUseCC = Js::OpCode::SETBE; break;
        case Js::OpCode::CmLt_I4: lowUseCC = Js::OpCode::SETB; break;
        }

        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(lowUseCC, tmp_i8, tmp_i8, this->m_func);
        done->InsertBefore(newInstr);
        done->InsertBefore(skipLow);
    }
#endif

    if (tmp != dst)
    {
        // Copy the temp result into the real dst (dst aliased a src above).
        newInstr = IR::Instr::New(Js::OpCode::MOV, dst, tmp, this->m_func);
        instr->InsertBefore(newInstr);
    }

    instr->Remove();
}
  2506. IR::Instr * LowererMD::GenerateConvBool(IR::Instr *instr)
  2507. {
  2508. // TEST src1, src1
  2509. // dst = MOV true
  2510. // rf = MOV false
  2511. // dst = CMOV dst, rf
  2512. IR::Instr *instrNew, *instrFirst;
  2513. IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
  2514. IR::RegOpnd *regFalse;
  2515. // TEST src1, src2
  2516. instrFirst = instrNew = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  2517. instrNew->SetSrc1(instr->GetSrc1());
  2518. instrNew->SetSrc2(instr->GetSrc1());
  2519. instr->InsertBefore(instrNew);
  2520. // dst = MOV true
  2521. Lowerer::InsertMove(dst, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
  2522. // rf = MOV false
  2523. regFalse = IR::RegOpnd::New(TyMachPtr, this->m_func);
  2524. Lowerer::InsertMove(regFalse, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
  2525. // Add dst as src1 of CMOV to create a pseudo use of dst. Otherwise, the register allocator
  2526. // won't know the previous dst is needed. and needed in the same register as the dst of the CMOV.
  2527. // dst = CMOV dst, rf
  2528. instrNew = IR::Instr::New(Js::OpCode::CMOVE, dst, dst, regFalse, this->m_func);
  2529. instr->InsertBefore(instrNew);
  2530. instr->Remove();
  2531. return instrFirst;
  2532. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastAdd
///
/// Emits the inline fast path for adding two (likely) tagged-int vars.
/// Returns false when no fast path is emitted (an operand is known non-int,
/// or is a (likely) string); returns true after emitting the fast path,
/// leaving $helper for the caller to fill in with the slow-path call.
///
/// NOTE: We assume that only the sum of two Int31's will have 0x2 set. This
/// is only true until we have a var type with tag == 0x2.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastAdd(IR::Instr * instrAdd)
{
    // Given:
    //
    // dst = Add src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1 -- Get rid of one of the tag [Int31 only]
    // s1 = ADD s1, src2 -- try an inline add
    // JO $helper -- bail if the add overflowed
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    // JMP $fallthru
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    IR::Instr * instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd * opndReg;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    opndSrc1 = instrAdd->GetSrc1();
    opndSrc2 = instrAdd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");
    // Generate fastpath for Incr_A anyway -
    // Incrementing strings representing integers can be inter-mixed with integers e.g. "1"++ -> converts 1 to an int and thereafter, integer increment is expected.
    if (opndSrc1->IsRegOpnd() && (opndSrc1->AsRegOpnd()->IsNotInt() || opndSrc1->GetValueType().IsString()
    || (instrAdd->m_opcode != Js::OpCode::Incr_A && opndSrc1->GetValueType().IsLikelyString())))
    {
        // Known non-int (or string-ish) source: an int fast path is not worthwhile.
        return false;
    }
    if (opndSrc2->IsRegOpnd() && (opndSrc2->AsRegOpnd()->IsNotInt() ||
    opndSrc2->GetValueType().IsLikelyString()))
    {
        return false;
    }
    // Tagged ints?
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrAdd, opndSrc1, opndSrc2, labelHelper);
    }
    if (opndSrc1->IsAddrOpnd())
    {
        // If opnd1 is a constant, just swap them.
        IR::Opnd *opndTmp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = opndTmp;
    }
    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrAdd->InsertBefore(instr);
#if !INT32VAR
    // Int31 scheme: both operands still carry the tag, so one tag must be
    // removed before the add; fold the -1 into a constant operand when possible.
    // Do the DEC in place
    if (opndSrc2->IsAddrOpnd())
    {
        Assert(opndSrc2->AsAddrOpnd()->GetAddrOpndKind() == IR::AddrOpndKindConstantVar);
        // Bake (constant - 1) into the immediate instead of emitting a DEC.
        opndSrc2 = IR::IntConstOpnd::New(*((int *)&(opndSrc2->AsAddrOpnd()->m_address)) - 1, TyInt32, this->m_func, opndSrc2->AsAddrOpnd()->m_dontEncode);
        opndSrc2 = opndSrc2->Use(this->m_func);
    }
    else if (opndSrc2->IsIntConstOpnd())
    {
        Assert(opndSrc2->GetType() == TyInt32);
        opndSrc2 = opndSrc2->Use(this->m_func);
        opndSrc2->AsIntConstOpnd()->DecrValue(1);
    }
    else
    {
        // s1 = DEC s1
        opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::DEC, opndReg, opndReg, this->m_func);
        instrAdd->InsertBefore(instr);
    }
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
#else
    // Int32 scheme (64-bit vars): add the untagged low 32 bits directly.
    if (opndSrc2->IsAddrOpnd())
    {
        // truncate to untag
        int value = ::Math::PointerCastToIntegralTruncate<int>(opndSrc2->AsAddrOpnd()->m_address);
        if (value == 1)
        {
            // Adding 1: emit INC instead of ADD reg, 1.
            instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
        }
        else
        {
            opndSrc2 = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
            instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
        }
    }
    else
    {
        instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2->UseWithNewType(TyInt32, this->m_func), this->m_func);
    }
#endif
    // s1 = ADD s1, src2
    instrAdd->InsertBefore(instr);
    Legalize(instr);
    // JO $helper -- signed overflow means the result does not fit a tagged int
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrAdd->InsertBefore(instr);
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }
#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrAdd);
#endif
    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrAdd->GetDst(), opndReg, this->m_func);
    instrAdd->InsertBefore(instr);
    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrAdd->InsertBefore(instr);
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrAdd->InsertBefore(labelHelper);
    instrAdd->InsertAfter(labelFallThru);
    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastSub
///
/// Emits the inline fast path for subtracting two (likely) tagged-int vars.
/// Returns false when no fast path is emitted (an operand is known non-int);
/// returns true after emitting the fast path, leaving $helper for the
/// caller's slow-path call.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastSub(IR::Instr * instrSub)
{
    // Given:
    //
    // dst = Sub src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = SUB s1, src2 -- try an inline sub
    // JO $helper -- bail if the subtract overflowed
    // s1 = INC s1 -- restore the var tag on the result [Int31 only]
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    // JMP $fallthru
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    IR::Instr * instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd * opndReg;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    opndSrc1 = instrSub->GetSrc1();
    opndSrc2 = instrSub->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Sub instruction");
    // Not tagged ints?
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    // Tagged ints?
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrSub, opndSrc1, opndSrc2, labelHelper);
    }
    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrSub->InsertBefore(instr);
    // s1 = SUB s1, src2
    // Subtracting two tagged values cancels the tag; the INC below restores it. [Int31 only]
    instr = IR::Instr::New(Js::OpCode::SUB, opndReg, opndReg, opndSrc2, this->m_func);
    instrSub->InsertBefore(instr);
    // JO $helper
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrSub->InsertBefore(instr);
#if !INT32VAR
    // s1 = INC s1
    instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
    instrSub->InsertBefore(instr);
#endif
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }
#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrSub);
#endif
    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrSub->GetDst(), opndReg, this->m_func);
    instrSub->InsertBefore(instr);
    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrSub->InsertBefore(instr);
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrSub->InsertBefore(labelHelper);
    instrSub->InsertAfter(labelFallThru);
    return true;
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastMul
///
/// Emits the inline fast path for multiplying two (likely) tagged-int vars,
/// including the -0 check: a zero product with a negative operand must
/// produce the Number -0.0, not the tagged int 0.
///
/// NOTE(review): unlike GenerateFastAdd/GenerateFastSub, the known-non-int
/// early-outs here return true without emitting anything -- confirm the
/// caller's interpretation of the return value.
///
///----------------------------------------------------------------------------
bool
LowererMD::GenerateFastMul(IR::Instr * instrMul)
{
    // Given:
    //
    // dst = Mul src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1 -- clear the var tag from the value to be multiplied [Int31 only]
    // s2 = MOV src2
    // s2 = SAR s2, Js::VarTag_Shift -- extract the real src2 amount from the var [Int31 only]
    // s1 = IMUL s1, s2 -- do the signed mul
    // JO $helper -- bail if the result overflowed
    // s3 = MOV s1
    // TEST s3, s3 -- Check result is 0. might be -0. Result is -0 when a negative number is multiplied with 0.
    // JEQ $zero
    // JMP $nonzero
    // $zero: -- result of mul was 0. try to check for -0
    // s2 = ADD s2, src1 -- Add src1 to s2
    // JGT $nonzero -- positive 0. [Int31 only]
    // JGE $nonzero -- positive 0. [Int32 only]
    // dst = ToVar(-0.0) -- load negative 0
    // JMP $fallthru
    // $nonzero:
    // s3 = INC s3 -- restore the var tag on the result [Int31 only]
    // s3 = OR s3, AtomTag_IntPtr [Int32 only]
    // dst= MOV s3
    // JMP $fallthru
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelNonZero;
    IR::Instr * instr;
    IR::RegOpnd * opndReg1;
    IR::RegOpnd * opndReg2;
    IR::RegOpnd * s3;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;
    opndSrc1 = instrMul->GetSrc1();
    opndSrc2 = instrMul->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on mul instruction");
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        // Known non-int: no fast path emitted (see NOTE(review) above).
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    // (If not 2 Int31's, jump to $helper.)
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    labelNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    this->GenerateSmIntPairTest(instrMul, opndSrc1, opndSrc2, labelHelper);
    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    if (opndSrc1->IsImmediateOpnd())
    {
        // Canonicalize: keep any immediate in src2.
        IR::Opnd * temp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = temp;
    }
    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);
#if !INT32VAR
    // s1 = DEC s1
    instr = IR::Instr::New(Js::OpCode::DEC, opndReg1, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);
#endif
    if (opndSrc2->IsImmediateOpnd())
    {
        Assert(opndSrc2->IsAddrOpnd() && opndSrc2->AsAddrOpnd()->IsVar());
        // Materialize the untagged integer value of the constant var.
        IR::Opnd *opnd2 = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address), TyInt32, this->m_func);
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opnd2, this->m_func);
        instrMul->InsertBefore(instr);
    }
    else
    {
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, this->m_func);
        instrMul->InsertBefore(instr);
#if !INT32VAR
        // s2 = SAR s2, Js::VarTag_Shift
        instr = IR::Instr::New(
        Js::OpCode::SAR, opndReg2, opndReg2,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrMul->InsertBefore(instr);
#endif
    }
    // s1 = IMUL s1, s2
    instr = IR::Instr::New(Js::OpCode::IMUL2, opndReg1, opndReg1, opndReg2, this->m_func);
    instrMul->InsertBefore(instr);
    // JO $helper
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrMul->InsertBefore(instr);
    // MOV s3, s1
    s3 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, s3, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);
    // TEST s3, s3
    instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(s3);
    instr->SetSrc2(s3);
    instrMul->InsertBefore(instr);
    // JEQ $zero
    IR::LabelInstr *labelZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelZero, this->m_func);
    instrMul->InsertBefore(instr);
    // JMP $nonzero
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);
    // $zero:
    instrMul->InsertBefore(labelZero);
    // The product is zero: distinguish +0 (stay on the int path) from -0
    // (must yield the Number -0.0) by the sign of (s2 + src1).
    // s2 = ADD s2, src1
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg2, opndReg2, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);
    Legalize(instr);
    // JGT $nonzero
#if INT32VAR
    Js::OpCode greaterOpCode = Js::OpCode::JGE;
#else
    Js::OpCode greaterOpCode = Js::OpCode::JGT;
#endif
    instr = IR::BranchInstr::New(greaterOpCode, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);
    // dst = ToVar(-0.0) -- load negative 0
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), m_lowerer->LoadLibraryValueOpnd(instrMul, LibraryValue::ValueNegativeZero), this->m_func);
    instrMul->InsertBefore(instr);
    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);
    // $nonzero:
    instrMul->InsertBefore(labelNonZero);
#if !INT32VAR
    // s3 = INC s3
    instr = IR::Instr::New(Js::OpCode::INC, s3, s3, this->m_func);
    instrMul->InsertBefore(instr);
#endif
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    // Cast is fine. We know ChangeType returns IR::Opnd * but it
    // preserves the Type.
    //
    if(TyMachReg != s3->GetType())
    {
        s3 = static_cast<IR::RegOpnd *>(s3->UseWithNewType(TyMachPtr, this->m_func));
    }
#if INT32VAR
    // s3 = OR s3, AtomTag_IntPtr
    GenerateInt32ToVarConversion(s3, instrMul);
#endif
    // dst = MOV s3
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), s3, this->m_func);
    instrMul->InsertBefore(instr);
    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    instrMul->InsertBefore(labelHelper);
    instrMul->InsertAfter(labelFallThru);
    return true;
}
// GenerateFastNeg: inline fast path for negating a (likely) tagged-int var.
// Returns:
// - false when the source is a known int constant -- the negate is folded to
//   a constant assignment and the caller skips the helper call;
// - true otherwise (fast path emitted, or source known non-int so only the
//   caller's helper call applies).
bool
LowererMD::GenerateFastNeg(IR::Instr * instrNeg)
{
    // Given:
    //
    // dst = Neg src
    //
    // Generate:
    //
    // if not int, jump $helper
    // if src == 0 -- test for zero (must be handled by the runtime to preserve
    // JEQ $helper difference btw +0 and -0)
    // dst = MOV src
    // dst = NEG dst -- do an inline NEG
    // dst = ADD dst, 2 -- restore the var tag on the result [int31 only]
    // JO $helper
    // dst = OR dst, AtomTag_Ptr [int32 only]
    // JMP $fallthru
    // $helper:
    // (caller generates helper call)
    // $fallthru:
    IR::Instr * instr;
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru = nullptr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndDst;
    bool usingNewDst = false;
    opndSrc1 = instrNeg->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Neg instruction");
    // When dst aliases src, negate into a fresh temp and only copy it to dst at
    // the end (presumably so src is not clobbered on the $helper path -- confirm).
    if(opndSrc1->IsEqual(instrNeg->GetDst()))
    {
        usingNewDst = true;
        opndDst = IR::RegOpnd::New(TyInt32, this->m_func);
    }
    else
    {
        opndDst = instrNeg->GetDst()->UseWithNewType(TyInt32, this->m_func);
    }
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        // Constant source: fold the negation at JIT time into a plain assign.
        IR::Opnd *newOpnd;
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();
        if (value == 0)
        {
            // If the negate operand is zero, the result is -0.0, which is a Number rather than an Int31.
            newOpnd = m_lowerer->LoadLibraryValueOpnd(instrNeg, LibraryValue::ValueNegativeZero);
        }
        else
        {
            // negation below can overflow because max negative int32 value > max positive value by 1.
            newOpnd = IR::AddrOpnd::NewFromNumber(-(int64)value, m_func);
        }
        instrNeg->ClearBailOutInfo();
        instrNeg->FreeSrc1();
        instrNeg->SetSrc1(newOpnd);
        instrNeg = this->ChangeToAssign(instrNeg);
        // Skip lowering call to helper
        return false;
    }
    bool isInt = (opndSrc1->IsTaggedInt());
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        // Known non-int: nothing to emit here; only the helper call applies.
        return true;
    }
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!isInt)
    {
        GenerateSmIntTest(opndSrc1, instrNeg, labelHelper);
    }
    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    // Zero must go to the helper: -0 is the Number -0.0, not a tagged int.
    GenerateTaggedZeroTest(opndSrc1, instrNeg, labelHelper);
    // dst = MOV src
    instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, this->m_func);
    instrNeg->InsertBefore(instr);
    // dst = NEG dst
    instr = IR::Instr::New(Js::OpCode::NEG, opndDst, opndDst, this->m_func);
    instrNeg->InsertBefore(instr);
#if !INT32VAR
    // dst = ADD dst, 2
    instr = IR::Instr::New(Js::OpCode::ADD, opndDst, opndDst, IR::IntConstOpnd::New(2, TyInt32, this->m_func), this->m_func);
    instrNeg->InsertBefore(instr);
#endif
    // JO $helper
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrNeg->InsertBefore(instr);
    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndDst->GetType())
    {
        opndDst = opndDst->UseWithNewType(TyMachPtr, this->m_func);
    }
#if INT32VAR
    GenerateInt32ToVarConversion(opndDst, instrNeg);
#endif
    if(usingNewDst)
    {
        instr = IR::Instr::New(Js::OpCode::MOV, instrNeg->GetDst(), opndDst, this->m_func);
        instrNeg->InsertBefore(instr);
    }
    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrNeg->InsertBefore(instr);
    // $helper:
    // (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNeg->InsertBefore(labelHelper);
    instrNeg->InsertAfter(labelFallThru);
    return true;
}
  3091. void
  3092. LowererMD::GenerateFastBrS(IR::BranchInstr *brInstr)
  3093. {
  3094. IR::Opnd *src1 = brInstr->UnlinkSrc1();
  3095. Assert(src1->IsIntConstOpnd() || src1->IsAddrOpnd() || src1->IsRegOpnd());
  3096. IR::Instr *cmpInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  3097. cmpInstr->SetSrc1(m_lowerer->LoadOptimizationOverridesValueOpnd(brInstr, OptimizationOverridesValue::OptimizationOverridesSideEffects));
  3098. cmpInstr->SetSrc2(src1);
  3099. brInstr->InsertBefore(cmpInstr);
  3100. Legalize(cmpInstr);
  3101. Js::OpCode opcode = Js::OpCode::InvalidOpCode;
  3102. switch(brInstr->m_opcode)
  3103. {
  3104. case Js::OpCode::BrHasSideEffects:
  3105. opcode = Js::OpCode::JNE;
  3106. break;
  3107. case Js::OpCode::BrNotHasSideEffects:
  3108. opcode = Js::OpCode::JEQ;
  3109. break;
  3110. default:
  3111. Assert(UNREACHED);
  3112. __assume(false);
  3113. }
  3114. brInstr->m_opcode = opcode;
  3115. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateSmIntPairTest
///
/// Generate code to test whether the given operands are both Int31 vars
/// and branch to the given label if not.
///
/// Two variants: under !INT32VAR each var carries the AtomTag bit in its low
/// bits; under INT32VAR the tag occupies the upper 32 bits and both tags are
/// combined into a single 32-bit compare against AtomTag_Pair.
///
/// Returns the instruction preceding the emitted sequence.
///
///----------------------------------------------------------------------------
#if !INT32VAR
IR::Instr *
LowererMD::GenerateSmIntPairTest(
IR::Instr * instrInsert,
IR::Opnd * opndSrc1,
IR::Opnd * opndSrc2,
IR::LabelInstr * labelFail)
{
    IR::Opnd * opndReg;
    IR::Instr * instrPrev = instrInsert->m_prev;
    IR::Instr * instr;
    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);
    // Canonicalize: if src1 is statically tagged, swap so the known-tagged
    // operand is src2.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }
    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically tagged: nothing to test at runtime.
            return instrPrev;
        }
        // Only src1 is unknown: test its tag bit directly.
        // TEST src1, AtomTag
        // JEQ $fail
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc1);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
        instrInsert->InsertBefore(instr);
    }
    else
    {
        // Both unknown: (src1 & AtomTag) AND src2 is nonzero only when both
        // vars carry the tag bit, so a single TEST covers the pair.
        // s1 = MOV src1
        // s1 = AND s1, 1
        // TEST s1, src2
        // JEQ $fail
        // s1 = MOV src1
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);
        // s1 = AND s1, AtomTag
        instr = IR::Instr::New(
        Js::OpCode::AND, opndReg, opndReg, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);
        // TEST s1, src2
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(opndSrc2);
        instrInsert->InsertBefore(instr);
    }
    // JEQ $fail
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);
    return instrPrev;
}
#else
IR::Instr *
LowererMD::GenerateSmIntPairTest(
IR::Instr * instrInsert,
IR::Opnd * opndSrc1,
IR::Opnd * opndSrc2,
IR::LabelInstr * labelFail)
{
    IR::Opnd * opndReg;
    IR::Instr * instrPrev = instrInsert->m_prev;
    IR::Instr * instr;
    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);
    // Canonicalize: if src1 is statically tagged, swap so the known-tagged
    // operand is src2.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }
    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically tagged: nothing to test at runtime.
            return instrPrev;
        }
        // Only src1 is unknown: a single-operand tag test suffices.
        GenerateSmIntTest(opndSrc1, instrInsert, labelFail);
        return instrPrev;
    }
    else
    {
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::Opnd * opndReg1;
        // s1 = MOV src1
        // s1 = SHR s1, VarTag_Shift
        // s2 = MOV src2
        // s2 = SHR s2, 32
        // s1 = OR s1, s2 ------ move both tags to the lower 32 bits
        // CMP s1, AtomTag_Pair ------ compare the tags together to the expected tag pair
        // JNE $fail
        // s1 = MOV src1
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);
        // s1 = SHR s1, VarTag_Shift
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);
        // s2 = MOV src2
        opndReg1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc2, this->m_func);
        instrInsert->InsertBefore(instr);
        // s2 = SHR s2, 32
        instr = IR::Instr::New(Js::OpCode::SHR, opndReg1, opndReg1, IR::IntConstOpnd::New(32, TyInt8, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);
        // s1 = OR s1, s2
        instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndReg1, this->m_func);
        instrInsert->InsertBefore(instr);
        opndReg = opndReg->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();
        // CMP s1, AtomTag_Pair
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag_Pair, TyInt32, this->m_func, true));
        instrInsert->InsertBefore(instr);
    }
    // JNE $fail
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);
    return instrPrev;
}
#endif
  3249. void
  3250. LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndTaggedType)
  3251. {
  3252. // Generate
  3253. //
  3254. // MOV taggedType, type
  3255. // OR taggedType, InlineCacheAuxSlotTypeTag
  3256. // MOV taggedType, type
  3257. {
  3258. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, opndTaggedType, opndType, instrLdSt->m_func);
  3259. instrLdSt->InsertBefore(instrMov);
  3260. }
  3261. // OR taggedType, InlineCacheAuxSlotTypeTag
  3262. {
  3263. IR::IntConstOpnd * opndAuxSlotTag = IR::IntConstOpnd::New(InlineCacheAuxSlotTypeTag, TyMachPtr, instrLdSt->m_func);
  3264. IR::Instr * instrAnd = IR::Instr::New(Js::OpCode::OR, opndTaggedType, opndTaggedType, opndAuxSlotTag, instrLdSt->m_func);
  3265. instrLdSt->InsertBefore(instrAnd);
  3266. }
  3267. }
  3268. void
  3269. LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
  3270. {
  3271. // Generate
  3272. //
  3273. // MOV r1, type
  3274. // SHR r1, PolymorphicInlineCacheShift
  3275. // AND r1, (size - 1)
  3276. // SHL r1, log2(sizeof(Js::InlineCache))
  3277. // LEA inlineCache, [inlineCache + r1]
  3278. // MOV r1, type
  3279. IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, instrLdSt->m_func);
  3280. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndOffset, opndType, instrLdSt->m_func);
  3281. instrLdSt->InsertBefore(instr);
  3282. IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
  3283. IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
  3284. // instead of generating
  3285. // SHR r1, PolymorphicInlineCacheShift
  3286. // AND r1, (size - 1)
  3287. // SHL r1, log2(sizeof(Js::InlineCache))
  3288. //
  3289. // we can generate:
  3290. // SHR r1, (PolymorphicInlineCacheShift - log2(sizeof(Js::InlineCache))
  3291. // AND r1, (size - 1) << log2(sizeof(Js::InlineCache))
  3292. Assert(rightShiftAmount > leftShiftAmount);
  3293. instr = IR::Instr::New(Js::OpCode::SHR, opndOffset, opndOffset, IR::IntConstOpnd::New(rightShiftAmount - leftShiftAmount, TyUint8, instrLdSt->m_func, true), instrLdSt->m_func);
  3294. instrLdSt->InsertBefore(instr);
  3295. instr = IR::Instr::New(Js::OpCode::AND, opndOffset, opndOffset, IR::IntConstOpnd::New(((__int64)(polymorphicInlineCacheSize - 1) << leftShiftAmount), TyMachReg, instrLdSt->m_func, true), instrLdSt->m_func);
  3296. instrLdSt->InsertBefore(instr);
  3297. // LEA inlineCache, [inlineCache + r1]
  3298. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(opndInlineCache, opndOffset, TyMachPtr, instrLdSt->m_func);
  3299. instr = IR::Instr::New(Js::OpCode::LEA, opndInlineCache, indirOpnd, instrLdSt->m_func);
  3300. instrLdSt->InsertBefore(instr);
  3301. }
// ChangeToWriteBarrierAssign: lower an assignment and, in
// RECYCLER_WRITE_BARRIER_JIT builds, follow a pointer-sized MOV into a
// possibly-recycler destination with a write barrier. Returns the resulting
// lowered instruction.
IR::Instr *
LowererMD::ChangeToWriteBarrierAssign(IR::Instr * assignInstr, const Func* func)
{
#ifdef RECYCLER_WRITE_BARRIER_JIT
    IR::Opnd* dest = assignInstr->GetDst();
    auto threadContextInfo = func->GetTopFunc()->GetThreadContextInfo();
    void* destAddr = nullptr;
    bool isPossibleBarrieredDest = false;
    // Only pointer-sized stores can hold a GC reference.
    if (TySize[dest->GetType()] == sizeof(void*))
    {
        if (dest->IsIndirOpnd())
        {
            Assert(!dest->AsIndirOpnd()->HasAddrKind());
            isPossibleBarrieredDest = true;
        }
        else if (dest->IsMemRefOpnd())
        {
            // looks all thread context field access are from MemRefOpnd
            destAddr = (void*)dest->AsMemRefOpnd()->GetMemLoc();
            // Require a pointer-aligned, non-null address and skip the known
            // thread-context slots, which never hold recycler pointers.
            isPossibleBarrieredDest = destAddr != nullptr
            && ((intptr_t)destAddr % sizeof(void*)) == 0
            && destAddr != (void*)threadContextInfo->GetImplicitCallFlagsAddr()
            && destAddr != (void*)threadContextInfo->GetDisableImplicitFlagsAddr()
            && destAddr != (void*)threadContextInfo->GetBailOutRegisterSaveSpaceAddr();
            if (isPossibleBarrieredDest)
            {
                Assert(Recycler::WBCheckIsRecyclerAddress((char*)destAddr));
            }
        }
    }
#endif
    IR::Instr * instr = ChangeToAssignNoBarrierCheck(assignInstr);
    // Now insert write barrier if necessary
#ifdef RECYCLER_WRITE_BARRIER_JIT
    if (isPossibleBarrieredDest
    && assignInstr->m_opcode == Js::OpCode::MOV // ignore SSE instructions like MOVSD
    && assignInstr->GetSrc1()->IsWriteBarrierTriggerableValue())
    {
        instr = LowererMD::GenerateWriteBarrier(assignInstr);
    }
#endif
    return instr;
}
  3345. void
  3346. LowererMD::GenerateWriteBarrierAssign(IR::MemRefOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3347. {
  3348. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3349. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3350. if (opndSrc->IsWriteBarrierTriggerableValue())
  3351. {
  3352. void * address = (void *)opndDst->AsMemRefOpnd()->GetMemLoc();
  3353. #ifdef RECYCLER_WRITE_BARRIER_BYTE
  3354. // WriteBarrier-TODO: need to pass card table address through RPC
  3355. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3356. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyInt8, insertBeforeInstr->m_func);
  3357. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntry, IR::IntConstOpnd::New(1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr->m_func);
  3358. insertBeforeInstr->InsertBefore(movInstr);
  3359. #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
  3360. if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(RecyclerVerifyMark))
  3361. {
  3362. this->LoadHelperArgument(insertBeforeInstr, opndDst);
  3363. IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
  3364. insertBeforeInstr->InsertBefore(instrCall);
  3365. this->ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
  3366. }
  3367. #endif
  3368. #else
  3369. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3370. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyMachPtr, assignInstr->m_func);
  3371. IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntry,
  3372. IR::IntConstOpnd::New(1 << ((uint)address >> 7), TyInt32, assignInstr->m_func), assignInstr->m_func);
  3373. assignInstr->InsertBefore(orInstr);
  3374. #endif
  3375. }
  3376. #endif
  3377. }
  3378. void
  3379. LowererMD::GenerateWriteBarrierAssign(IR::IndirOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3380. {
  3381. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3382. if (opndSrc->IsWriteBarrierTriggerableValue())
  3383. {
  3384. IR::RegOpnd * writeBarrierAddrRegOpnd = IR::RegOpnd::New(TyMachPtr, insertBeforeInstr->m_func);
  3385. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::LEA, writeBarrierAddrRegOpnd, opndDst, insertBeforeInstr->m_func));
  3386. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV,
  3387. IR::IndirOpnd::New(writeBarrierAddrRegOpnd, 0, TyMachReg, insertBeforeInstr->m_func), opndSrc, insertBeforeInstr->m_func);
  3388. insertBeforeInstr->InsertBefore(movInstr);
  3389. GenerateWriteBarrier(movInstr);
  3390. // The mov happens above, and it's slightly faster doing it that way since we've already calculated the address we're writing to
  3391. return;
  3392. }
  3393. #endif
  3394. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3395. return;
  3396. }
#ifdef RECYCLER_WRITE_BARRIER_JIT
// Emit the software write-barrier card-table update for the store performed
// by "assignInstr". In the byte-granular configuration the emitted sequence
// is:
//     LEA reg1, targetOfWrite      (inserted before the store)
//     <the store itself>
//     SHR reg1, 12                 (inserted after the store)
//     MOV reg2, cardTableAddress
//     MOV [reg1 + reg2], 1
// Returns the leading LEA so the caller can continue lowering from the first
// emitted instruction.
IR::Instr*
LowererMD::GenerateWriteBarrier(IR::Instr * assignInstr)
{
#if defined(RECYCLER_WRITE_BARRIER_BYTE)
    PHASE_PRINT_TRACE(Js::JitWriteBarrierPhase, assignInstr->m_func, _u("Generating write barrier\n"));
    // LEA reg1, targetOfWrite -- compute the address being written to.
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachPtr, assignInstr->m_func);
    IR::Instr * loadIndexInstr = IR::Instr::New(Js::OpCode::LEA, indexOpnd, assignInstr->GetDst(), assignInstr->m_func);
    assignInstr->InsertBefore(loadIndexInstr);
    // SHR reg1, 12 -- one card byte covers a 4KB region; note this is
    // inserted AFTER the store, so the barrier executes once the write is done.
    IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
        IR::IntConstOpnd::New(12 /* 1 << 12 = 4096 */, TyInt8, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertAfter(shiftBitInstr);
    // The cardtable address is likely 64 bits already so we have to load it to a register
    // That is, we have to do the following:
    // LEA reg1, targetOfWrite
    // SHR reg1, 12
    // MOV reg2, cardTableAddress
    // MOV [reg1 + reg2], 1
    //
    // Instead of doing this:
    // LEA reg1, targetOfWrite
    // SHR reg1, 12
    // MOV [cardTableAddress + reg2], 1
    //
    //TODO: (leish)(swb) hoist RecyclerWriteBarrierManager::GetAddressOfCardTable()
    IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
    IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
        IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindWriteBarrierCardTable, assignInstr->m_func),
        assignInstr->m_func);
    shiftBitInstr->InsertAfter(cardTableAddrInstr);
    // MOV [cardTable + card], 1 -- mark the card dirty.
    IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd,
        TyInt8, assignInstr->m_func);
    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntryOpnd, IR::IntConstOpnd::New(1, TyInt8, assignInstr->m_func), assignInstr->m_func);
    cardTableAddrInstr->InsertAfter(movInstr);
    return loadIndexInstr;
#else
    // Bit-granular card-table variant: build bit = 1 << ((addr >> 7) & 31)
    // in CL and OR it into the card word selected by (addr >> 12).
    // NOTE(review): this path appears stale — "writeBarrierAddrRegOpnd" is not
    // declared anywhere in this function, and this branch returns no value
    // despite the IR::Instr* return type. It cannot compile as-is if
    // RECYCLER_WRITE_BARRIER_BYTE is ever undefined; confirm before relying
    // on this configuration.
    Assert(writeBarrierAddrRegOpnd->IsRegOpnd());
    // MOV shiftBit, addr -- shift count must live in the shift-count register.
    IR::RegOpnd * shiftBitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
    shiftBitOpnd->SetReg(LowererMDArch::GetRegShiftCount());
    IR::Instr * moveShiftBitOpnd = IR::Instr::New(Js::OpCode::MOV, shiftBitOpnd, writeBarrierAddrRegOpnd, assignInstr->m_func);
    assignInstr->InsertBefore(moveShiftBitOpnd);
    IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, shiftBitOpnd, shiftBitOpnd,
        IR::IntConstOpnd::New(7 /* 1 << 7 = 128 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertBefore(shiftBitInstr);
    // MOV bit, 1 ; SHL bit, cl -- materialize the card bit.
    IR::RegOpnd * bitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
    IR::Instr * mov1Instr = IR::Instr::New(Js::OpCode::MOV, bitOpnd,
        IR::IntConstOpnd::New(1, TyInt32, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertBefore(mov1Instr);
    IR::Instr * bitInstr = IR::Instr::New(Js::OpCode::SHL, bitOpnd, bitOpnd, shiftBitOpnd, assignInstr->m_func);
    assignInstr->InsertBefore(bitInstr);
    // SHR index, 5 -- 32 card bits per word; reuses the shift-count register.
    IR::RegOpnd * indexOpnd = shiftBitOpnd;
    IR::Instr * indexInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
        IR::IntConstOpnd::New(5 /* 1 << 5 = 32 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
    assignInstr->InsertBefore(indexInstr);
    IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
    IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
        IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindDynamicMisc, assignInstr->m_func),
        assignInstr->m_func);
    assignInstr->InsertBefore(cardTableAddrInstr);
    IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd, LowererMDArch::GetDefaultIndirScale(),
        TyInt32, assignInstr->m_func);
    // OR [cardTable + index*scale], bit -- mark the card dirty.
    IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntryOpnd, cardTableEntryOpnd,
        bitOpnd, assignInstr->m_func);
    assignInstr->InsertBefore(orInstr);
#endif
}
#endif
  3464. void
  3465. LowererMD::GenerateStFldFromLocalInlineCache(
  3466. IR::Instr * instrStFld,
  3467. IR::RegOpnd * opndBase,
  3468. IR::Opnd * opndSrc,
  3469. IR::RegOpnd * inlineCache,
  3470. IR::LabelInstr * labelFallThru,
  3471. bool isInlineSlot)
  3472. {
  3473. IR::Instr * instr;
  3474. IR::Opnd* slotIndexOpnd;
  3475. IR::RegOpnd * opndIndirBase = opndBase;
  3476. if (!isInlineSlot)
  3477. {
  3478. // slotArray = MOV base->slots -- load the slot array
  3479. IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3480. IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrStFld->m_func);
  3481. instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrStFld->m_func);
  3482. instrStFld->InsertBefore(instr);
  3483. opndIndirBase = opndSlotArray;
  3484. }
  3485. // slotIndex = MOV [&inlineCache->u.local.inlineSlotOffsetOrAuxSlotIndex] -- load the cached slot offset or index
  3486. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3487. slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrStFld->m_func);
  3488. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrStFld->m_func);
  3489. instrStFld->InsertBefore(instr);
  3490. // [base + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  3491. // [slotArray + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  3492. IR::IndirOpnd * storeLocIndirOpnd = IR::IndirOpnd::New(opndIndirBase, opndSlotIndex,
  3493. LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  3494. GenerateWriteBarrierAssign(storeLocIndirOpnd, opndSrc, instrStFld);
  3495. // JMP $fallthru
  3496. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrStFld->m_func);
  3497. instrStFld->InsertBefore(instr);
  3498. }
  3499. //----------------------------------------------------------------------------
  3500. //
  3501. // LowererMD::GenerateFastScopedLdFld
  3502. //
  3503. // Make use of the helper to cache the type and slot index used to do a ScopedLdFld
  3504. // when the scope is an array of length 1.
  3505. // Extract the only element from array and do an inline load from the appropriate slot
  3506. // if the type hasn't changed since the last time this ScopedLdFld was executed.
  3507. //
  3508. //----------------------------------------------------------------------------
IR::Instr *
LowererMD::GenerateFastScopedLdFld(IR::Instr * instrLdScopedFld)
{
    // Inline fast path for a scoped field load when the frame display holds
    // exactly one scope. Emitted shape:
    //
    //  CMP [base + offset(length)], 1     -- get the length on array and test if it is 1.
    //  JNE $helper
    //  MOV r1, [base + offset(scopes)]    -- load the first scope
    //  MOV r2, r1->type
    //  CMP r2, [&(inlineCache->u.local.type)] -- check type
    //  JNE $helper
    //  MOV r1, r1->slots                  -- load the slots array
    //  MOV r2, [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
    //  MOV dst, [r1+r2]                   -- load the value from the slot
    //  JMP $fallthru
    // $helper:
    //  dst = CALL PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
    // $fallthru:
    //
    // Returns the instruction lowering should continue from (the one just
    // before the inserted $helper label).
    IR::RegOpnd * opndBase;
    IR::Instr * instr;
    IR::IndirOpnd * indirOpnd;
    IR::LabelInstr * labelHelper;
    IR::Opnd * opndDst;
    IR::RegOpnd * inlineCache;
    IR::RegOpnd *r1;
    IR::LabelInstr * labelFallThru;

    IR::Opnd *propertySrc = instrLdScopedFld->GetSrc1();
    AssertMsg(propertySrc->IsSymOpnd() && propertySrc->AsSymOpnd()->IsPropertySymOpnd() && propertySrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected property sym operand as src of LdScoped");

    IR::PropertySymOpnd * propertySymOpnd = propertySrc->AsPropertySymOpnd();

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");

    // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
    // NOTE(review): src1 is TyInt16 while the immediate is TyInt8 — the
    // encoder presumably emits a 16-bit compare with a sign-extended imm8;
    // confirm if touching this.
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(indirOpnd);
    instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
    instrLdScopedFld->InsertBefore(instr);

    // JNE $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
    instrLdScopedFld->InsertBefore(instr);

    // MOV r1, [base + offset(scopes)] -- load the first scope
    indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
    r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
    instrLdScopedFld->InsertBefore(instr);

    // first load the inlineCache type
    inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    Assert(inlineCache != nullptr);

    IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    opndDst = instrLdScopedFld->GetDst();
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // The scope object is never a tagged number; record that on the sym.
    r1->m_sym->m_isNotNumber = true;

    // Load the type (branches to $helper if r1 is not an object).
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdScopedFld, r1, opndType, labelHelper);

    // Check the local cache with the tagged type
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrLdScopedFld, opndType, opndTaggedType);
    instrLdScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdScopedFld, propertySymOpnd), this->m_func));
    Lowerer::GenerateLocalInlineCacheCheck(instrLdScopedFld, opndTaggedType, inlineCache, labelHelper);
    // On a cache hit, load from the aux slot array (isInlineSlot == false).
    Lowerer::GenerateLdFldFromLocalInlineCache(instrLdScopedFld, r1, opndDst, inlineCache, labelFallThru, false);

    // $helper:
    //      dst = CALL PatchGetPropertyScoped(inlineCache, opndBase, propertyId, srcBase, scriptContext)
    // $fallthru:
    instrLdScopedFld->InsertBefore(labelHelper);
    instrLdScopedFld->InsertAfter(labelFallThru);

    return instrLdScopedFld->m_prev;
}
  3576. //----------------------------------------------------------------------------
  3577. //
  3578. // LowererMD::GenerateFastScopedStFld
  3579. //
  3580. // Make use of the helper to cache the type and slot index used to do a ScopedStFld
  3581. // when the scope is an array of length 1.
  3582. // Extract the only element from array and do an inline load from the appropriate slot
  3583. // if the type hasn't changed since the last time this ScopedStFld was executed.
  3584. //
  3585. //----------------------------------------------------------------------------
  3586. IR::Instr *
  3587. LowererMD::GenerateFastScopedStFld(IR::Instr * instrStScopedFld)
  3588. {
  3589. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  3590. // JNE $helper
  3591. // MOV r1, [base + offset(scopes)] -- load the first scope
  3592. // MOV r2, r1->type
  3593. // CMP r2, [&(inlineCache->u.local.type)] -- check type
  3594. // JNE $helper
  3595. // MOV r1, r1->slots -- load the slots array
  3596. // MOV r2, [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  3597. // [r1 + r2*4] = MOV value -- store the value directly to the slot
  3598. // JMP $fallthru
  3599. // $helper:
  3600. // CALL PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
  3601. // $fallthru:
  3602. IR::RegOpnd * opndBase;
  3603. IR::Instr * instr;
  3604. IR::IndirOpnd * indirOpnd;
  3605. IR::LabelInstr * labelHelper;
  3606. IR::Opnd * opndDst;
  3607. IR::RegOpnd * inlineCache;
  3608. IR::RegOpnd *r1;
  3609. IR::LabelInstr * labelFallThru;
  3610. IR::Opnd *newValue = instrStScopedFld->GetSrc1();
  3611. // IR::Opnd *defaultInstance = instrStScopedFld->UnlinkSrc2();
  3612. opndDst = instrStScopedFld->GetDst();
  3613. AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(),
  3614. "Expected property sym operand as dst of StScoped");
  3615. IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
  3616. opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  3617. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  3618. AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");
  3619. // CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
  3620. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
  3621. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  3622. instr->SetSrc1(indirOpnd);
  3623. instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
  3624. instrStScopedFld->InsertBefore(instr);
  3625. // JNE $helper
  3626. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  3627. instrStScopedFld->InsertBefore(instr);
  3628. // MOV r1, [base + offset(scopes)] -- load the first scope
  3629. indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
  3630. r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  3631. instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
  3632. instrStScopedFld->InsertBefore(instr);
  3633. //first load the inlineCache type
  3634. inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  3635. Assert(inlineCache != nullptr);
  3636. IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3637. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3638. r1->m_sym->m_isNotNumber = true;
  3639. // Load the type
  3640. this->m_lowerer->GenerateObjectTestAndTypeLoad(instrStScopedFld, r1, opndType, labelHelper);
  3641. // Check the local cache with the tagged type
  3642. IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3643. GenerateLoadTaggedType(instrStScopedFld, opndType, opndTaggedType);
  3644. instrStScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrStScopedFld, propertySymOpnd), this->m_func));
  3645. Lowerer::GenerateLocalInlineCacheCheck(instrStScopedFld, opndTaggedType, inlineCache, labelHelper);
  3646. GenerateStFldFromLocalInlineCache(instrStScopedFld, r1, newValue, inlineCache, labelFallThru, false);
  3647. // $helper:
  3648. // CALL PatchSetPropertyScoped(inlineCache, opndBase, propertyId, newValue, defaultInstance, scriptContext)
  3649. // $fallthru:
  3650. instrStScopedFld->InsertBefore(labelHelper);
  3651. instrStScopedFld->InsertAfter(labelFallThru);
  3652. return instrStScopedFld->m_prev;
  3653. }
  3654. IR::Opnd *
  3655. LowererMD::CreateStackArgumentsSlotOpnd()
  3656. {
  3657. StackSym *sym = StackSym::New(TyMachReg, this->m_func);
  3658. sym->m_offset = -MachArgsSlotOffset;
  3659. sym->m_allocated = true;
  3660. return IR::SymOpnd::New(sym, TyMachReg, this->m_func);
  3661. }
IR::RegOpnd *
LowererMD::GenerateUntagVar(IR::RegOpnd * src, IR::LabelInstr * labelFail, IR::Instr * assignInstr, bool generateTagCheck)
{
    // Extract the 32-bit integer payload from the tagged-int var "src",
    // inserting all code before "assignInstr". When "generateTagCheck" is
    // true, control transfers to "labelFail" if "src" is not a tagged int.
    // Returns the TyInt32 register holding the untagged value.
    Assert(src->IsVar());

    // MOV valueOpnd, index
    IR::RegOpnd *valueOpnd = IR::RegOpnd::New(TyInt32, this->m_func);

    //
    // Convert Index to 32 bits.
    //
    IR::Opnd * opnd = src->UseWithNewType(TyMachReg, this->m_func);
#if INT32VAR
    // 64-bit tagging scheme: the integer payload lives in the low 32 bits of
    // the var, so a truncating move suffices once the tag check has passed.
    if (generateTagCheck)
    {
        Assert(!opnd->IsTaggedInt());
        this->GenerateSmIntTest(opnd, assignInstr, labelFail);
    }

    // Moving into r2 clears the tag bits on AMD64.
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, valueOpnd, opnd, this->m_func);
    assignInstr->InsertBefore(instr);
#else
    // 32-bit tagging scheme: shift the tag bit(s) out of the value.
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, valueOpnd, opnd, this->m_func);
    assignInstr->InsertBefore(instr);

    // SAR valueOpnd, Js::VarTag_Shift
    instr = IR::Instr::New(Js::OpCode::SAR, valueOpnd, valueOpnd,
        IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    assignInstr->InsertBefore(instr);

    if (generateTagCheck)
    {
        Assert(!opnd->IsTaggedInt());

        // SAR set the carry flag (CF) to 1 if the lower bit is 1
        // JAE will jmp if CF = 0
        instr = IR::BranchInstr::New(Js::OpCode::JAE, labelFail, this->m_func);
        assignInstr->InsertBefore(instr);
    }
#endif
    return valueOpnd;
}
  3699. IR::RegOpnd *LowererMD::LoadNonnegativeIndex(
  3700. IR::RegOpnd *indexOpnd,
  3701. const bool skipNegativeCheck,
  3702. IR::LabelInstr *const notTaggedIntLabel,
  3703. IR::LabelInstr *const negativeLabel,
  3704. IR::Instr *const insertBeforeInstr)
  3705. {
  3706. Assert(indexOpnd);
  3707. Assert(indexOpnd->IsVar() || indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
  3708. Assert(indexOpnd->GetType() != TyUint32 || skipNegativeCheck);
  3709. Assert(!indexOpnd->IsVar() || notTaggedIntLabel);
  3710. Assert(skipNegativeCheck || negativeLabel);
  3711. Assert(insertBeforeInstr);
  3712. if(indexOpnd->IsVar())
  3713. {
  3714. if (indexOpnd->GetValueType().IsLikelyFloat()
  3715. #ifdef _M_IX86
  3716. && AutoSystemInfo::Data.SSE2Available()
  3717. #endif
  3718. )
  3719. {
  3720. return m_lowerer->LoadIndexFromLikelyFloat(indexOpnd, skipNegativeCheck, notTaggedIntLabel, negativeLabel, insertBeforeInstr);
  3721. }
  3722. // mov intIndex, index
  3723. // sar intIndex, 1
  3724. // jae $notTaggedIntOrNegative
  3725. indexOpnd = m_lowerer->GenerateUntagVar(indexOpnd, notTaggedIntLabel, insertBeforeInstr, !indexOpnd->IsTaggedInt());
  3726. }
  3727. if(!skipNegativeCheck)
  3728. {
  3729. // test index, index
  3730. // js $notTaggedIntOrNegative
  3731. Lowerer::InsertTestBranch(indexOpnd, indexOpnd, Js::OpCode::JSB, negativeLabel, insertBeforeInstr);
  3732. }
  3733. return indexOpnd;
  3734. }
  3735. // Inlines fast-path for int Mul/Add or int Mul/Sub. If not int, call MulAdd/MulSub helper
bool LowererMD::TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev)
{
    // Try to fuse the pattern "t = a * b; d = t +/- c" (where t is single-def
    // and dies at the Add/Sub) into an int31 fast path followed by a combined
    // MulAdd/MulSub helper call. Returns true and removes the Mul when the
    // fusion applies; returns false (leaving both instructions untouched)
    // otherwise. On success *pInstrPrev is set so the main lowering loop
    // resumes before the removed Mul.
    IR::Instr *instrMul = instrAdd->GetPrevRealInstrOrLabel();
    IR::Opnd *addSrc;
    IR::RegOpnd *addCommonSrcOpnd;

    Assert(instrAdd->m_opcode == Js::OpCode::Add_A || instrAdd->m_opcode == Js::OpCode::Sub_A);

    bool isSub = (instrAdd->m_opcode == Js::OpCode::Sub_A) ? true : false;

    // Mul needs to be a single def reg
    if (instrMul->m_opcode != Js::OpCode::Mul_A || instrMul->GetDst()->IsRegOpnd() == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    if (instrMul->HasBailOutInfo())
    {
        // Bailout will be generated for the Add, but not the Mul.
        // We could handle this, but this path isn't used that much anymore.
        return false;
    }

    IR::RegOpnd *regMulDst = instrMul->GetDst()->AsRegOpnd();

    if (regMulDst->m_sym->m_isSingleDef == false)
    {
        // Cannot generate MulAdd
        return false;
    }

    // Only handle a * b + c, so dst of Mul needs to match left source of Add
    if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc1()))
    {
        addCommonSrcOpnd = instrAdd->GetSrc1()->AsRegOpnd();
        addSrc = instrAdd->GetSrc2();
    }
    else if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc2()))
    {
        addSrc = instrAdd->GetSrc1();
        addCommonSrcOpnd = instrAdd->GetSrc2()->AsRegOpnd();
    }
    else
    {
        return false;
    }

    // Only handle a * b + c where c != a * b
    if (instrAdd->GetSrc1()->IsEqual(instrAdd->GetSrc2()))
    {
        return false;
    }

    // The Mul result must die here; otherwise removing the Mul is unsafe.
    if (addCommonSrcOpnd->m_isTempLastUse == false)
    {
        return false;
    }

    IR::Opnd *mulSrc1 = instrMul->GetSrc1();
    IR::Opnd *mulSrc2 = instrMul->GetSrc2();

    // When both Mul sources are known tagged ints, the normal lowering is
    // presumably better; don't fuse.
    if (mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsTaggedInt()
        && mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsTaggedInt())
    {
        return false;
    }

    // Save prevInstr for the main lower loop
    *pInstrPrev = instrMul->m_prev;

    // Generate int31 fast-path for Mul, go to MulAdd helper if it fails, or one of the source is marked notInt
    if (!(addSrc->IsRegOpnd() && addSrc->AsRegOpnd()->IsNotInt())
        && !(mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsNotInt())
        && !(mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsNotInt()))
    {
        this->GenerateFastMul(instrMul);

        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        IR::Instr *instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
        instrMul->InsertBefore(instr);

        // Generate int31 fast-path for Add
        bool success;
        if (isSub)
        {
            success = this->GenerateFastSub(instrAdd);
        }
        else
        {
            success = this->GenerateFastAdd(instrAdd);
        }

        if (!success)
        {
            // No fast add/sub path was emitted, so the "helper" label is on
            // the main path; clear the helper flag for correct block layout.
            labelHelper->isOpHelper = false;
        }

        // Generate MulAdd helper call
        instrAdd->InsertBefore(labelHelper);
    }

    // Helper arguments are pushed in reverse: temp, scriptContext, c, b, a.
    if (instrAdd->dstIsTempNumber)
    {
        m_lowerer->LoadHelperTemp(instrAdd, instrAdd);
    }
    else
    {
        IR::Opnd *tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        this->LoadHelperArgument(instrAdd, tempOpnd);
    }
    this->m_lowerer->LoadScriptContext(instrAdd);

    IR::JnHelperMethod helper;

    // Pick the Left/Right helper variant based on which side of the Add held
    // the non-Mul operand, then detach the Add's sources.
    if (addSrc == instrAdd->GetSrc2())
    {
        instrAdd->FreeSrc1();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc2();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubRight : IR::HelperOp_MulAddRight;
    }
    else
    {
        instrAdd->FreeSrc2();
        IR::Opnd *addOpnd = instrAdd->UnlinkSrc1();
        this->LoadHelperArgument(instrAdd, addOpnd);
        helper = isSub ? IR::HelperOp_MulSubLeft : IR::HelperOp_MulAddLeft;
    }

    // The Mul's operands become the helper's first two arguments; the Mul
    // instruction itself is then dead and removed.
    IR::Opnd *src2 = instrMul->UnlinkSrc2();
    this->LoadHelperArgument(instrAdd, src2);
    IR::Opnd *src1 = instrMul->UnlinkSrc1();
    this->LoadHelperArgument(instrAdd, src1);
    this->ChangeToHelperCall(instrAdd, helper);
    instrMul->Remove();

    return true;
}
  3853. void
  3854. LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
  3855. {
  3856. // TEST src1, AtomTag
  3857. // JEQ $float
  3858. // MOV EAX, src
  3859. // SAR EAX, AtomTag_Int32
  3860. // CDQ
  3861. // XOR EAX, EDX
  3862. // SUB EAX, EDX
  3863. // SHL EAX, AtomTag_Int32
  3864. // JO $labelHelper
  3865. // INC EAX
  3866. // MOV dst, EAX
  3867. // JMP $done
  3868. // $float
  3869. // CMP [src], JavascriptNumber.vtable
  3870. // JNE $helper
  3871. // MOVSD r1, [src + offsetof(value)]
  3872. // ANDPD r1, absDoubleCst
  3873. // dst = DoubleToVar(r1)
  3874. IR::Instr *instr = nullptr;
  3875. IR::LabelInstr *labelFloat = nullptr;
  3876. bool isInt = false;
  3877. bool isNotInt = false;
  3878. if (src->IsRegOpnd())
  3879. {
  3880. if (src->AsRegOpnd()->IsTaggedInt())
  3881. {
  3882. isInt = true;
  3883. }
  3884. else if (src->AsRegOpnd()->IsNotInt())
  3885. {
  3886. isNotInt = true;
  3887. }
  3888. }
  3889. else if (src->IsAddrOpnd())
  3890. {
  3891. IR::AddrOpnd *varOpnd = src->AsAddrOpnd();
  3892. Assert(varOpnd->IsVar() && Js::TaggedInt::Is(varOpnd->m_address));
  3893. #ifdef _M_X64
  3894. __int64 absValue = ::_abs64(Js::TaggedInt::ToInt32(varOpnd->m_address));
  3895. #else
  3896. __int32 absValue = ::abs(Js::TaggedInt::ToInt32(varOpnd->m_address));
  3897. #endif
  3898. if (!Js::TaggedInt::IsOverflow(absValue))
  3899. {
  3900. varOpnd->SetAddress(Js::TaggedInt::ToVarUnchecked((__int32)absValue), IR::AddrOpndKindConstantVar);
  3901. instr = IR::Instr::New(Js::OpCode::MOV, dst, varOpnd, this->m_func);
  3902. insertInstr->InsertBefore(instr);
  3903. return;
  3904. }
  3905. }
  3906. if (src->IsRegOpnd() == false)
  3907. {
  3908. IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  3909. instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, this->m_func);
  3910. insertInstr->InsertBefore(instr);
  3911. src = regOpnd;
  3912. }
  3913. #ifdef _M_IX86
  3914. bool emitFloatAbs = !isInt && AutoSystemInfo::Data.SSE2Available();
  3915. #else
  3916. bool emitFloatAbs = !isInt;
  3917. #endif
  3918. if (!isNotInt)
  3919. {
  3920. if (!isInt)
  3921. {
  3922. IR::LabelInstr *label = labelHelper;
  3923. if (emitFloatAbs)
  3924. {
  3925. label = labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3926. }
  3927. GenerateSmIntTest(src, insertInstr, label);
  3928. }
  3929. // MOV EAX, src
  3930. IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
  3931. regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
  3932. instr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
  3933. insertInstr->InsertBefore(instr);
  3934. #ifdef _M_IX86
  3935. // SAR EAX, AtomTag_Int32
  3936. instr = IR::Instr::New(Js::OpCode::SAR, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
  3937. insertInstr->InsertBefore(instr);
  3938. #endif
  3939. IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
  3940. regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());
  3941. // CDQ
  3942. // Note: put EDX on dst to give of def to the EDX lifetime
  3943. instr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
  3944. insertInstr->InsertBefore(instr);
  3945. // XOR EAX, EDX
  3946. instr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
  3947. insertInstr->InsertBefore(instr);
  3948. // SUB EAX, EDX
  3949. instr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
  3950. insertInstr->InsertBefore(instr);
  3951. #ifdef _M_X64
  3952. // abs(INT_MIN) overflows a 32 bit integer.
  3953. // JO $labelHelper
  3954. instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
  3955. insertInstr->InsertBefore(instr);
  3956. #endif
  3957. #ifdef _M_IX86
  3958. // SHL EAX, AtomTag_Int32
  3959. instr = IR::Instr::New(Js::OpCode::SHL, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
  3960. insertInstr->InsertBefore(instr);
  3961. // JO $labelHelper
  3962. instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
  3963. insertInstr->InsertBefore(instr);
  3964. // INC EAX
  3965. instr = IR::Instr::New(Js::OpCode::INC, regEAX, regEAX, this->m_func);
  3966. insertInstr->InsertBefore(instr);
  3967. #endif
  3968. // MOV dst, EAX
  3969. instr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
  3970. insertInstr->InsertBefore(instr);
  3971. #ifdef _M_X64
  3972. GenerateInt32ToVarConversion(dst, insertInstr);
  3973. #endif
  3974. }
  3975. if (labelFloat)
  3976. {
  3977. // JMP $done
  3978. instr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
  3979. insertInstr->InsertBefore(instr);
  3980. // $float
  3981. insertInstr->InsertBefore(labelFloat);
  3982. }
  3983. if (emitFloatAbs)
  3984. {
  3985. #if defined(_M_IX86)
  3986. // CMP [src], JavascriptNumber.vtable
  3987. IR::Opnd *opnd = IR::IndirOpnd::New(src->AsRegOpnd(), (int32)0, TyMachPtr, this->m_func);
  3988. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  3989. instr->SetSrc1(opnd);
  3990. instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
  3991. insertInstr->InsertBefore(instr);
  3992. // JNE $helper
  3993. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  3994. insertInstr->InsertBefore(instr);
  3995. // MOVSD r1, [src + offsetof(value)]
  3996. opnd = IR::IndirOpnd::New(src->AsRegOpnd(), Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
  3997. IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyMachDouble, this->m_func);
  3998. instr = IR::Instr::New(Js::OpCode::MOVSD, regOpnd, opnd, this->m_func);
  3999. insertInstr->InsertBefore(instr);
  4000. this->GenerateFloatAbs(regOpnd, insertInstr);
  4001. // dst = DoubleToVar(r1)
  4002. SaveDoubleToVar(callInstr->GetDst()->AsRegOpnd(), regOpnd, callInstr, insertInstr);
  4003. #elif defined(_M_X64)
  4004. // if (typeof(src) == double)
  4005. IR::RegOpnd *src64 = src->AsRegOpnd();
  4006. GenerateFloatTest(src64, insertInstr, labelHelper);
  4007. // dst64 = MOV src64
  4008. insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, src64, this->m_func));
  4009. // Unconditionally set the sign bit. This will get XORd away when we remove the tag.
  4010. // dst64 = OR 0x8000000000000000
  4011. insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::OR, dst, dst, IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func), this->m_func));
  4012. #endif
  4013. }
  4014. else if(!isInt)
  4015. {
  4016. // The source is not known to be a tagged int, so either it's definitely not an int (isNotInt), or the int version of
  4017. // abs failed the tag check and jumped here. We can't emit the float version of abs (!emitFloatAbs) due to SSE2 not
  4018. // being available, so jump straight to the helper.
  4019. // JMP $helper
  4020. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
  4021. insertInstr->InsertBefore(instr);
  4022. }
  4023. }
  4024. IR::Instr * LowererMD::GenerateFloatAbs(IR::RegOpnd * regOpnd, IR::Instr * insertInstr)
  4025. {
  4026. // ANDPS reg, absDoubleCst
  4027. IR::Opnd * opnd;
  4028. if (regOpnd->IsFloat64())
  4029. {
  4030. opnd = m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueAbsDoubleCst);
  4031. }
  4032. else
  4033. {
  4034. Assert(regOpnd->IsFloat32());
  4035. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsFloatCstAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  4036. }
  4037. // ANDPS has smaller encoding then ANDPD
  4038. IR::Instr * instr = IR::Instr::New(Js::OpCode::ANDPS, regOpnd, regOpnd, opnd, this->m_func);
  4039. insertInstr->InsertBefore(instr);
  4040. Legalize(instr);
  4041. return instr;
  4042. }
  4043. IR::RegOpnd* LowererMD::MaterializeDoubleConstFromInt(intptr_t constAddr, IR::Instr* instr)
  4044. {
  4045. IR::Opnd* constVal = IR::MemRefOpnd::New(constAddr, IRType::TyFloat64, this->m_func);
  4046. IR::RegOpnd * xmmReg = IR::RegOpnd::New(TyFloat64, m_func);
  4047. this->m_lowerer->InsertMove(xmmReg, constVal, instr);
  4048. return xmmReg;
  4049. }
  4050. IR::RegOpnd* LowererMD::MaterializeConstFromBits(int bits, IRType type, IR::Instr* instr)
  4051. {
  4052. IR::Opnd * regBits = IR::RegOpnd::New(TyInt32, m_func);
  4053. this->m_lowerer->InsertMove(regBits, IR::IntConstOpnd::New(bits, TyInt32, m_func), instr);
  4054. IR::RegOpnd * regConst = IR::RegOpnd::New(type, m_func);
  4055. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, regConst, regBits, m_func));
  4056. return regConst;
  4057. }
  4058. IR::Opnd* LowererMD::Subtract2To31(IR::Opnd* src1, IR::Opnd* intMinFP, IRType type, IR::Instr* instr)
  4059. {
  4060. Js::OpCode op = (type == TyFloat32) ? Js::OpCode::SUBSS : Js::OpCode::SUBSD;
  4061. IR::Opnd* adjSrc = IR::RegOpnd::New(type, m_func);
  4062. IR::Instr* sub = IR::Instr::New(op, adjSrc, src1, intMinFP, m_func);
  4063. instr->InsertBefore(sub);
  4064. Legalize(sub);
  4065. return adjSrc;
  4066. }
template <bool Saturate>
IR::Opnd*
LowererMD::GenerateTruncChecks(_In_ IR::Instr* instr, _In_opt_ IR::LabelInstr* doneLabel)
{
    // Emits the range checks for a float -> int32/uint32 truncation, inserted
    // before 'instr', and returns the source operand widened to float64.
    //
    // Emitted layout:
    //   if (src <= lowerLimit) goto $tooSmall    ; -1 (uint32) / INT_MIN-1 (int32)
    //   if (upperLimit > src)  goto $conversion  ; UINT_MAX+1 / INT_MAX+1
    //   <out-of-range handling>
    //   $conversion:
    //
    // Out-of-range handling:
    //   Saturate:  JP routes NaN to 0 (signed) or to the too-small clamp
    //              (unsigned); overflow clamps to max, too-small clamps to
    //              min/0; every saturating path jumps to doneLabel.
    //   !Saturate: out-of-range values reach $tooSmall and throw VBSERR_Overflow.
    AnalysisAssert(!Saturate || doneLabel);
    IR::Opnd* dst = instr->GetDst();
    Assert(dst->IsInt32() || dst->IsUInt32());
    // NaN needs its own target only when saturating a signed result; for
    // unsigned, NaN shares the too-small target (both produce 0).
    IR::LabelInstr * nanLabel = (Saturate && dst->IsSigned()) ? IR::LabelInstr::New(Js::OpCode::Label, m_func, true) : nullptr;
    IR::LabelInstr * conversion = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * tooSmallLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd * src64 = nullptr;
    if (src1->IsFloat32())
    {
        // Widen float32 sources so the limit comparisons run in double.
        src64 = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(src64, src1, instr);
    }
    else
    {
        src64 = src1;
    }
    // src <= lowerLimit => too small (the limit value itself is out of range).
    IR::RegOpnd* limitReg = MaterializeDoubleConstFromInt(dst->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleNegOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMinMinusOneAddr(), instr);
    m_lowerer->InsertCompareBranch(src64, limitReg, Js::OpCode::BrLe_A, tooSmallLabel, instr);
    // upperLimit > src => in range; otherwise fall through to the
    // out-of-range handling below.
    limitReg = MaterializeDoubleConstFromInt(dst->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleUintMaxPlusOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMaxPlusOneAddr(), instr);
    m_lowerer->InsertCompareBranch(limitReg, src64, Js::OpCode::BrGt_A, conversion, instr, true /*no NaN check*/);
    if (Saturate)
    {
        // Insert a label to mark this as the start of a helper block, so layout knows to move it
        m_lowerer->InsertLabel(true, instr);
        // NaN case is same as too small case for unsigned, so combine them
        instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, dst->IsSigned() ? nanLabel : tooSmallLabel, m_func));
        // Overflow case: clamp to the type's maximum.
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(dst->IsUnsigned() ? UINT32_MAX : INT32_MAX, dst->GetType(), m_func), instr);
        m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);
        // Too-small case: clamp to 0 (unsigned) or INT32_MIN (signed).
        instr->InsertBefore(tooSmallLabel);
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(dst->IsUnsigned() ? 0 : INT32_MIN, dst->GetType(), m_func), instr);
        m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);
        if (dst->IsSigned())
        {
            // NaN saturates to 0 for signed destinations.
            instr->InsertBefore(nanLabel);
            m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, dst->GetType(), m_func), instr);
            m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, instr);
        }
    }
    else
    {
        // Non-saturating: throw on any out-of-range value.
        instr->InsertBefore(tooSmallLabel);
        m_lowerer->GenerateThrow(IR::IntConstOpnd::New(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), instr);
        //no jump here we aren't coming back
    }
    instr->InsertBefore(conversion);
    return src64;
}
template <bool Saturate>
void
LowererMD::GenerateTruncWithCheck(_In_ IR::Instr * instr)
{
    // Lowers a checked float -> int32/uint32 truncation (dst = trunc(src1)).
    // GenerateTruncChecks emits the range guards (throwing or saturating per
    // the template argument); this function emits the conversion itself and
    // then removes the original instruction.
    Assert(AutoSystemInfo::Data.SSE2Available());
    IR::LabelInstr * doneLabel = Saturate ? IR::LabelInstr::New(Js::OpCode::Label, m_func) : nullptr;
    IR::Opnd* src64 = GenerateTruncChecks<Saturate>(instr, doneLabel); //converts src to double and checks if MIN <= src <= MAX
    IR::Opnd* dst = instr->GetDst();
    if (dst->IsUnsigned())
    {
        // CVTTSD2SI produces a signed result, so values >= 2^31 are handled
        // by subtracting 2^31 before converting and re-adding it afterwards
        // as the 0x80000000 bias pre-loaded into dst.
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, m_func), instr);
        IR::LabelInstr * skipUnsignedPart = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::Opnd* twoTo31 = MaterializeDoubleConstFromInt(m_func->GetThreadContextInfo()->GetDoubleTwoTo31Addr(), instr);
        m_lowerer->InsertCompareBranch(src64, twoTo31, Js::OpCode::BrLt_A, skipUnsignedPart, instr);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SUBPD, src64, src64, twoTo31, m_func));
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0x80000000 /*2^31*/, TyUint32, m_func), instr);
        instr->InsertBefore(skipUnsignedPart);
        IR::Opnd* tmp = IR::RegOpnd::New(TyInt32, m_func);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, tmp, src64, m_func));
        // dst = bias (0 or 2^31) + truncated low part.
        instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, tmp, m_func));
    }
    else
    {
        // Signed destination: a single truncating conversion suffices.
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, dst, src64, m_func));
    }
    if (Saturate)
    {
        // Saturating out-of-range paths jump here with dst already clamped.
        instr->InsertBefore(doneLabel);
    }
    // The original instruction has been fully expanded; delete it.
    instr->UnlinkSrc1();
    instr->UnlinkDst();
    instr->Remove();
}
// Explicit instantiations: non-saturating (throwing) and saturating variants.
template void LowererMD::GenerateTruncWithCheck<false>(_In_ IR::Instr * instr);
template void LowererMD::GenerateTruncWithCheck<true>(_In_ IR::Instr * instr);
  4159. void
  4160. LowererMD::GenerateCtz(IR::Instr * instr)
  4161. {
  4162. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  4163. Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
  4164. #ifdef _M_IX86
  4165. if (instr->GetSrc1()->IsInt64())
  4166. {
  4167. lowererMDArch.EmitInt64Instr(instr);
  4168. return;
  4169. }
  4170. #endif
  4171. if (AutoSystemInfo::Data.TZCntAvailable())
  4172. {
  4173. instr->m_opcode = Js::OpCode::TZCNT;
  4174. Legalize(instr);
  4175. }
  4176. else
  4177. {
  4178. // dst = BSF src
  4179. // dst = CMOVE dst, 32 // dst is src1 to help reg alloc
  4180. int instrSize = instr->GetSrc1()->GetSize();
  4181. IRType type = instrSize == 8 ? TyInt64 : TyInt32;
  4182. instr->m_opcode = Js::OpCode::BSF;
  4183. Legalize(instr);
  4184. IR::IntConstOpnd * const32 = IR::IntConstOpnd::New(instrSize * 8, type, m_func);
  4185. IR::Instr* cmove = IR::Instr::New(Js::OpCode::CMOVE, instr->GetDst(), instr->GetDst(), const32, this->m_func);
  4186. instr->InsertAfter(cmove);
  4187. Legalize(cmove);
  4188. }
  4189. }
  4190. void
  4191. LowererMD::GeneratePopCnt(IR::Instr * instr)
  4192. {
  4193. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  4194. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
  4195. #ifdef _M_IX86
  4196. if (instr->GetSrc1()->IsInt64())
  4197. {
  4198. lowererMDArch.EmitInt64Instr(instr);
  4199. return;
  4200. }
  4201. #endif
  4202. if (AutoSystemInfo::Data.PopCntAvailable())
  4203. {
  4204. instr->m_opcode = Js::OpCode::POPCNT;
  4205. Legalize(instr);
  4206. }
  4207. else
  4208. {
  4209. int instrSize = instr->GetSrc1()->GetSize();
  4210. LoadHelperArgument(instr, instr->GetSrc1());
  4211. instr->UnlinkSrc1();
  4212. this->ChangeToHelperCall(instr, instrSize == 8 ? IR::HelperPopCnt64 : IR::HelperPopCnt32);
  4213. }
  4214. }
void
LowererMD::GenerateClz(IR::Instr * instr)
{
    // Lower a count-leading-zeros instruction.
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
#ifdef _M_IX86
    // On x86, 64-bit sources go through the int64 register-pair lowering.
    if (instr->GetSrc1()->IsInt64())
    {
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.LZCntAvailable())
    {
        // Hardware LZCNT handles the zero-input case directly.
        instr->m_opcode = Js::OpCode::LZCNT;
        Legalize(instr);
    }
    else
    {
        // Emulate with BSR (index of the highest set bit); the JE after BSR
        // catches the zero source, which loads the full bit width instead.
        // tmp = BSR src
        // JE $label32
        // dst = SUB 31, tmp
        // dst = SUB 63, tmp; for int64
        // JMP $done
        // label32:
        // dst = mov 32;
        // dst = mov 64; for int64
        // $done
        int instrSize = instr->GetSrc1()->GetSize();
        IRType type = instrSize == 8 ? TyInt64 : TyInt32;
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr->m_next);
        IR::Opnd * dst = instr->UnlinkDst();
        IR::Opnd * tmpOpnd = IR::RegOpnd::New(type, m_func);
        instr->SetDst(tmpOpnd);
        instr->m_opcode = Js::OpCode::BSR;
        Legalize(instr);
        IR::LabelInstr * label32 = Lowerer::InsertLabel(false, doneLabel);
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, label32, m_func);
        label32->InsertBefore(instr);
        // clz = (bitWidth - 1) - bsr(src)
        Lowerer::InsertSub(false, dst, IR::IntConstOpnd::New(instrSize == 8 ? 63 : 31, type, m_func), tmpOpnd, label32);
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, label32);
        // Zero source: clz is the full bit width.
        Lowerer::InsertMove(dst, IR::IntConstOpnd::New(instrSize == 8 ? 64 : 32, type, m_func), doneLabel);
    }
}
#if !FLOATVAR
void
LowererMD::GenerateNumberAllocation(IR::RegOpnd * opndDst, IR::Instr * instrInsert, bool isHelper)
{
    // Emits an inline bump allocation from the JavascriptNumber allocator
    // into opndDst, with a fallback call to the AllocUninitializedNumber
    // helper when the allocator's current block is exhausted. The returned
    // memory is uninitialized; callers must write vtable/type/value.
    // isHelper marks whether the emitted $done label lies on a helper path.
    size_t alignedAllocSize = Js::RecyclerJavascriptNumberAllocator::GetAlignedAllocSize(
        m_func->GetScriptContextInfo()->IsRecyclerVerifyEnabled(),
        m_func->GetScriptContextInfo()->GetRecyclerVerifyPad());
    IR::Opnd * endAddressOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorEndAddress);
    IR::Opnd * freeObjectListOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorFreeObjectList);
    // MOV dst, allocator->freeObjectList
    IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::MOV, opndDst, freeObjectListOpnd, this->m_func);
    instrInsert->InsertBefore(loadMemBlockInstr);
    // LEA nextMemBlock, [dst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd,
        IR::IndirOpnd::New(opndDst, alignedAllocSize, TyMachPtr, this->m_func), this->m_func);
    instrInsert->InsertBefore(loadNextMemBlockInstr);
    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    instrInsert->InsertBefore(checkInstr);
    // JA $helper -- the bump would overrun the block, take the slow path
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JA, helperLabel, this->m_func);
    instrInsert->InsertBefore(branchInstr);
    // MOV allocator->freeObjectList, nextMemBlock -- commit the bump
    IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::MOV, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
    instrInsert->InsertBefore(setFreeObjectListInstr);
    // JMP $done
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    IR::BranchInstr * branchToDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
    instrInsert->InsertBefore(branchToDoneInstr);
    // $helper:
    instrInsert->InsertBefore(helperLabel);
    // PUSH allocator
    this->LoadHelperArgument(instrInsert, m_lowerer->LoadScriptContextValueOpnd(instrInsert, ScriptContextValue::ScriptContextNumberAllocator));
    // dst = Call AllocUninitializedNumber
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::CALL, opndDst,
        IR::HelperCallOpnd::New(IR::HelperAllocUninitializedNumber, this->m_func), this->m_func);
    instrInsert->InsertBefore(instrCall);
    this->lowererMDArch.LowerCall(instrCall, 0);
    // $done:
    instrInsert->InsertBefore(doneLabel);
}
#endif
#ifdef _CONTROL_FLOW_GUARD
void
LowererMD::GenerateCFGCheck(IR::Opnd * entryPointOpnd, IR::Instr * insertBeforeInstr)
{
    // Emits a Control Flow Guard validation of an indirect call target
    // (entryPointOpnd), inserted before insertBeforeInstr. When JIT thunks
    // are in use, a fast path first checks whether the target lies inside the
    // JIT thunk segment; if so, the target is aligned to the thunk size and
    // the __guard_check_icall call is skipped — unless ForceJITCFGCheck is
    // set, in which case the CFG call always runs.
    bool useJITTrampoline = CONFIG_FLAG(UseJITTrampoline);
    IR::LabelInstr * callLabelInstr = nullptr;
    uintptr_t jitThunkStartAddress = NULL;
    if (useJITTrampoline)
    {
#if ENABLE_OOP_NATIVE_CODEGEN
        if (m_func->IsOOPJIT())
        {
            OOPJITThunkEmitter * jitThunkEmitter = m_func->GetOOPThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        else
#endif
        {
            InProcJITThunkEmitter * jitThunkEmitter = m_func->GetInProcThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        if (jitThunkStartAddress)
        {
            uintptr_t endAddressOfSegment = jitThunkStartAddress + InProcJITThunkEmitter::TotalThunkSize;
            Assert(endAddressOfSegment > jitThunkStartAddress);
            // Generate instructions for local Pre-Reserved Segment Range check
            IR::AddrOpnd * endAddressOfSegmentConstOpnd = IR::AddrOpnd::New(endAddressOfSegment, IR::AddrOpndKindDynamicMisc, m_func);
            IR::RegOpnd *resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            callLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            IR::LabelInstr * cfgLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            // resultOpnd = SUB endAddressOfSegmentConstOpnd, entryPointOpnd
            // CMP resultOpnd, TotalThunkSize
            // JAE $cfgLabel
            // AND entryPointOpnd, ~(ThunkSize-1)
            // JMP $callLabel
            m_lowerer->InsertSub(false, resultOpnd, endAddressOfSegmentConstOpnd, entryPointOpnd, insertBeforeInstr);
            m_lowerer->InsertCompareBranch(resultOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::TotalThunkSize, TyMachReg, m_func, true), Js::OpCode::BrGe_A, true, cfgLabelInstr, insertBeforeInstr);
            m_lowerer->InsertAnd(entryPointOpnd, entryPointOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::ThunkAlignmentMask, TyMachReg, m_func, true), insertBeforeInstr);
            m_lowerer->InsertBranch(Js::OpCode::Br, callLabelInstr, insertBeforeInstr);
            // $cfgLabel: target is outside the thunk segment; fall through to
            // the OS CFG check below.
            insertBeforeInstr->InsertBefore(cfgLabelInstr);
        }
    }
    //MOV ecx, entryPoint ; the guard-check helper takes the target in (e/r)cx
    IR::RegOpnd * entryPointRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
#if _M_IX86
    entryPointRegOpnd->SetReg(RegECX);
#elif _M_X64
    entryPointRegOpnd->SetReg(RegRCX);
#endif
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr* movInstrEntryPointToRegister = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstrEntryPointToRegister);
    //Generate CheckCFG CALL here
    IR::HelperCallOpnd *cfgCallOpnd = IR::HelperCallOpnd::New(IR::HelperGuardCheckCall, this->m_func);
    IR::Instr* cfgCallInstr = IR::Instr::New(Js::OpCode::CALL, this->m_func);
    this->m_func->SetHasCallsOnSelfAndParents();
#if _M_IX86
    //call[__guard_check_icall_fptr]
    cfgCallInstr->SetSrc1(cfgCallOpnd);
#elif _M_X64
    //mov rax, __guard_check_icall_fptr
    IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, m_func), RegRAX, TyMachPtr, this->m_func);
    IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, cfgCallOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstr);
    //call rax
    cfgCallInstr->SetSrc1(targetOpnd);
#endif
    //CALL cfg(rax)
    insertBeforeInstr->InsertBefore(cfgCallInstr);
    if (jitThunkStartAddress)
    {
        Assert(callLabelInstr);
        if (CONFIG_FLAG(ForceJITCFGCheck))
        {
            // Always generate CFG check to make sure that the address is still valid
            movInstrEntryPointToRegister->InsertBefore(callLabelInstr);
        }
        else
        {
            // Fast path joins here, after (i.e. skipping) the CFG call.
            insertBeforeInstr->InsertBefore(callLabelInstr);
        }
    }
}
#endif
  4389. void
  4390. LowererMD::GenerateFastRecyclerAlloc(size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, IR::LabelInstr* allocHelperLabel, IR::LabelInstr* allocDoneLabel)
  4391. {
  4392. IR::Opnd * endAddressOpnd;
  4393. IR::Opnd * freeListOpnd;
  4394. ScriptContextInfo* scriptContext = this->m_func->GetScriptContextInfo();
  4395. void* allocatorAddress;
  4396. uint32 endAddressOffset;
  4397. uint32 freeListOffset;
  4398. size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
  4399. bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
  4400. Recycler::GetNormalHeapBlockAllocatorInfoForNativeAllocation((void*)scriptContext->GetRecyclerAddr(), alignedSize,
  4401. allocatorAddress, endAddressOffset, freeListOffset,
  4402. allowNativeCodeBumpAllocation, this->m_func->IsOOPJIT());
  4403. endAddressOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + endAddressOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorEndAddressRef);
  4404. freeListOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + freeListOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorFreeListRef);
  4405. const IR::AutoReuseOpnd autoReuseTempOpnd(freeListOpnd, m_func);
  4406. // MOV newObjDst, allocator->freeObjectList
  4407. Lowerer::InsertMove(newObjDst, freeListOpnd, insertionPointInstr);
  4408. // LEA nextMemBlock, [newObjDst + allocSize]
  4409. IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  4410. IR::IndirOpnd* nextMemBlockSrc = IR::IndirOpnd::New(newObjDst, (int32)alignedSize, TyMachPtr, this->m_func);
  4411. IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd, nextMemBlockSrc, this->m_func);
  4412. insertionPointInstr->InsertBefore(loadNextMemBlockInstr);
  4413. // CMP nextMemBlock, allocator->endAddress
  4414. IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  4415. checkInstr->SetSrc1(nextMemBlockOpnd);
  4416. checkInstr->SetSrc2(endAddressOpnd);
  4417. insertionPointInstr->InsertBefore(checkInstr);
  4418. Legalize(checkInstr);
  4419. // JA $allocHelper
  4420. IR::BranchInstr * branchToAllocHelperInstr = IR::BranchInstr::New(Js::OpCode::JA, allocHelperLabel, this->m_func);
  4421. insertionPointInstr->InsertBefore(branchToAllocHelperInstr);
  4422. // MOV allocator->freeObjectList, nextMemBlock
  4423. Lowerer::InsertMove(freeListOpnd, nextMemBlockOpnd, insertionPointInstr, false);
  4424. // JMP $allocDone
  4425. IR::BranchInstr * branchToAllocDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, allocDoneLabel, this->m_func);
  4426. insertionPointInstr->InsertBefore(branchToAllocDoneInstr);
  4427. }
  4428. #ifdef ENABLE_WASM
  4429. void
  4430. LowererMD::GenerateCopysign(IR::Instr * instr)
  4431. {
  4432. #if defined(_M_IX86)
  4433. // We should only generate this if sse2 is available
  4434. Assert(AutoSystemInfo::Data.SSE2Available());
  4435. #endif
  4436. // ANDPS reg0, absDoubleCst
  4437. // ANDPS reg1, sgnBitDoubleCst
  4438. // ORPS reg0, reg1
  4439. // Copy sign from src2 to src1
  4440. IR::Opnd* src1 = instr->GetSrc1();
  4441. IR::Opnd* src2 = instr->GetSrc2();
  4442. Assert(src1->IsFloat32() || src1->IsFloat64());
  4443. GenerateFloatAbs(src1->AsRegOpnd(), instr);
  4444. IR::MemRefOpnd *memRef = IR::MemRefOpnd::New(src2->IsFloat32() ? this->m_func->GetThreadContextInfo()->GetSgnFloatBitCst() : this->m_func->GetThreadContextInfo()->GetSgnDoubleBitCst(),
  4445. src2->GetType(), this->m_func, src2->IsFloat32() ? IR::AddrOpndKindDynamicFloatRef : IR::AddrOpndKindDynamicDoubleRef);
  4446. IR::Instr* t2 = IR::Instr::New(Js::OpCode::ANDPS, instr->GetSrc2(), instr->GetSrc2(), memRef, m_func);
  4447. instr->InsertBefore(t2);
  4448. Legalize(t2);
  4449. instr->m_opcode = Js::OpCode::ORPS;
  4450. Legalize(instr);
  4451. };
  4452. #endif //ENABLE_WASM
void
LowererMD::SaveDoubleToVar(IR::RegOpnd * dstOpnd, IR::RegOpnd *opndFloat, IR::Instr *instrOrig, IR::Instr *instrInsert, bool isHelper)
{
    // Boxes the float64 in opndFloat into a Var in dstOpnd, inserting the
    // code before instrInsert.
    //   !FLOATVAR builds: store the double into a JavascriptNumber object —
    //     either a reused stack temp (dstIsTempNumber) or a freshly allocated
    //     recycler number.
    //   FLOATVAR builds: produce a tagged var by XORing the raw double bits
    //     with FloatTag_Value, canonicalizing NaNs first in asm.js mode.
    Assert(opndFloat->GetType() == TyFloat64);
    // Call JSNumber::ToVar to save the float operand to the result of the original (var) instruction
#if !FLOATVAR
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
    IR::Opnd * symVTableDst;
    IR::Opnd * symDblDst;
    IR::Opnd * symTypeDst;
    IR::Instr * newInstr;
    IR::Instr * numberInitInsertInstr = nullptr;
    if (instrOrig->dstIsTempNumber)
    {
        // Use the original dst to get the temp number sym
        StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrOrig->GetDst(), instrOrig->dstIsTempNumberTransferred);
        // LEA dst, &tempSym
        IR::SymOpnd * symTempSrc = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        IR::Instr * loadTempNumberInstr = IR::Instr::New(Js::OpCode::LEA, dstOpnd, symTempSrc, this->m_func);
        instrInsert->InsertBefore(loadTempNumberInstr);
        // Destinations for the number's vtable/value/type fields within the temp.
        symVTableDst = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        symDblDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        if (this->m_lowerer->outerMostLoopLabel == nullptr)
        {
            // If we are not in loop, just insert in place
            numberInitInsertInstr = instrInsert;
        }
        else
        {
            // Otherwise, initialize in the outer most loop top if we haven't initialized it yet.
            numberInitInsertInstr = this->m_lowerer->initializedTempSym->TestAndSet(tempNumberSym->m_id) ?
                nullptr : this->m_lowerer->outerMostLoopLabel;
        }
    }
    else
    {
        // Not a temp: allocate a fresh JavascriptNumber from the recycler.
        this->GenerateNumberAllocation(dstOpnd, instrInsert, isHelper);
        symVTableDst = IR::IndirOpnd::New(dstOpnd, 0, TyMachPtr, this->m_func);
        symDblDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        numberInitInsertInstr = instrInsert;
    }
    if (numberInitInsertInstr)
    {
        // Inline the case where the dst is marked as temp.
        IR::Opnd *jsNumberVTable = m_lowerer->LoadVTableValueOpnd(numberInitInsertInstr, VTableValue::VtableJavascriptNumber);
        // MOV dst->vtable, JavascriptNumber::vtable
        newInstr = IR::Instr::New(Js::OpCode::MOV, symVTableDst, jsNumberVTable, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
        // MOV dst->type, JavascriptNumber_type
        IR::Opnd *typeOpnd = m_lowerer->LoadLibraryValueOpnd(numberInitInsertInstr, LibraryValue::ValueNumberTypeStatic);
        newInstr = IR::Instr::New(Js::OpCode::MOV, symTypeDst, typeOpnd, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
    }
    // MOVSD dst->value, opndFloat ; copy the float result to the temp JavascriptNumber
    newInstr = IR::Instr::New(Js::OpCode::MOVSD, symDblDst, opndFloat, this->m_func);
    instrInsert->InsertBefore(newInstr);
#else
    // s1 = MOVD opndFloat ; move the double's raw bits into a GPR
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::Instr *movd = IR::Instr::New(Js::OpCode::MOVD, s1, opndFloat, m_func);
    instrInsert->InsertBefore(movd);
    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        // Canonicalize NaNs before tagging: a value with all exponent bits
        // set (tmp == 0 after NOT+mask) and a non-zero mantissa is replaced
        // with JavascriptNumber::k_Nan.
        // s1 = MOVD src
        // tmp = NOT s1
        // tmp = AND tmp, 0x7FF0000000000000ull
        // test tmp, tmp
        // je helper
        // jmp done
        // helper:
        // tmp2 = AND s1, 0x000FFFFFFFFFFFFFull
        // test tmp2, tmp2
        // je done
        // s1 = JavascriptNumber::k_Nan
        // done:
        IR::RegOpnd *tmp = IR::RegOpnd::New(TyMachReg, m_func);
        IR::Instr * newInstr = IR::Instr::New(Js::OpCode::NOT, tmp, s1, m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::MakeDstEquSrc1(newInstr);
        newInstr = IR::Instr::New(Js::OpCode::AND, tmp, tmp, IR::AddrOpnd::New((Js::Var)0x7FF0000000000000, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        IR::LabelInstr* helper = Lowerer::InsertLabel(true, instrInsert);
        Lowerer::InsertTestBranch(tmp, tmp, Js::OpCode::BrEq_A, helper, helper);
        IR::LabelInstr* done = Lowerer::InsertLabel(isHelper, instrInsert);
        Lowerer::InsertBranch(Js::OpCode::Br, done, helper);
        IR::RegOpnd *tmp2 = IR::RegOpnd::New(TyMachReg, m_func);
        newInstr = IR::Instr::New(Js::OpCode::AND, tmp2, s1, IR::AddrOpnd::New((Js::Var)0x000FFFFFFFFFFFFFull, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        done->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        Lowerer::InsertTestBranch(tmp2, tmp2, Js::OpCode::BrEq_A, done, done);
        IR::Opnd * opndNaN = IR::AddrOpnd::New((Js::Var)Js::JavascriptNumber::k_Nan, IR::AddrOpndKindConstantVar, m_func, true);
        Lowerer::InsertMove(s1, opndNaN, done);
    }
    // s1 = XOR s1, FloatTag_Value
    // dst = s1
    IR::Instr *setTag = IR::Instr::New(Js::OpCode::XOR,
        s1,
        s1,
        IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value,
            IR::AddrOpndKindConstantVar,
            this->m_func,
            /* dontEncode = */ true),
        this->m_func);
    IR::Instr *movDst = IR::Instr::New(Js::OpCode::MOV, dstOpnd, s1, this->m_func);
    instrInsert->InsertBefore(setTag);
    instrInsert->InsertBefore(movDst);
    LowererMD::Legalize(setTag);
#endif
}
// Load a float value out of a Var (tagged number) into 'dst', bailing out when the
// source is not (or cannot be coerced to) a number.
//
// dst         - float-typed destination (f32 or f64)
// src         - TyVar source operand
// insertInstr - the FromVar instruction being lowered; new code is inserted around it
//               and it is itself consumed (removed or turned into the bailout).
void
LowererMD::EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, insertInstr->HasBailOutInfo());
    if (labelDone == nullptr)
    {
        // We're done (src was a float constant; the common path emitted everything)
        insertInstr->Remove();
        return;
    }
    // $Done note: insertAfter
    insertInstr->InsertAfter(labelDone);
    if (!insertInstr->HasBailOutInfo())
    {
        // $Done - no bailout requested; nothing left of the original instruction to keep
        insertInstr->Remove();
        return;
    }
    Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
    IR::LabelInstr *labelNoBailOut = nullptr;
    IR::SymOpnd *tempSymOpnd = nullptr;
    if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
    {
        // Non-number primitives may still convert; call the conversion helper first and
        // only bail out if it reports failure. Lazily create a shared f64 stack slot for
        // the helper to write its result into.
        if (!this->m_func->tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
        }
        // LEA r3, tempSymDouble
        IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        tempSymOpnd = IR::SymOpnd::New(this->m_func->tempSymDouble, TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
        // regBoolResult = to_number_fromPrimitive(value, &dst, allowUndef, scriptContext);
        this->m_lowerer->LoadScriptContext(insertInstr);
        IR::IntConstOpnd *allowUndefOpnd;
        if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
        {
            allowUndefOpnd = IR::IntConstOpnd::New(true, TyInt32, this->m_func);
        }
        else
        {
            Assert(insertInstr->GetBailOutKind() == IR::BailOutNumberOnly);
            allowUndefOpnd = IR::IntConstOpnd::New(false, TyInt32, this->m_func);
        }
        // Helper arguments are pushed in reverse order of the call signature above.
        this->LoadHelperArgument(insertInstr, allowUndefOpnd);
        this->LoadHelperArgument(insertInstr, reg3Opnd);
        this->LoadHelperArgument(insertInstr, src);
        IR::RegOpnd *regBoolResult = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CALL, regBoolResult, IR::HelperCallOpnd::New(IR::HelperOp_ConvNumber_FromPrimitive, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
        this->lowererMDArch.LowerCall(instr, 0);
        // TEST regBoolResult, regBoolResult
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(regBoolResult);
        instr->SetSrc2(regBoolResult);
        insertInstr->InsertBefore(instr);
        // JNE $noBailOut - helper returned nonzero => conversion succeeded, skip the bailout
        labelNoBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelNoBailOut, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    // Bailout code: strip dst/src off the FromVar and turn it into the bailout itself.
    Assert(insertInstr->m_opcode == Js::OpCode::FromVar);
    insertInstr->UnlinkDst();
    insertInstr->FreeSrc1();
    IR::Instr *bailoutInstr = insertInstr;
    insertInstr = bailoutInstr->m_next;
    this->m_lowerer->GenerateBailOut(bailoutInstr);
    // $noBailOut - pick up the value the conversion helper stored in the stack slot
    if (labelNoBailOut)
    {
        insertInstr->InsertBefore(labelNoBailOut);
        Assert(dst->IsRegOpnd());
        // MOVSD dst, [pResult].f64
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
// Shared fast-path emission for loading a float from a Var source.
//
// Returns nullptr when src was a constant and the load was fully emitted inline
// (the caller has nothing more to do), otherwise returns the $Done label the
// caller must place after its helper/bailout code. When needHelperLabel is true
// a distinct $Helper label is emitted before returning; otherwise the helper
// path falls through to $Done.
IR::LabelInstr*
LowererMD::EmitLoadFloatCommon(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, bool needHelperLabel)
{
    IR::Instr *instr;
    Assert(src->GetType() == TyVar);
    Assert(dst->IsFloat());
    bool isFloatConst = false;
    IR::RegOpnd *regFloatOpnd = nullptr;
    // Case 1: src is a register holding a known float constant - load the double
    // directly from a baked-in memory reference.
    if (src->IsRegOpnd() && src->AsRegOpnd()->m_sym->m_isFltConst)
    {
        IR::RegOpnd *regOpnd = src->AsRegOpnd();
        Assert(regOpnd->m_sym->m_isSingleDef);
        Js::Var value = regOpnd->m_sym->GetFloatConstValueAsVar_PostGlobOpt();
#if FLOATVAR
        // NaN-boxed build: the double has no heap object; materialize it in the
        // native code data area and reference it from there.
        void *pDouble = (double*)NativeCodeDataNewNoFixup(this->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_EmitLoadFloatCommon_Double>, Js::JavascriptNumber::GetValue(value));
        IR::Opnd * doubleRef;
        if (!m_func->IsOOPJIT())
        {
            doubleRef = IR::MemRefOpnd::New(pDouble, TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        else
        {
            // OOP JIT: address isn't known in-process; address the constant as an
            // offset from the native-code-data base register instead.
            int offset = NativeCodeData::GetDataTotalOffset(pDouble);
            doubleRef = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), offset, TyMachDouble,
#if DBG
                NativeCodeData::GetDataDescription(pDouble, m_func->m_alloc),
#endif
                m_func, true);
            GetLowerer()->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
#else
        // Boxed-number build: read the double field straight out of the JavascriptNumber.
        IR::MemRefOpnd *doubleRef = IR::MemRefOpnd::New((BYTE*)value + Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func,
            IR::AddrOpndKindDynamicDoubleRef);
#endif
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regFloatOpnd, doubleRef, this->m_func);
        insertInstr->InsertBefore(instr);
        Legalize(instr);
        isFloatConst = true;
    }
    // Case 2: src is constant? (immediate int or float const operand)
    if (src->IsImmediateOpnd() || src->IsFloatConstOpnd())
    {
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        m_lowerer->LoadFloatFromNonReg(src, regFloatOpnd, insertInstr);
        isFloatConst = true;
    }
    if (isFloatConst)
    {
        if (dst->GetType() == TyFloat32)
        {
            // CVTSD2SS regOpnd32.f32, regOpnd.f64 -- Convert regOpnd from f64 to f32
            IR::RegOpnd *regOpnd32 = regFloatOpnd->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
            instr = IR::Instr::New(Js::OpCode::CVTSD2SS, regOpnd32, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
            // MOVSS dst, regOpnd32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, regOpnd32, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, regOpnd
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        // Constant fully handled inline - signal the caller there is no $Done label.
        return nullptr;
    }
    // Case 3: general register source - emit the runtime tagged-float check.
    Assert(src->IsRegOpnd());
    IR::LabelInstr *labelStore = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    if (needHelperLabel)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    else
    {
        // No separate helper block: the non-float path jumps straight to $Done.
        labelHelper = labelDone;
    }
    bool const isFloat32 = dst->GetType() == TyFloat32;
    // Need a scratch f64 register when dst is f32 (conversion required) or not a register.
    IR::RegOpnd *reg2 = ((isFloat32 || !dst->IsRegOpnd()) ? IR::RegOpnd::New(TyMachDouble, this->m_func) : dst->AsRegOpnd());
    // Load the float value in reg2
    this->lowererMDArch.LoadCheckedFloat(src->AsRegOpnd(), reg2, labelStore, labelHelper, insertInstr, needHelperLabel);
    // $Store
    insertInstr->InsertBefore(labelStore);
    if (isFloat32)
    {
        IR::RegOpnd *reg2_32 = reg2->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
        // CVTSD2SS r2_32.f32, r2.f64 -- Convert regOpnd from f64 to f32
        instr = IR::Instr::New(Js::OpCode::CVTSD2SS, reg2_32, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
        // MOVSS dst, r2_32
        instr = IR::Instr::New(Js::OpCode::MOVSS, dst, reg2_32, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    else if (reg2 != dst)
    {
        // MOVSD dst, r2
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    // JMP $Done
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    insertInstr->InsertBefore(instr);
    if (needHelperLabel)
    {
        // $Helper - caller emits the slow path here, before the $Done label it places.
        insertInstr->InsertBefore(labelHelper);
    }
    return labelDone;
}
// Load a float from a Var source, falling back to a runtime conversion helper
// (Op_ConvFloat/Op_ConvNumber) for non-number values.
//
// instrBailOut / labelBailOut - optional bailout context: when the bailout kind
// includes BailOutOnArrayAccessHelperCall we must not call the helper and branch
// to labelBailOut instead; when it is an implicit-call bailout, the helper call
// itself carries (shared) bailout info.
void
LowererMD::EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, true);
    if (labelDone == nullptr)
    {
        // We're done (constant source handled entirely inline)
        return;
    }
    IR::BailOutKind bailOutKind = instrBailOut && instrBailOut->HasBailOutInfo() ? instrBailOut->GetBailOutKind() : IR::BailOutInvalid;
    if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
    {
        // Bail out instead of making the helper call.
        Assert(labelBailOut);
        m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, insertInstr);
        insertInstr->InsertBefore(labelDone);
        return;
    }
    // The helper writes its result through a pointer, so it needs a memory location.
    IR::Opnd *memAddress = dst;
    if (dst->IsRegOpnd())
    {
        // Create an f64 stack location to store the result of the helper.
        IR::SymOpnd *symOpnd = IR::SymOpnd::New(StackSym::New(dst->GetType(), this->m_func), dst->GetType(), this->m_func);
        this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(double));
        memAddress = symOpnd;
    }
    // LEA r3, dst
    IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, memAddress, this->m_func);
    insertInstr->InsertBefore(instr);
    // to_number_full(value, &dst, scriptContext);
    // Create dummy binary op to convert into helper
    instr = IR::Instr::New(Js::OpCode::Add_A, this->m_func);
    instr->SetSrc1(src);
    instr->SetSrc2(reg3Opnd);
    insertInstr->InsertBefore(instr);
    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        // If the original instruction still owns the bailout record, split off a
        // shared bail target so both instructions can reference it.
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }
    IR::JnHelperMethod helper;
    if (dst->GetType() == TyFloat32)
    {
        helper = IR::HelperOp_ConvFloat_Helper;
    }
    else
    {
        helper = IR::HelperOp_ConvNumber_Helper;
    }
    this->m_lowerer->LowerBinaryHelperMem(instr, helper);
    // Copy the helper's stack-slot result into the register destination.
    if (dst->IsRegOpnd())
    {
        if (dst->GetType() == TyFloat32)
        {
            // MOVSS dst, r32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, [pResult].f64
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    // $Done
    insertInstr->InsertBefore(labelDone);
}
// Lower Neg_I4 with a bailout on overflow (negating INT32_MIN) and/or negative
// zero (negating 0). 'instr' must be immediately followed by bailOutLabel; the
// conditional branches into the bailout path are inserted just before it, and
// the fall-through case jumps to skipBailOutLabel.
void
LowererMD::LowerInt4NegWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Neg_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    // Lower the instruction
    instr->m_opcode = Js::OpCode::NEG;
    Legalize(instr);
    // NEG sets OF when the operand was INT32_MIN (result overflows).
    if(bailOutKind & IR::BailOutOnOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
    {
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
    }
    // A zero result means the input was 0, i.e. the result would be -0.
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, bailOutLabel, instr->m_func));
    }
    // Skip bailout
    bailOutLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
// Lower Add_I4 with a bailout on overflow. Because the ADD may have clobbered one
// of its own sources (dst aliases src1 and/or src2), the bailout path first emits
// code to reconstruct the original source value so the bailout snapshot is correct.
void
LowererMD::LowerInt4AddWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Add_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 & dstEquSrc2)
    {
        // We have:
        //     s1 += s1
        // Which is equivalent to:
        //     s1 <<= 1
        //
        // These overflow a signed 32-bit integer when for the initial s1:
        //     s1 > 0 && (s1 & 0x40000000)  - result is negative after overflow
        //     s1 < 0 && !(s1 & 0x40000000) - result is nonnegative after overflow
        //
        // To restore s1 to its value before the operation, we first do an arithmetic right-shift by one bit to undo the
        // left-shift and preserve the sign of the result after overflow. Since the result after overflow always has the
        // opposite sign from the operands (hence the overflow), we just need to invert the sign of the result. The following
        // restores s1 to its value before the instruction:
        //     s1 = (s1 >> 1) ^ 0x80000000
        //
        // Generate:
        //     sar s1, 1
        //     xor s1, 0x80000000
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::SAR,
                dst,
                dst,
                IR::IntConstOpnd::New(1, TyInt8, instr->m_func),
                instr->m_func)
            );
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::XOR,
                dst,
                dst,
                IR::IntConstOpnd::New(INT32_MIN, TyInt32, instr->m_func, true /* dontEncode */),
                instr->m_func)
            );
    }
    else if(dstEquSrc1 | dstEquSrc2)
    {
        // We have:
        //     s1 += s2
        // Or:
        //     s1 = s2 + s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 -= s2
        //
        // Generate:
        //     sub s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        bailOutLabel->InsertAfter(IR::Instr::New(Js::OpCode::SUB, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }
    // Lower the instruction
    ChangeToAdd(instr, true /* needFlags */);
    Legalize(instr);
    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));
    // Fall through to bailOutLabel
}
// Lower Sub_I4 with a bailout on overflow. As with Add_I4, the bailout path may
// need to undo the subtraction to recover a source value the SUB overwrote.
// (When dst aliases both sources, s1 - s1 == 0 cannot overflow and no restore
// code is emitted - note the XOR on the alias test below.)
void
LowererMD::LowerInt4SubWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Sub_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 ^ dstEquSrc2)
    {
        // We have:
        //     s1 -= s2
        // Or:
        //     s1 = s2 - s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 += s2
        // Or:
        //     s1 = s2 - s1
        //
        // Generate:
        //     neg s1      - only for second case
        //     add s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        if(dstEquSrc2)
        {
            startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func));
        }
        startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }
    // Lower the instruction
    ChangeToSub(instr, true /* needFlags */);
    Legalize(instr);
    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));
    // Fall through to bailOutLabel
}
  5016. bool
  5017. LowererMD::GenerateSimplifiedInt4Mul(
  5018. IR::Instr *const mulInstr,
  5019. const IR::BailOutKind bailOutKind,
  5020. IR::LabelInstr *const bailOutLabel)
  5021. {
  5022. if (AutoSystemInfo::Data.IsAtomPlatform())
  5023. {
  5024. // On Atom, always optimize unless phase is off
  5025. if (PHASE_OFF(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) ||
  5026. PHASE_OFF(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5027. return false;
  5028. }
  5029. else
  5030. {
  5031. // On other platforms, don't optimize unless phase is forced
  5032. if (!PHASE_FORCE(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) &&
  5033. !PHASE_FORCE(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5034. return false;
  5035. }
  5036. Assert(mulInstr);
  5037. Assert(mulInstr->m_opcode == Js::OpCode::Mul_I4);
  5038. IR::Instr *instr = mulInstr, *nextInstr;
  5039. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5040. if (!src1->IsIntConstOpnd() && !src2->IsIntConstOpnd())
  5041. return false;
  5042. // if two const operands, GlobOpt would have folded the computation
  5043. Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd()));
  5044. Assert(dst->IsRegOpnd());
  5045. const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
  5046. const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
  5047. const auto constSrcValue = constSrc->AsIntConstOpnd()->AsInt32();
  5048. auto nonConstSrcCopy = nonConstSrc;
  5049. Assert(nonConstSrc->IsRegOpnd());
  5050. bool doOVF = bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
  5051. // don't simplify mul by large numbers with OF check
  5052. if (doOVF && (constSrcValue > 3 || constSrcValue < -3))
  5053. return false;
  5054. switch(constSrcValue)
  5055. {
  5056. case -3:
  5057. case 3:
  5058. // if dst = src, we need to have a copy of the src for the ADD/SUB
  5059. if (dst->IsEqual(nonConstSrc))
  5060. {
  5061. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5062. // MOV
  5063. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5064. }
  5065. instr->UnlinkSrc1();
  5066. instr->UnlinkSrc2();
  5067. // SHL
  5068. instr->m_opcode = Js::OpCode::SHL;
  5069. instr->SetSrc1(nonConstSrc);
  5070. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5071. constSrc->Free(instr->m_func);
  5072. Legalize(instr);
  5073. // JO
  5074. if (doOVF)
  5075. {
  5076. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5077. instr->InsertAfter(nextInstr);
  5078. instr = nextInstr;
  5079. }
  5080. // ADD
  5081. nextInstr = IR::Instr::New(Js::OpCode::ADD, dst, dst, nonConstSrcCopy, instr->m_func);
  5082. instr->InsertAfter(nextInstr);
  5083. instr = nextInstr;
  5084. Legalize(instr);
  5085. if (constSrcValue == -3)
  5086. {
  5087. // JO
  5088. if (doOVF)
  5089. {
  5090. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5091. instr->InsertAfter(nextInstr);
  5092. instr = nextInstr;
  5093. }
  5094. // NEG
  5095. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5096. instr->InsertAfter(nextInstr);
  5097. instr = nextInstr;
  5098. Legalize(instr);
  5099. }
  5100. // last JO inserted by caller
  5101. return true;
  5102. case -2:
  5103. case 2:
  5104. instr->UnlinkSrc1();
  5105. instr->UnlinkSrc2();
  5106. // SHL
  5107. instr->m_opcode = Js::OpCode::SHL;
  5108. instr->SetSrc1(nonConstSrc);
  5109. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5110. constSrc->Free(instr->m_func);
  5111. Legalize(instr);
  5112. if (constSrcValue == -2)
  5113. {
  5114. // JO
  5115. if (doOVF)
  5116. {
  5117. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5118. instr->InsertAfter(nextInstr);
  5119. instr = nextInstr;
  5120. }
  5121. // NEG
  5122. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5123. instr->InsertAfter(nextInstr);
  5124. instr = nextInstr;
  5125. Legalize(instr);
  5126. }
  5127. // last JO inserted by caller
  5128. return true;
  5129. case -1:
  5130. instr->UnlinkSrc1();
  5131. instr->UnlinkSrc2();
  5132. // NEG
  5133. instr->m_opcode = Js::OpCode::NEG;
  5134. instr->SetSrc1(nonConstSrc);
  5135. constSrc->Free(instr->m_func);
  5136. Legalize(instr);
  5137. // JO inserted by caller
  5138. return true;
  5139. case 0:
  5140. instr->FreeSrc1();
  5141. instr->FreeSrc2();
  5142. // MOV
  5143. instr->m_opcode = Js::OpCode::MOV;
  5144. instr->SetSrc1(IR::IntConstOpnd::New((IntConstType) 0, TyInt32, instr->m_func));
  5145. Legalize(instr);
  5146. // JO inserted by caller are removed in later phases
  5147. return true;
  5148. case 1:
  5149. instr->UnlinkSrc1();
  5150. instr->UnlinkSrc2();
  5151. // MOV
  5152. instr->m_opcode = Js::OpCode::MOV;
  5153. instr->SetSrc1(nonConstSrc);
  5154. constSrc->Free(instr->m_func);
  5155. Legalize(instr);
  5156. // JO inserted by caller are removed in later phases
  5157. return true;
  5158. default:
  5159. // large numbers with no OF check
  5160. Assert(!doOVF);
  5161. // 2^i
  5162. // -2^i
  5163. if (Math::IsPow2(constSrcValue) || Math::IsPow2(-constSrcValue))
  5164. {
  5165. uint32 shamt = constSrcValue > 0 ? Math::Log2(constSrcValue) : Math::Log2(-constSrcValue);
  5166. instr->UnlinkSrc1();
  5167. instr->UnlinkSrc2();
  5168. // SHL
  5169. instr->m_opcode = Js::OpCode::SHL;
  5170. instr->SetSrc1(nonConstSrc);
  5171. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5172. constSrc->Free(instr->m_func);
  5173. Legalize(instr);
  5174. if (constSrcValue < 0)
  5175. {
  5176. // NEG
  5177. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5178. instr->InsertAfter(nextInstr);
  5179. Legalize(instr);
  5180. }
  5181. return true;
  5182. }
  5183. // 2^i + 1
  5184. // 2^i - 1
  5185. if (Math::IsPow2(constSrcValue - 1) || Math::IsPow2(constSrcValue + 1))
  5186. {
  5187. bool plusOne = Math::IsPow2(constSrcValue - 1);
  5188. uint32 shamt = plusOne ? Math::Log2(constSrcValue - 1) : Math::Log2(constSrcValue + 1);
  5189. if (dst->IsEqual(nonConstSrc))
  5190. {
  5191. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5192. // MOV
  5193. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5194. }
  5195. instr->UnlinkSrc1();
  5196. instr->UnlinkSrc2();
  5197. // SHL
  5198. instr->m_opcode = Js::OpCode::SHL;
  5199. instr->SetSrc1(nonConstSrc);
  5200. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5201. constSrc->Free(instr->m_func);
  5202. Legalize(instr);
  5203. // ADD/SUB
  5204. nextInstr = IR::Instr::New(plusOne ? Js::OpCode::ADD : Js::OpCode::SUB, dst, dst, nonConstSrcCopy, instr->m_func);
  5205. instr->InsertAfter(nextInstr);
  5206. instr = nextInstr;
  5207. Legalize(instr);
  5208. return true;
  5209. }
  5210. return false;
  5211. }
  5212. }
// Lower Mul_I4 with bailouts on overflow and/or negative zero. Tries the
// strength-reduced form first (GenerateSimplifiedInt4Mul); falls back to IMUL.
// 'instr' must be immediately followed by bailOutLabel.
void
LowererMD::LowerInt4MulWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Mul_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    IR::LabelInstr *checkForNegativeZeroLabel = nullptr;
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // We have:
        //     s3 = s1 * s2
        //
        // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
        //     bailOut = (s1 < 0 || s2 < 0) (either s1 or s2 has to be zero for the result to be zero, so we don't emit zero checks)
        //
        // Note, however, that if in future we decide to ignore mul overflow in some cases, and overflow occurs with one of the operands as negative,
        // this can lead to bailout. Will handle that case if ever we decide to ignore mul overflow.
        //
        // Generate:
        //     $checkForNegativeZeroLabel:
        //         test s1, s1
        //         js $bailOutLabel
        //         test s2, s2
        //         jns $skipBailOutLabel
        //     (fall through to bail out)
        const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
        Assert(dst->IsRegOpnd());
        Assert(!src1->IsEqual(src2)); // cannot result in -0 if both operands are the same; GlobOpt should have figured that out
        checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        bailOutLabel->InsertBefore(checkForNegativeZeroLabel);
        if(src1->IsIntConstOpnd() || src2->IsIntConstOpnd())
        {
            // One operand is a known constant: only the non-constant operand's sign
            // (or zero-ness, when the constant is 0) needs to be tested at runtime.
            Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd())); // if this results in -0, GlobOpt should have avoided type specialization
            const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
            const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
            Assert(nonConstSrc->IsRegOpnd());
            const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(nonConstSrc);
            newInstr->SetSrc2(nonConstSrc);
            bailOutLabel->InsertBefore(newInstr);
            const auto constSrcValue = constSrc->AsIntConstOpnd()->GetValue();
            if(constSrcValue == 0)
            {
                bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
            }
            else
            {
                Assert(constSrcValue < 0); // cannot result in -0 if one operand is positive; GlobOpt should have figured that out
                bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));
            }
        }
        else
        {
            auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(src1);
            newInstr->SetSrc2(src1);
            bailOutLabel->InsertBefore(newInstr);
            bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JSB, bailOutLabel, instr->m_func));
            newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(src2);
            newInstr->SetSrc2(src2);
            bailOutLabel->InsertBefore(newInstr);
            bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
        }
        // Fall through to bailOutLabel
    }
    const bool needsOverflowCheck =
        bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
    AssertMsg(!instr->ShouldCheckForNon32BitOverflow() || (needsOverflowCheck && instr->ShouldCheckForNon32BitOverflow()), "Non 32-bit overflow check required without bailout info");
    bool simplifiedMul = LowererMD::GenerateSimplifiedInt4Mul(instr, bailOutKind, bailOutLabel);
    // Lower the instruction
    if (!simplifiedMul)
    {
        LowererMD::ChangeToIMul(instr, needsOverflowCheck);
    }
    // Main-path checks go before the negative-zero block (when present) so they
    // don't fall into it.
    const auto insertBeforeInstr = checkForNegativeZeroLabel ? checkForNegativeZeroLabel : bailOutLabel;
    if(needsOverflowCheck)
    {
        // do we care about int32 or non-int32 overflow ?
        if (!simplifiedMul && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
            LowererMD::EmitNon32BitOvfCheck(instr, insertBeforeInstr, bailOutLabel);
        else
            insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
    }
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // On zero, branch to determine whether the result would be -0
        Assert(checkForNegativeZeroLabel);
        const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        const auto dst = instr->GetDst();
        newInstr->SetSrc1(dst);
        newInstr->SetSrc2(dst);
        insertBeforeInstr->InsertBefore(newInstr);
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, checkForNegativeZeroLabel, instr->m_func));
    }
    // Skip bailout
    insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
// Lower Rem_I4 with a bailout on negative zero: a zero remainder with a negative
// dividend is -0 in JS semantics, which the int-specialized result can't represent.
void
LowererMD::LowerInt4RemWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnNegativeZero);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    bool fastPath = m_lowerer->GenerateSimplifiedInt4Rem(instr, skipBailOutLabel);
    // We have:
    //     s3 = s1 % s2
    //
    // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
    //     bailOut = (s3 == 0 && s1 < 0)
    //
    // Generate:
    //     $checkForNegativeZeroLabel:
    //         test s3, s3
    //         jne $skipBailOutLabel
    //         test s1, s1
    //         jns $skipBailOutLabel
    //     (fall through to bail out)
    IR::Opnd *dst = instr->GetDst(), *src1 = instr->GetSrc1();
    Assert(dst->IsRegOpnd());
    IR::Instr * newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
    newInstr->SetSrc1(dst);
    newInstr->SetSrc2(dst);
    bailOutLabel->InsertBefore(newInstr);
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));
    // Fast path already checks if s1 >= 0
    if (!fastPath)
    {
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        bailOutLabel->InsertBefore(newInstr);
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
    }
    // Fall through to bailOutLabel
    // Lower the instruction
    LowererMDArch::EmitInt4Instr(instr);
}
  5373. IR::Instr *
  5374. LowererMD::LoadFloatZero(IR::Opnd * opndDst, IR::Instr * instrInsert)
  5375. {
  5376. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOVSD_ZERO, opndDst, instrInsert->m_func);
  5377. instrInsert->InsertBefore(instr);
  5378. return instr;
  5379. }
// Load the floating-point constant `value` into opndDst (float32 or float64).
// Positive zero is materialized register-only; any other constant is stored in
// native code data and loaded from memory — by absolute address for in-proc
// JIT, or via the native-data base pointer plus offset for OOP JIT.
// Returns the final load/assign instruction.
template <typename T>
IR::Instr *
LowererMD::LoadFloatValue(IR::Opnd * opndDst, T value, IR::Instr * instrInsert)
{
    if (value == 0.0 && !Js::JavascriptNumber::IsNegZero(value))
    {
        // zero can be loaded with "XORPS xmm, xmm" rather than needing memory load
        return LoadFloatZero(opndDst, instrInsert);
    }
    IR::Opnd * opnd;
    void* pValue = nullptr;
    const bool isFloat64 = opndDst->IsFloat64();
    IRType irtype = isFloat64 ? TyMachDouble : TyFloat32;
    // Cast the value to the matching opndDst's type because T might not match
    if (isFloat64)
    {
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_LoadFloatValue_Double>, (double)value);
    }
    else
    {
        Assert(opndDst->IsFloat32());
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), FloatType<DataDesc_LowererMD_LoadFloatValue_Float>, (float)value);
    }
    if (!instrInsert->m_func->IsOOPJIT())
    {
        // In-proc JIT: the constant's address is final, reference it directly.
        opnd = IR::MemRefOpnd::New((void*)pValue, irtype,
            instrInsert->m_func, isFloat64 ? IR::AddrOpndKindDynamicDoubleRef : IR::AddrOpndKindDynamicFloatRef);
    }
    else // OOP JIT
    {
        // OOP JIT: load the target process's native-data base address, then
        // address the constant as [base + offset] within that block.
        int offset = NativeCodeData::GetDataTotalOffset(pValue);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
        Lowerer::InsertMove(
            addressRegOpnd,
            IR::MemRefOpnd::New(instrInsert->m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, instrInsert->m_func, IR::AddrOpndKindDynamicNativeCodeDataRef),
            instrInsert);
        opnd = IR::IndirOpnd::New(addressRegOpnd, offset, irtype,
#if DBG
            NativeCodeData::GetDataDescription(pValue, instrInsert->m_func->m_alloc),
#endif
            instrInsert->m_func, true);
    }
    // movsd xmm, [reg+offset]
    IR::Instr * instr = IR::Instr::New(LowererMDArch::GetAssignOp(opndDst->GetType()), opndDst, opnd, instrInsert->m_func);
    instrInsert->InsertBefore(instr);
    Legalize(instr);
    return instr;
}
// Explicit instantiations for the two float widths used by the lowerer.
template IR::Instr * LowererMD::LoadFloatValue<float>(IR::Opnd * opndDst, float value, IR::Instr * instrInsert);
template IR::Instr * LowererMD::LoadFloatValue<double>(IR::Opnd * opndDst, double value, IR::Instr * instrInsert);
IR::Instr *
LowererMD::EnsureAdjacentArgs(IR::Instr * instrArg)
{
    // Ensure that the arg instructions for a given call site are adjacent.
    // This isn't normally desirable for CQ, but it's required by, for instance, the cloner,
    // which must clone a complete call sequence.
    IR::Opnd * opnd = instrArg->GetSrc2();
    IR::Instr * instrNextArg;
    StackSym * sym;
    AssertMsg(opnd, "opnd");
    // Walk the arg chain: each arg's src2 is a sym opnd referencing the
    // previous arg's def. Sink each linked arg to sit just before its consumer.
    while (opnd->IsSymOpnd())
    {
        sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
        instrNextArg = sym->m_instrDef;
        Assert(instrNextArg);
        instrNextArg->SinkInstrBefore(instrArg);
        instrArg = instrNextArg;
        opnd = instrArg->GetSrc2();
    }
    // The chain terminates in a reg opnd whose def must be the StartCall.
    sym = opnd->AsRegOpnd()->m_sym;
    instrNextArg = sym->m_instrDef;
    Assert(instrNextArg && instrNextArg->m_opcode == Js::OpCode::StartCall);
    // The StartCall can be trivially moved down.
    if (instrNextArg->m_next != instrArg)
    {
        instrNextArg->UnlinkStartCallFromBailOutInfo(instrArg);
        instrNextArg->Unlink();
        instrArg->InsertBefore(instrNextArg);
    }
    // Return the instruction now immediately preceding the StartCall.
    return instrNextArg->m_prev;
}
  5461. #if INT32VAR
  5462. //
  5463. // Convert an int32 to Var representation.
  5464. //
  5465. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  5466. {
  5467. AssertMsg(TySize[opndSrc->GetType()] == MachPtr, "For this to work it should be a 64-bit register");
  5468. IR::Instr* instr = IR::Instr::New(Js::OpCode::BTS, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  5469. insertInstr->InsertBefore(instr);
  5470. }
  5471. //
  5472. // jump to $labelHelper, based on the result of CMP
  5473. //
//
// jump to $labelHelper, based on the result of CMP
//
void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
{
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    // s1 = MOV src1 - Move to a temporary
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
    if (instrFirst)
    {
        // Report the first emitted instruction back to the caller.
        *instrFirst = instr;
    }
    // s1 = SHR s1, VarTag_Shift - bring the tag bits down into the low bits
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);
    // CMP s1, AtomTag - a tagged int leaves exactly AtomTag after the shift
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndReg);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func, /* dontEncode = */ true));
    insertInstr->InsertBefore(instr);
    if(fContinueLabel)
    {
        // JEQ $labelHelper - branch when it IS a tagged int (labelHelper is the continue target here)
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
    }
    else
    {
        // JNE $labelHelper - branch when it is NOT a tagged int
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
    }
    insertInstr->InsertBefore(instr);
}
  5505. //
  5506. // If lower 32-bits are zero (value is zero), jump to $helper.
  5507. //
  5508. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  5509. {
  5510. // Cast the var to 32 bit integer.
  5511. if(opndSrc->GetSize() != 4)
  5512. {
  5513. opndSrc = opndSrc->UseWithNewType(TyUint32, this->m_func);
  5514. }
  5515. AssertMsg(TySize[opndSrc->GetType()] == 4, "This technique works only on the 32-bit version");
  5516. // TEST src1, src1
  5517. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5518. instr->SetSrc1(opndSrc);
  5519. instr->SetSrc2(opndSrc);
  5520. insertInstr->InsertBefore(instr);
  5521. if(labelHelper != nullptr)
  5522. {
  5523. // JZ $labelHelper
  5524. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5525. insertInstr->InsertBefore(instr);
  5526. }
  5527. }
  5528. //
  5529. // If top 16 bits are not zero i.e. it is NOT object, jump to $helper.
  5530. //
//
// If top 16 bits are not zero i.e. it is NOT object, jump to $helper.
// Returns true when a runtime test was emitted, false when the answer was
// statically known and no test was needed.
//
bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
    if (opndSrc->IsTaggedValue() && fContinueLabel)
    {
        // Statically known to be tagged, so never an object.
        // Insert delete branch opcode to tell the dbChecks not to assert on the helper label we may fall through into
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);
        return false;
    }
    else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
    {
        // Statically known to be untagged (an object): no test needed.
        return false;
    }
    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    // s1 = MOV src1 - Move to a temporary
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
    // s1 = SHR s1, VarTag_Shift - any bit left set means the var is tagged (not an object)
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);
    if (fContinueLabel)
    {
        // JEQ $labelTarget - all tag bits clear: it is an object, take the continue label
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        // Mark the fall-through as a helper block for the dbChecks.
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // JNZ $labelTarget - some tag bit set: not an object, go to the helper
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    return true;
}
  5568. #else
  5569. //
  5570. // Convert an int32 value to a Var.
  5571. //
  5572. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  5573. {
  5574. // SHL r1, AtomTag
  5575. IR::Instr * instr = IR::Instr::New(Js::OpCode::SHL, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func), this->m_func);
  5576. insertInstr->InsertBefore(instr);
  5577. // INC r1
  5578. instr = IR::Instr::New(Js::OpCode::INC, opndSrc, opndSrc, this->m_func);
  5579. insertInstr->InsertBefore(instr);
  5580. }
  5581. //
  5582. // jump to $labelHelper, based on the result of TEST
  5583. //
  5584. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
  5585. {
  5586. if (opndSrc->IsTaggedInt() && !fContinueLabel)
  5587. {
  5588. return;
  5589. }
  5590. else if (opndSrc->IsNotTaggedValue() && fContinueLabel)
  5591. {
  5592. return;
  5593. }
  5594. // TEST src1, AtomTag
  5595. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  5596. instr->SetSrc1(opndSrc);
  5597. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
  5598. insertInstr->InsertBefore(instr);
  5599. if (instrFirst)
  5600. {
  5601. *instrFirst = instr;
  5602. }
  5603. if(fContinueLabel)
  5604. {
  5605. // JNE $labelHelper
  5606. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  5607. }
  5608. else
  5609. {
  5610. // JEQ $labelHelper
  5611. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5612. }
  5613. insertInstr->InsertBefore(instr);
  5614. }
  5615. //
  5616. // If value is zero in tagged int representation, jump to $labelHelper.
  5617. //
  5618. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  5619. {
  5620. if (opndSrc->IsNotTaggedValue())
  5621. {
  5622. return;
  5623. }
  5624. // CMP src1, AtomTag
  5625. IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  5626. instr->SetSrc1(opndSrc);
  5627. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func));
  5628. insertInstr->InsertBefore(instr);
  5629. // JEQ $helper
  5630. if(labelHelper != nullptr)
  5631. {
  5632. // JEQ $labelHelper
  5633. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  5634. insertInstr->InsertBefore(instr);
  5635. }
  5636. }
  5637. //
  5638. // If not object, jump to $labelHelper.
  5639. //
//
// If not object, jump to $labelHelper.
// Returns true when a runtime test was emitted, false when the answer was
// statically known and no test was needed.
//
bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
{
    if (opndSrc->IsTaggedInt() && fContinueLabel)
    {
        // Statically a tagged int, so never an object.
        // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
        insertInstr->InsertBefore(fakeBr);
        return false;
    }
    else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
    {
        // Statically not tagged: no test needed.
        return false;
    }
    // TEST src1, AtomTag - the tag bit distinguishes tagged ints from pointers
    IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(opndSrc);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
    insertInstr->InsertBefore(instr);
    if (fContinueLabel)
    {
        // JEQ $labelTarget - tag bit clear: it is an object, take the continue label
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
        insertInstr->InsertBefore(instr);
        // Mark the fall-through as a helper block for the dbChecks.
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        insertInstr->InsertBefore(labelHelper);
    }
    else
    {
        // JNE $labelTarget - tag bit set: not an object, go to the helper
        IR::BranchInstr* branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
        insertInstr->InsertBefore(branchInstr);
        // Poison the operand along the fall-through (object) path; see InsertObjectPoison.
        InsertObjectPoison(opndSrc, branchInstr, insertInstr, false);
    }
    return true;
}
  5675. #endif
  5676. #if FLOATVAR
  5677. //
  5678. // If any of the top 14 bits are not set, then the var is not a float value and hence, jump to $labelHelper.
  5679. //
//
// If any of the top 14 bits are not set, then the var is not a float value and hence, jump to $labelHelper.
//
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Statically known float: no runtime test needed.
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
    // s1 = MOV src1 - Move to a temporary
    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
    // s1 = SHR s1, 50 - isolate the top 14 tag bits
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(50, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);
    // JZ $helper - all of the top 14 bits clear means this is not a tagged float
    instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
// Emit a runtime tagged-float check (jumping to labelHelper when opndSrc is
// not a tagged float, unless it is statically known to be one) and return a
// new float64 register holding the untagged double bits.
IR::RegOpnd* LowererMD::CheckFloatAndUntag(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper)
{
    IR::Opnd* floatTag = IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value, IR::AddrOpndKindConstantVar, this->m_func, /* dontEncode = */ true);
    IR::RegOpnd* regOpndFloatTag = IR::RegOpnd::New(TyUint64, this->m_func);
    // MOV floatTagReg, FloatTag_Value
    IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, regOpndFloatTag, floatTag, this->m_func);
    insertInstr->InsertBefore(instr);
    if (!opndSrc->GetValueType().IsFloat())
    {
        // TEST s1, floatTagReg - no float-tag bits set means not a tagged float
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc);
        instr->SetSrc2(regOpndFloatTag);
        insertInstr->InsertBefore(instr);
        // JZ $helper
        instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    // untaggedFloat = XOR floatTagReg, s1 // where untaggedFloat == floatTagReg; use floatTagReg temporarily for the untagged float
    // (XOR with the tag strips it, recovering the raw double bits)
    IR::RegOpnd* untaggedFloat = regOpndFloatTag;
    instr = IR::Instr::New(Js::OpCode::XOR, untaggedFloat, regOpndFloatTag, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);
    // MOVD xmm, untaggedFloat - move the raw bits into a float register
    IR::RegOpnd *floatReg = IR::RegOpnd::New(TyMachDouble, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOVD, floatReg, untaggedFloat, this->m_func);
    insertInstr->InsertBefore(instr);
    return floatReg;
}
  5725. #else
// Non-FLOATVAR build: a float value is a boxed JavascriptNumber, so the test
// compares the object's vtable against JavascriptNumber's vtable.
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Statically known float: no runtime test needed.
    if (opndSrc->GetValueType().IsFloat())
    {
        return;
    }
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
    if(checkForNullInLoopBody && m_func->IsLoopBody())
    {
        // It's possible that the value was determined dead by the jitted function and was not restored. The jitted loop
        // body may not realize that it's dead and may try to use it. Check for null in loop bodies.
        //     test src1, src1
        //     jz $helper (bail out)
        m_lowerer->InsertCompareBranch(
            opndSrc,
            IR::AddrOpnd::NewNull(m_func),
            Js::OpCode::BrEq_A,
            labelHelper,
            insertInstr);
    }
    // CMP [src1], JavascriptNumber vtable
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, insertInstr->m_func);
    instr->SetSrc1(IR::IndirOpnd::New(opndSrc, 0, TyMachPtr, insertInstr->m_func));
    instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
    insertInstr->InsertBefore(instr);
    // JNZ $helper  (NOTE: the original comment said JZ, but this branch is taken on mismatch)
    instr = IR::BranchInstr::New(Js::OpCode::JNE /* JNZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
  5754. #endif
  5755. #if DBG
  5756. //
  5757. // Helps in debugging of fast paths.
  5758. //
  5759. void LowererMD::GenerateDebugBreak( IR::Instr * insertInstr )
  5760. {
  5761. // int 3
  5762. IR::Instr *int3 = IR::Instr::New(Js::OpCode::INT, insertInstr->m_func);
  5763. int3->SetSrc1(IR::IntConstOpnd::New(3, TyInt32, insertInstr->m_func));
  5764. insertInstr->InsertBefore(int3);
  5765. }
  5766. #endif
// Make dst and src1 the same operand, rewriting the instruction as needed
// (the two-operand machine encodings require dst == src1). When `verify` is
// true, only assert that legalization already produced that form.
template <bool verify>
void
LowererMD::MakeDstEquSrc1(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->IsLowered());
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());
    if(instr->GetDst()->IsEqual(instr->GetSrc1()))
    {
        // Already in the required form.
        return;
    }
    if (verify)
    {
        AssertMsg(false, "dst and src1 should be the same at this point. Missing Legalization");
        return;
    }
    if(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2()))
    {
        // dst aliases src2. Commutative ops can simply swap their sources.
        switch(instr->m_opcode)
        {
#ifdef _M_IX86
        case Js::OpCode::ADC:
#endif
        case Js::OpCode::Add_I4:
        case Js::OpCode::Mul_I4:
        case Js::OpCode::Or_I4:
        case Js::OpCode::Xor_I4:
        case Js::OpCode::And_I4:
        case Js::OpCode::ADD:
        case Js::OpCode::IMUL2:
        case Js::OpCode::OR:
        case Js::OpCode::XOR:
        case Js::OpCode::AND:
        case Js::OpCode::ADDSD:
        case Js::OpCode::MULSD:
        case Js::OpCode::ADDSS:
        case Js::OpCode::MULSS:
        case Js::OpCode::ADDPS:
            // For (a = b & a), generate (a = a & b)
            instr->SwapOpnds();
            return;
        }
        // Non-commutative op with dst aliasing src2:
        // For (a = b - a), generate (c = a; a = b - c) and fall through
        ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
    }
    // For (a = b - c), generate (a = b; a = a - c)
    IR::Instr *const mov = IR::Instr::New(Js::OpCode::Ld_A, instr->GetDst(), instr->UnlinkSrc1(), instr->m_func);
    instr->InsertBefore(mov);
    ChangeToAssign(mov);
    instr->SetSrc1(instr->GetDst());
}
// Lower an int64 instruction. Only x86 has a dedicated path (int64 values are
// handled as 32-bit register pairs there — see EmitSignExtend); other targets
// must never reach this.
void
LowererMD::EmitInt64Instr(IR::Instr * instr)
{
#ifdef _M_IX86
    lowererMDArch.EmitInt64Instr(instr);
#else
    Assert(UNREACHED);
#endif
}
// Lower an int32 instruction via the static arch-specific implementation.
void
LowererMD::EmitInt4Instr(IR::Instr *instr)
{
    LowererMDArch::EmitInt4Instr(instr);
}
  5833. void
  5834. LowererMD::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
  5835. {
  5836. lowererMDArch.EmitLoadVar(instrLoad, isFromUint32, isHelper);
  5837. }
  5838. bool
  5839. LowererMD::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
  5840. {
  5841. return lowererMDArch.EmitLoadInt32(instrLoad, conversionFromObjectAllowed, bailOutOnHelper, labelBailOut);
  5842. }
  5843. void
  5844. LowererMD::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5845. {
  5846. this->lowererMDArch.EmitIntToFloat(dst, src, instrInsert);
  5847. }
  5848. void
  5849. LowererMD::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5850. {
  5851. this->lowererMDArch.EmitUIntToFloat(dst, src, instrInsert);
  5852. }
  5853. void
  5854. LowererMD::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5855. {
  5856. this->lowererMDArch.EmitIntToLong(dst, src, instrInsert);
  5857. }
  5858. void
  5859. LowererMD::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5860. {
  5861. this->lowererMDArch.EmitUIntToLong(dst, src, instrInsert);
  5862. }
  5863. void
  5864. LowererMD::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5865. {
  5866. this->lowererMDArch.EmitLongToInt(dst, src, instrInsert);
  5867. }
// Emit a sign-extending move from src1 into dst. src2's size (1, 2 or 4
// bytes) selects the source width; dst is strictly wider.
void LowererMD::EmitSignExtend(IR::Instr * instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();
    Assert(dst && src1 && src2);
    // Src2 is used to determine what's the from type size
    Assert(src2->GetSize() < dst->GetSize());
    IRType fromType = src2->GetType();
    Js::OpCode op = Js::OpCode::MOVSX; // byte source by default
    switch (src2->GetSize())
    {
    case 1: break; // default
    case 2: op = Js::OpCode::MOVSXW; break;
    case 4:
#if _M_X64
        op = Js::OpCode::MOVSXD;
#else
        // No 32->64 GPR extension on x86; a plain 32-bit move suffices here
        // and the int64 pair path below produces the high half.
        op = LowererMDArch::GetAssignOp(fromType);
#endif
        break;
    default:
        Assert(UNREACHED);
    }
#if _M_IX86
    // Special handling of int64 on x86: sign-extend the low half into EAX,
    // let CDQ materialize the high half in EDX, then move the pair into dst.
    if (dst->IsInt64())
    {
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
        Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src1);
        IR::RegOpnd * eaxReg = IR::RegOpnd::New(RegEAX, TyInt32, m_func);
        IR::RegOpnd * edxReg = IR::RegOpnd::New(RegEDX, TyInt32, m_func);
        instr->InsertBefore(IR::Instr::New(op, eaxReg, srcPair.low->UseWithNewType(fromType, m_func), m_func));
        Legalize(instr->m_prev);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, edxReg, m_func));
        Legalize(instr->m_prev);
        m_lowerer->InsertMove(dstPair.low, eaxReg, instr);
        m_lowerer->InsertMove(dstPair.high, edxReg, instr);
    }
    else
#endif
    {
        instr->InsertBefore(IR::Instr::New(op, dst, src1->UseWithNewType(fromType, m_func), m_func));
        Legalize(instr->m_prev);
    }
}
  5914. void
  5915. LowererMD::EmitFloat32ToFloat64(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  5916. {
  5917. // We should only generate this if sse2 is available
  5918. Assert(AutoSystemInfo::Data.SSE2Available());
  5919. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  5920. Assert(src->IsRegOpnd() && src->GetType() == TyFloat32);
  5921. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSS2SD, dst, src, this->m_func));
  5922. }
// Convert an int64/uint64 src to a float32/float64 dst.
// x86: there are no 64-bit GPRs, so the conversion is performed by a helper
// call selected from the (toType, fromType) pair. x64: CVTSI2SD handles the
// signed case directly; an unsigned value with the MSB set (which CVTSI2SD
// would treat as negative) is halved with its dropped low bit OR'ed back in,
// converted, then doubled.
void
LowererMD::EmitInt64toFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instr)
{
#ifdef _M_IX86
    IR::Opnd *srcOpnd = instr->UnlinkSrc1();
    LoadInt64HelperArgument(instr, srcOpnd);
    IR::Instr* callinstr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instr->InsertBefore(callinstr);
    CompileAssert(sizeof(IRType) == 1);
    // Pack (toType, fromType) into one key to select the helper.
    const uint16 fromToType = dst->GetType() | (srcOpnd->GetType() << 8);
    IR::JnHelperMethod method = IR::HelperOp_Throw;
    switch (fromToType)
    {
    case TyFloat32 | (TyInt64 << 8) : method = IR::HelperI64TOF32; break;
    case TyFloat32 | (TyUint64 << 8) : method = IR::HelperUI64TOF32; break;
    case TyFloat64 | (TyInt64 << 8) : method = IR::HelperI64TOF64; break;
    case TyFloat64 | (TyUint64 << 8) : method = IR::HelperUI64TOF64; break;
    default:
        Assert(UNREACHED);
    }
    this->ChangeToHelperCall(callinstr, method);
#else
    // For a float32 destination, convert to double first and narrow at the end.
    IR::Opnd* origDst = nullptr;
    if (dst->IsFloat32())
    {
        origDst = dst;
        dst = IR::RegOpnd::New(TyFloat64, this->m_func);
    }
    const auto insertLegalize = [instr](IR::Instr* newInstr)
    {
        instr->InsertBefore(newInstr);
        Legalize(newInstr);
    };
    if (src->IsUnsigned())
    {
        // TEST src, src / JSB: does the unsigned value have its MSB set?
        insertLegalize(IR::Instr::New(Js::OpCode::TEST, nullptr, src, src, m_func));
        IR::LabelInstr* msbSetLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JSB, msbSetLabel, m_func));
        // MSB not set, simple case
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, m_func));
        insertLegalize(msbSetLabel);
        // MSB set: halve the value (so it fits in a signed int64), OR the
        // dropped low bit back in so it still influences rounding, convert,
        // then double the result with dst = dst + dst.
        IR::RegOpnd* halfOpnd = IR::RegOpnd::New(TyInt64, m_func);
        IR::RegOpnd* lsbOpnd = IR::RegOpnd::New(TyInt64, m_func);
        m_lowerer->InsertMove(halfOpnd, src, instr);
        m_lowerer->InsertMove(lsbOpnd, src, instr);
        insertLegalize(IR::Instr::New(Js::OpCode::SHR, halfOpnd, halfOpnd, IR::IntConstOpnd::New(1, TyInt8, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::AND, lsbOpnd, lsbOpnd, IR::Int64ConstOpnd::New(1, TyInt64, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::OR, halfOpnd, halfOpnd, lsbOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, halfOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::ADDSD, dst, dst, dst, m_func));
        insertLegalize(doneLabel);
    }
    else
    {
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
    }
    if (origDst)
    {
        // Narrow the intermediate double to the requested float32.
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSD2SS, origDst, dst, m_func));
    }
#endif
}
// Overflow check for an IMUL whose result is allowed to occupy
// instr->ignoreOverflowBitCount (> 32) bits of the 64-bit product.
// EDX holds the product's high 32 bits; the product fits iff every bit above
// bit (ignoreOverflowBitCount - 1) is a copy of that sign bit. With
// shamt = 64 - ignoreOverflowBitCount, temp = (edx << shamt) >> shamt
// (arithmetic shift) reproduces that sign extension, so temp != edx means
// overflow and we branch to bailOutLabel.
void
LowererMD::EmitNon32BitOvfCheck(IR::Instr *instr, IR::Instr *insertInstr, IR::LabelInstr* bailOutLabel)
{
    AssertMsg(instr->m_opcode == Js::OpCode::IMUL, "IMUL should be used to check for non-32 bit overflow check on x86.");
    IR::RegOpnd *edxSym = IR::RegOpnd::New(TyInt32, instr->m_func);
#ifdef _M_IX86
    edxSym->SetReg(RegEDX);
#else
    edxSym->SetReg(RegRDX);
#endif
    // dummy def for edx to force RegAlloc to generate a lifetime. This is removed later by the Peeps phase.
    IR::Instr *newInstr = IR::Instr::New(Js::OpCode::NOP, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);
    IR::RegOpnd *temp = IR::RegOpnd::New(TyInt32, instr->m_func);
    Assert(instr->ignoreOverflowBitCount > 32);
    uint8 shamt = 64 - instr->ignoreOverflowBitCount;
    // MOV temp, edx
    newInstr = IR::Instr::New(Js::OpCode::MOV, temp, edxSym, instr->m_func);
    insertInstr->InsertBefore(newInstr);
    // SHL temp, shamt
    newInstr = IR::Instr::New(Js::OpCode::SHL, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);
    // SAR temp, shamt
    newInstr = IR::Instr::New(Js::OpCode::SAR, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
    insertInstr->InsertBefore(newInstr);
    // CMP temp, edx
    newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
    newInstr->SetSrc1(temp);
    newInstr->SetSrc2(edxSym);
    insertInstr->InsertBefore(newInstr);
    // JNE - mismatch means the product overflows the allowed bit count
    Lowerer::InsertBranch(Js::OpCode::JNE, false, bailOutLabel, insertInstr);
}
// Truncate floatOpnd into the int32 intOpnd. CVTTSD2SI/CVTTSS2SI yield the
// "integer indefinite" sentinel (MachSignBit) when the value does not fit;
// comparing against that sentinel detects failure. On success control
// branches to labelDone; on failure it falls through (toward the caller's
// helper code). On x86 with SSE3, a 64-bit FISTTP conversion through a stack
// slot is attempted first before giving up. labelHelper is unused here (ARM).
void LowererMD::ConvertFloatToInt32(IR::Opnd* intOpnd, IR::Opnd* floatOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone, IR::Instr * instInsert)
{
    UNREFERENCED_PARAMETER(labelHelper); // used on ARM
#if defined(_M_IX86)
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Assert((floatOpnd->IsRegOpnd() && floatOpnd->IsFloat()) || (floatOpnd->IsIndirOpnd() && floatOpnd->GetType() == TyMachDouble));
    Assert(intOpnd->GetType() == TyInt32);
    IR::Instr* instr;
    {
#ifdef _M_X64
        IR::Opnd* dstOpnd = IR::RegOpnd::New(TyInt64, m_func);
#else
        IR::Opnd* dstOpnd = intOpnd;
#endif
        // CVTTSD2SI dst, floatOpnd
        instr = IR::Instr::New(floatOpnd->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, dstOpnd, floatOpnd, this->m_func);
        instInsert->InsertBefore(instr);
        // CMP dst, 0x80000000 {0x8000000000000000 on x64} -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(dstOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func, true));
        instInsert->InsertBefore(instr);
        Legalize(instr);
#ifdef _M_X64
        // Truncate to int32 for x64. We still need to go to helper though if we have int64 overflow.
        // MOV_TRUNC intOpnd, tmpOpnd
        instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, intOpnd, dstOpnd, this->m_func);
        instInsert->InsertBefore(instr);
#endif
    }
    // JNE $done - the result is not the sentinel, so the conversion succeeded
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
    instInsert->InsertBefore(instr);
    // It does overflow - Let's try using FISTTP which uses 64 bits and is relevant only for x86
    // but requires going to memory and should only be used in overflow scenarios
#ifdef _M_IX86
    if (AutoSystemInfo::Data.SSE3Available())
    {
        IR::Opnd* floatStackOpnd;
        // Lazily create the function's scratch double stack slot.
        StackSym* tempSymDouble = this->m_func->tempSymDouble;
        if (!tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
            tempSymDouble = this->m_func->tempSymDouble;
        }
        // FISTTP operates on doubles; widen a float32 source first.
        IR::Opnd * float64Opnd;
        if (floatOpnd->IsFloat32())
        {
            float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
            instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, floatOpnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            float64Opnd = floatOpnd;
        }
        // Spill a register source to the scratch slot so x87 can load it.
        if (float64Opnd->IsRegOpnd())
        {
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
            instr = IR::Instr::New(Js::OpCode::MOVSD, floatStackOpnd, float64Opnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            floatStackOpnd = float64Opnd;
        }
        // FLD [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::FLD, floatStackOpnd, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);
        if (!float64Opnd->IsRegOpnd())
        {
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
        }
        // FISTTP qword ptr [tmpDouble] - 64-bit truncating store, pops the x87 stack
        instr = IR::Instr::New(Js::OpCode::FISTTP, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);
        // Alias an int32 sym over the low dword of the scratch double slot.
        StackSym *intSym = StackSym::New(TyInt32, m_func);
        intSym->m_offset = tempSymDouble->m_offset;
        intSym->m_allocated = true;
        IR::Opnd* lowerBitsOpnd = IR::SymOpnd::New(intSym, TyInt32, m_func);
        // MOV dst, dword ptr [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::MOV, intOpnd, lowerBitsOpnd, m_func);
        instInsert->InsertBefore(instr);
        // TEST dst, dst -- Check for overflow
        // (only the 64-bit pattern low == 0, high == 0x80000000 — the x87
        // "integer indefinite" produced on overflow/NaN — falls through to the
        // helper; any other low dword means a usable result)
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(intOpnd);
        instr->SetSrc2(intOpnd);
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);
        // CMP [tmpDouble - 4], 0x80000000 - inspect the high dword of the 64-bit result
        StackSym* higherBitsSym = StackSym::New(TyInt32, m_func);
        higherBitsSym->m_offset = tempSymDouble->m_offset + 4;
        higherBitsSym->m_allocated = true;
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::SymOpnd::New(higherBitsSym, TyInt32, m_func));
        instr->SetSrc2(IR::IntConstOpnd::New(0x80000000, TyInt32, this->m_func, true));
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);
    }
#endif
}
IR::Instr *
LowererMD::InsertConvertFloat64ToInt32(const RoundMode roundMode, IR::Opnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
{
    // Emits a float64 -> int32 conversion sequence before insertBeforeInstr,
    // honoring the requested rounding mode. Returns the final conversion
    // instruction so the caller can attach overflow handling to it.
    Assert(dst);
    Assert(dst->IsInt32());
    Assert(src);
    Assert(src->IsFloat64());
    Assert(insertBeforeInstr);

    // The caller is expected to check for overflow. To have that work be done automatically, use LowererMD::EmitFloatToInt.

    Func *const func = insertBeforeInstr->m_func;
    IR::AutoReuseOpnd autoReuseSrcPlusHalf;
    IR::Instr *instr = nullptr;

    switch (roundMode)
    {
        case RoundModeTowardInteger:
        {
            // Conversion with rounding towards nearest integer is not supported by the architecture. Add 0.5 and do a
            // round-toward-zero conversion instead.
            // NOTE(review): adding 0.5 then truncating is round-half-up, which differs
            // from round-half-to-even for exact .5 values — presumably intended here.
            IR::RegOpnd *const srcPlusHalf = IR::RegOpnd::New(TyFloat64, func);
            autoReuseSrcPlusHalf.Initialize(srcPlusHalf, func);
            Lowerer::InsertAdd(
                false /* needFlags */,
                srcPlusHalf,
                src,
                IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, func,
                    IR::AddrOpndKindDynamicDoubleRef),
                insertBeforeInstr);
            // Truncating conversion (e.g. CVTTSD2SI) of src + 0.5.
            instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeTowardZero), dst, srcPlusHalf, func);
            insertBeforeInstr->InsertBefore(instr);
            LowererMD::Legalize(instr);
            return instr;
        }
        case RoundModeHalfToEven:
        {
            // Maps directly to the architecture's default (banker's) rounding conversion.
            instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeHalfToEven), dst, src, func);
            insertBeforeInstr->InsertBefore(instr);
            LowererMD::Legalize(instr);
            return instr;
        }
        default:
            AssertMsg(0, "RoundMode not supported.");
            return nullptr;
    }
}
void
LowererMD::EmitFloatToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert, IR::Instr *instrBailOut, IR::LabelInstr * labelBailOut)
{
    // Emits a float -> int32 conversion with a fast inline path and a helper
    // fallback (HelperConv_ToInt32Core) for values the inline path cannot
    // handle. If the originating instruction bails out on array-access helper
    // calls, branches straight to the bailout label instead of calling the helper.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif

    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    if (instrBailOut && instrBailOut->HasBailOutInfo())
    {
        bailOutKind = instrBailOut->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
        {
            // Bail out instead of calling helper. If this is happening unconditionally, the caller should instead throw a rejit exception.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrInsert);
            return;
        }
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *instr;

    // Fast path: inline conversion; jumps to labelHelper on failure, labelDone on success.
    ConvertFloatToInt32(dst, src, labelHelper, labelDone, instrInsert);

    // $Helper
    instrInsert->InsertBefore(labelHelper);

    // The helper takes a double; widen a float32 source first.
    IR::Opnd * arg = src;
    if (src->IsFloat32())
    {
        arg = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(arg, src, instrInsert);
    }

    instr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instrInsert->InsertBefore(instr);

    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        // Attach bailout info to the helper call; if this instruction owns the
        // bailout record, split out a shared bail target for other users first.
        _Analysis_assume_(instrBailOut != nullptr);
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // dst = ToInt32Core(src);
    LoadDoubleHelperArgument(instr, arg);

    this->ChangeToHelperCall(instr, IR::HelperConv_ToInt32Core);

    // $Done
    instrInsert->InsertBefore(labelDone);
}
void
LowererMD::EmitLoadVarNoCheck(IR::RegOpnd * dst, IR::RegOpnd * src, IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Boxes an int32/uint32 value in src into a Var in dst. On x86 without
    // SSE2, calls the runtime ToAtom helpers; otherwise converts to float64
    // inline and stores it as a (possibly stack-allocated temp) number.
#ifdef _M_IX86
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        IR::JnHelperMethod helperMethod;

        // PUSH &floatTemp
        IR::Opnd *tempOpnd;
        if (instrLoad->dstIsTempNumber)
        {
            // In-place variants reuse a stack temp number instead of allocating.
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtomInPlace : IR::HelperOp_Int32ToAtomInPlace;

            // Use the original dst to get the temp number sym
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrLoad->GetDst(), instrLoad->dstIsTempNumberTransferred);

            IR::Instr *load = this->m_lowerer->InsertLoadStackAddress(tempNumberSym, instrLoad);
            tempOpnd = load->GetDst();
            this->LoadHelperArgument(instrLoad, tempOpnd);
        }
        else
        {
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtom : IR::HelperOp_Int32ToAtom;
        }

        // PUSH memContext
        this->m_lowerer->LoadScriptContext(instrLoad);

        // PUSH s1
        this->LoadHelperArgument(instrLoad, src);

        // dst = ToVar()
        IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, dst,
            IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
        instrLoad->InsertBefore(instr);
        this->LowerCall(instr, 0);
        return;
    }
#endif

    // SSE2 path: widen the integer to float64, then box the double.
    IR::RegOpnd * floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
    if (isFromUint32)
    {
        this->EmitUIntToFloat(floatReg, src, instrLoad);
    }
    else
    {
        this->EmitIntToFloat(floatReg, src, instrLoad);
    }
    this->SaveDoubleToVar(dst, floatReg, instrLoad, instrLoad, isHelper);
}
  6264. void
  6265. LowererMD::ImmedSrcToReg(IR::Instr * instr, IR::Opnd * newOpnd, int srcNum)
  6266. {
  6267. if (srcNum == 2)
  6268. {
  6269. instr->SetSrc2(newOpnd);
  6270. }
  6271. else
  6272. {
  6273. Assert(srcNum == 1);
  6274. instr->SetSrc1(newOpnd);
  6275. }
  6276. }
  6277. IR::LabelInstr *
  6278. LowererMD::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  6279. {
  6280. return lowererMDArch.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  6281. }
  6282. StackSym *
  6283. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot)
  6284. {
  6285. return GetImplicitParamSlotSym(argSlot, this->m_func);
  6286. }
  6287. StackSym *
  6288. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot, Func * func)
  6289. {
  6290. // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
  6291. // Pass in the EBP+8 to start at the function object, the start of the implicit param slots
  6292. StackSym * stackSym = StackSym::NewImplicitParamSym(argSlot, func);
  6293. func->SetArgOffset(stackSym, (2 + argSlot) * MachPtr);
  6294. func->SetHasImplicitParamLoad();
  6295. return stackSym;
  6296. }
  6297. bool LowererMD::GenerateFastAnd(IR::Instr * instrAnd)
  6298. {
  6299. return this->lowererMDArch.GenerateFastAnd(instrAnd);
  6300. }
  6301. bool LowererMD::GenerateFastDivAndRem(IR::Instr* instrDiv, IR::LabelInstr* bailoutLabel)
  6302. {
  6303. return this->lowererMDArch.GenerateFastDivAndRem(instrDiv, bailoutLabel);
  6304. }
  6305. bool LowererMD::GenerateFastXor(IR::Instr * instrXor)
  6306. {
  6307. return this->lowererMDArch.GenerateFastXor(instrXor);
  6308. }
  6309. bool LowererMD::GenerateFastOr(IR::Instr * instrOr)
  6310. {
  6311. return this->lowererMDArch.GenerateFastOr(instrOr);
  6312. }
  6313. bool LowererMD::GenerateFastNot(IR::Instr * instrNot)
  6314. {
  6315. return this->lowererMDArch.GenerateFastNot(instrNot);
  6316. }
  6317. bool LowererMD::GenerateFastShiftLeft(IR::Instr * instrShift)
  6318. {
  6319. return this->lowererMDArch.GenerateFastShiftLeft(instrShift);
  6320. }
  6321. bool LowererMD::GenerateFastShiftRight(IR::Instr * instrShift)
  6322. {
  6323. return this->lowererMDArch.GenerateFastShiftRight(instrShift);
  6324. }
void LowererMD::GenerateIsJsObjectTest(IR::RegOpnd* instanceReg, IR::Instr* insertInstr, IR::LabelInstr* labelHelper)
{
    // Emits a check that instanceReg holds a JS object (not a tagged value and
    // not a primitive RecyclableObject); jumps to labelHelper otherwise.

    // TEST instanceReg, (Js::AtomTag_IntPtr | Js::FloatTag_Value )
    // Rejects tagged ints/floats first.
    GenerateObjectTest(instanceReg, insertInstr, labelHelper);

    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);

    //  MOV typeReg, instanceReg + offsetof(RecyclableObject::type)
    insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
        IR::IndirOpnd::New(instanceReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    //  CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func));
    cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func));
    insertInstr->InsertBefore(cmp);

    // JLE labelHelper -- type id in the primitive range means "not a JS object".
    insertInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JLE, labelHelper, this->m_func));
}
  6342. void
  6343. LowererMD::EmitReinterpretPrimitive(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  6344. {
  6345. Assert(dst && src);
  6346. Assert(dst->GetSize() == src->GetSize());
  6347. Assert(dst->GetType() != src->GetType());
  6348. if (
  6349. // Additional runtime check to prevent unknown behavior
  6350. (dst->GetSize() != src->GetSize()) ||
  6351. // There is nothing to do in this case
  6352. (dst->GetType() == src->GetType())
  6353. )
  6354. {
  6355. Lowerer::InsertMove(dst, src, insertBeforeInstr);
  6356. return;
  6357. }
  6358. auto LegalizeInsert = [insertBeforeInstr](IR::Instr* instr)
  6359. {
  6360. Legalize(instr);
  6361. insertBeforeInstr->InsertBefore(instr);
  6362. };
  6363. if (dst->GetSize() == 8)
  6364. {
  6365. #if _M_AMD64
  6366. LegalizeInsert(IR::Instr::New(Js::OpCode::MOVQ, dst, src, m_func));
  6367. #elif LOWER_SPLIT_INT64
  6368. if (dst->IsInt64())
  6369. {
  6370. // movd xmm2, xmm1
  6371. // movd low_bits, xmm2
  6372. // shufps xmm2, xmm2, 1
  6373. // movd high_bits, xmm2
  6374. Assert(src->IsFloat64());
  6375. Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);
  6376. // shufps modifies the register, we shouldn't change the source here
  6377. IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
  6378. Lowerer::InsertMove(tmpDouble, src, insertBeforeInstr);
  6379. LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.low, tmpDouble, m_func));
  6380. LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, tmpDouble, tmpDouble, IR::IntConstOpnd::New(1, TyInt8, m_func, true), m_func));
  6381. LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.high, tmpDouble, m_func));
  6382. }
  6383. else
  6384. {
  6385. // movd xmm0, lowBits;
  6386. // movd xmm1, highBits;
  6387. // shufps xmm0, xmm1, (0 | 2 << 2 | 0 << 4 | 1 << 6);
  6388. // shufps xmm0, xmm0, (0 | 2 << 2 | 3 << 4 | 3 << 6);
  6389. Assert(src->IsInt64());
  6390. Assert(dst->IsFloat64());
  6391. Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src);
  6392. IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
  6393. LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, srcPair.low, m_func));
  6394. LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, tmpDouble, srcPair.high, m_func));
  6395. LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, tmpDouble, IR::IntConstOpnd::New((0 | 2 << 2 | 0 << 4 | 1 << 6), TyInt8, m_func, true), m_func));
  6396. LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, dst, IR::IntConstOpnd::New((0 | 2 << 2 | 3 << 4 | 3 << 6), TyInt8, m_func, true), m_func));
  6397. }
  6398. #endif
  6399. }
  6400. else if (dst->GetSize() == 4)
  6401. {
  6402. // 32bit reinterprets
  6403. LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, src, m_func));
  6404. }
  6405. else
  6406. {
  6407. Assert(UNREACHED);
  6408. }
  6409. }
  6410. void LowererMD::EmitReinterpretFloatToInt(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  6411. {
  6412. Assert(dst->IsInt32() || dst->IsUInt32() || dst->IsInt64());
  6413. Assert(src->IsFloat());
  6414. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  6415. }
  6416. void LowererMD::EmitReinterpretIntToFloat(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  6417. {
  6418. Assert(dst->IsFloat());
  6419. Assert(src->IsInt32() || src->IsUInt32() || src->IsInt64());
  6420. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  6421. }
IR::Instr *
LowererMD::LowerToFloat(IR::Instr *instr)
{
    // Lowers a float-typed IR arithmetic/branch opcode to the matching SSE
    // machine opcode (the *SD form for float64, *SS for float32), then
    // legalizes the result. Float conditional branches are handed off to
    // LowerFloatCondBranch.
    switch (instr->m_opcode)
    {
    case Js::OpCode::Add_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS;
        break;

    case Js::OpCode::Sub_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SUBSD : Js::OpCode::SUBSS;
        break;

    case Js::OpCode::Mul_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::MULSD : Js::OpCode::MULSS;
        break;

    case Js::OpCode::Div_A:
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::DIVSD : Js::OpCode::DIVSS;
        break;

    case Js::OpCode::Neg_A:
    {
        // Negation is done by XORing the sign-bit mask into the value.
        IR::Opnd *opnd;
        instr->m_opcode = Js::OpCode::XORPS;
        if (instr->GetDst()->IsFloat32())
        {
            opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegFloatAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
        }
        else
        {
            Assert(instr->GetDst()->IsFloat64());
            opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegDoubleAddr(), TyMachDouble, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        instr->SetSrc2(opnd);
        break;
    }

    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotLt_A:
    case Js::OpCode::BrNotLe_A:
        return this->LowerFloatCondBranch(instr->AsBranchInstr());

    default:
        Assume(UNREACHED);
    }

    Legalize(instr);

    return instr;
}
IR::BranchInstr *
LowererMD::LowerFloatCondBranch(IR::BranchInstr *instrBranch, bool ignoreNan)
{
    // Lowers a float conditional branch to a COMISD/UCOMISD compare plus an
    // unsigned-condition jump, with an extra JP (jump-if-parity, i.e. the
    // unordered/NaN case) where needed so NaN comparisons behave per spec.
    Js::OpCode brOpcode = Js::OpCode::InvalidOpCode;
    Js::OpCode cmpOpcode = Js::OpCode::InvalidOpCode;
    IR::Instr *instr;
    bool swapCmpOpnds = false;
    bool addJP = false;
    IR::LabelInstr *labelNaN = nullptr;

    // Generate float compare that behave correctly for NaN's.
    // These branch on unordered:
    //  JB
    //  JBE
    //  JE
    // These don't branch on unordered:
    //  JA
    //  JAE
    //  JNE
    // Unfortunately, only JA and JAE do what we'd like....
    Func * func = instrBranch->m_func;
    IR::Opnd *src1 = instrBranch->UnlinkSrc1();
    IR::Opnd *src2 = instrBranch->UnlinkSrc2();
    Assert(src1->GetType() == src2->GetType());

    switch (instrBranch->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JEQ;

        if (!ignoreNan)
        {
            // Don't jump on NaN's
            labelNaN = instrBranch->GetOrCreateContinueLabel();
            addJP = true;
        }
        break;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JNE;

        if (!ignoreNan)
        {
            // Jump on NaN's
            labelNaN = instrBranch->GetTarget();
            addJP = true;
        }
        break;

    // For the relational branches, operands are swapped where necessary so
    // that only JA/JAE/JB/JBE are needed (unordered then falls out correctly).
    case Js::OpCode::BrLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JAE;
        break;

    case Js::OpCode::BrLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JA;
        break;

    case Js::OpCode::BrGe_A:
        brOpcode = Js::OpCode::JAE;
        break;

    case Js::OpCode::BrGt_A:
        brOpcode = Js::OpCode::JA;
        break;

    case Js::OpCode::BrNotLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JB;
        break;

    case Js::OpCode::BrNotLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JBE;
        break;

    case Js::OpCode::BrNotGe_A:
        brOpcode = Js::OpCode::JB;
        break;

    case Js::OpCode::BrNotGt_A:
        brOpcode = Js::OpCode::JBE;
        break;

    default:
        Assume(UNREACHED);
    }

    // if we haven't set cmpOpcode, then we are using COMISD/COMISS
    if (cmpOpcode == Js::OpCode::InvalidOpCode)
    {
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
    }

    if (swapCmpOpnds)
    {
        IR::Opnd *tmp = src1;
        src1 = src2;
        src2 = tmp;
    }

    // VC generates UCOMISD for BrEq/BrNeq, and COMISD for all others, accordingly to IEEE 754.
    // We'll do the same.

    //  COMISD / UCOMISD src1, src2
    IR::Instr *instrCmp = IR::Instr::New(cmpOpcode, func);

    instrCmp->SetSrc1(src1);
    instrCmp->SetSrc2(src2);
    instrBranch->InsertBefore(instrCmp);
    Legalize(instrCmp);

    if (addJP)
    {
        // JP $LabelNaN
        instr = IR::BranchInstr::New(Js::OpCode::JP, labelNaN, func);
        instrBranch->InsertBefore(instr);
    }

    //  Jcc $L
    instr = IR::BranchInstr::New(brOpcode, instrBranch->GetTarget(), func);
    instrBranch->InsertBefore(instr);

    instrBranch->Remove();
    return instr->AsBranchInstr();
}
  6599. void LowererMD::HelperCallForAsmMathBuiltin(IR::Instr* instr, IR::JnHelperMethod helperMethodFloat, IR::JnHelperMethod helperMethodDouble)
  6600. {
  6601. Assert(instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::Trunc_A || instr->m_opcode == Js::OpCode::Nearest_A);
  6602. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  6603. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  6604. Assert(!instr->GetSrc2());
  6605. IR::Opnd * argOpnd = instr->UnlinkSrc1();
  6606. IR::JnHelperMethod helperMethod;
  6607. if (argOpnd->IsFloat32())
  6608. {
  6609. helperMethod = helperMethodFloat;
  6610. LoadFloatHelperArgument(instr, argOpnd);
  6611. }
  6612. else
  6613. {
  6614. helperMethod = helperMethodDouble;
  6615. LoadDoubleHelperArgument(instr, argOpnd);
  6616. }
  6617. ChangeToHelperCall(instr, helperMethod);
  6618. }
  6619. void LowererMD::GenerateFastInlineBuiltInCall(IR::Instr* instr, IR::JnHelperMethod helperMethod)
  6620. {
  6621. switch (instr->m_opcode)
  6622. {
  6623. case Js::OpCode::InlineMathSqrt:
  6624. // Sqrt maps directly to the SSE2 instruction.
  6625. // src and dst should already be XMM registers, all we need is just change the opcode.
  6626. Assert(helperMethod == (IR::JnHelperMethod)0);
  6627. Assert(instr->GetSrc2() == nullptr);
  6628. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SQRTSD : Js::OpCode::SQRTSS;
  6629. break;
  6630. case Js::OpCode::InlineMathAbs:
  6631. Assert(helperMethod == (IR::JnHelperMethod)0);
  6632. return GenerateFastInlineBuiltInMathAbs(instr);
  6633. case Js::OpCode::InlineMathPow:
  6634. #ifdef _M_IX86
  6635. if (!instr->GetSrc2()->IsFloat())
  6636. {
  6637. #endif
  6638. this->GenerateFastInlineBuiltInMathPow(instr);
  6639. break;
  6640. #ifdef _M_IX86
  6641. }
  6642. // fallthrough
  6643. #endif
  6644. case Js::OpCode::InlineMathAcos:
  6645. case Js::OpCode::InlineMathAsin:
  6646. case Js::OpCode::InlineMathAtan:
  6647. case Js::OpCode::InlineMathAtan2:
  6648. case Js::OpCode::InlineMathCos:
  6649. case Js::OpCode::InlineMathExp:
  6650. case Js::OpCode::InlineMathLog:
  6651. case Js::OpCode::Expo_A: //** operator reuses InlineMathPow fastpath
  6652. case Js::OpCode::InlineMathSin:
  6653. case Js::OpCode::InlineMathTan:
  6654. {
  6655. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  6656. AssertMsg(instr->GetSrc1()->IsFloat(), "src1 must be float.");
  6657. AssertMsg(!instr->GetSrc2() || instr->GetSrc2()->IsFloat(), "src2 must be float.");
  6658. // Before:
  6659. // dst = <Built-in call> src1, src2
  6660. // After:
  6661. // I386:
  6662. // XMM0 = MOVSD src1
  6663. // CALL helperMethod
  6664. // dst = MOVSD call->dst
  6665. // AMD64:
  6666. // XMM0 = MOVSD src1
  6667. // RAX = MOV helperMethod
  6668. // CALL RAX
  6669. // dst = MOVSD call->dst
  6670. // Src1
  6671. IR::Instr* argOut = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
  6672. IR::RegOpnd* dst1 = IR::RegOpnd::New(nullptr, (RegNum)FIRST_FLOAT_ARG_REG, TyMachDouble, this->m_func);
  6673. dst1->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  6674. argOut->SetDst(dst1);
  6675. argOut->SetSrc1(instr->UnlinkSrc1());
  6676. instr->InsertBefore(argOut);
  6677. // Src2
  6678. if (instr->GetSrc2() != nullptr)
  6679. {
  6680. IR::Instr* argOut2 = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
  6681. IR::RegOpnd* dst2 = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_ARG_REG + 1), TyMachDouble, this->m_func);
  6682. dst2->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  6683. argOut2->SetDst(dst2);
  6684. argOut2->SetSrc1(instr->UnlinkSrc2());
  6685. instr->InsertBefore(argOut2);
  6686. }
  6687. // Call CRT.
  6688. IR::RegOpnd* floatCallDst = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_REG), TyMachDouble, this->m_func); // Dst in XMM0.
  6689. #ifdef _M_IX86
  6690. IR::Instr* floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, this->m_func);
  6691. floatCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
  6692. instr->InsertBefore(floatCall);
  6693. #else
  6694. // s1 = MOV helperAddr
  6695. IR::RegOpnd* s1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  6696. IR::AddrOpnd* helperAddr = IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func);
  6697. IR::Instr* mov = IR::Instr::New(Js::OpCode::MOV, s1, helperAddr, this->m_func);
  6698. instr->InsertBefore(mov);
  6699. // dst(XMM0) = CALL s1
  6700. IR::Instr *floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, s1, this->m_func);
  6701. instr->InsertBefore(floatCall);
  6702. #endif
  6703. instr->m_func->SetHasCallsOnSelfAndParents();
  6704. // Save the result.
  6705. instr->m_opcode = Js::OpCode::MOVSD;
  6706. instr->SetSrc1(floatCall->GetDst());
  6707. break;
  6708. }
  6709. case Js::OpCode::InlineMathFloor:
  6710. case Js::OpCode::InlineMathCeil:
  6711. case Js::OpCode::InlineMathRound:
  6712. #ifdef ENABLE_WASM
  6713. case Js::OpCode::Trunc_A:
  6714. case Js::OpCode::Nearest_A:
  6715. #endif //ENABLE_WASM
  6716. {
  6717. Assert(AutoSystemInfo::Data.SSE4_1Available());
  6718. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsFloat());
  6719. // MOVSD roundedFloat, src
  6720. //
  6721. // if(round)
  6722. // {
  6723. // /* N.B.: the following CMPs are lowered to COMISDs, whose results can only be >, <, or =.
  6724. // In fact, only ">" can be used if NaN has not been handled.
  6725. // */
  6726. // CMP 0.5, roundedFloat
  6727. // JA $ltHalf
  6728. // CMP TwoToFraction, roundedFloat
  6729. // JA $addHalfToRoundSrcLabel
  6730. // J $skipRoundSd (NaN is also handled here)
  6731. // $ltHalf:
  6732. // CMP roundedFloat, -0.5
  6733. // JL $ltNegHalf
  6734. // if (shouldCheckNegZero) {
  6735. // CMP roundedFloat, 0
  6736. // JA $setZero
  6737. // $negZeroTest [Helper]:
  6738. // JB $bailoutLabel
  6739. // isNegZero(src)
  6740. // JE $bailoutLabel
  6741. // J $skipRoundSd
  6742. // } // else: setZero
  6743. // $setZero:
  6744. // MOV roundedFloat, 0
  6745. // J $skipRoundSd
  6746. // $ltNegHalf:
  6747. // CMP roundedFloat, NegTwoToFraction
  6748. // JA $addHalfToRoundSrc
  6749. // J $skipRoundSd
  6750. // $addHalfToRoundSrc:
  6751. // ADDSD roundedFloat, 0.5
  6752. // $skipAddHalf:
  6753. // }
  6754. //
  6755. // if(isNotCeil)
  6756. // {
  6757. // CMP roundedFloat, 0
  6758. // JGE $skipRoundSd
  6759. // }
  6760. // ROUNDSD roundedFloat, roundedFloat, round_mode
  6761. //
  6762. // $skipRoundSd:
  6763. // if(isNotCeil)
  6764. // MOVSD checkNegZeroOpnd, roundedFloat
  6765. // else if (ceil)
  6766. // MOVSD checkNegZeroOpnd, src
  6767. //
  6768. // CMP checkNegZeroOpnd, 0
  6769. // JNE $convertToInt
  6770. //
  6771. // if(instr->ShouldCheckForNegativeZero())
  6772. // {
  6773. // isNegZero CALL IsNegZero(checkNegZeroOpnd)
  6774. // CMP isNegZero, 0
  6775. // JNE $bailoutLabel
  6776. // }
  6777. //
  6778. // $convertToInt:
  6779. // CVT(T)SD2SI dst, roundedFloat //CVTTSD2SI for floor/round and CVTSD2SI for ceil
  6780. // CMP dst 0x80000000
  6781. // JNE $fallthrough
  6782. //
  6783. // if(!sharedBailout)
  6784. // {
  6785. // $bailoutLabel:
  6786. // }
  6787. // GenerateBailout(instr)
  6788. //
  6789. // $fallthrough:
  6790. bool isNotCeil = instr->m_opcode != Js::OpCode::InlineMathCeil;
  6791. // MOVSD roundedFloat, src
  6792. IR::Opnd * src = instr->UnlinkSrc1();
  6793. IR::RegOpnd* roundedFloat = IR::RegOpnd::New(src->GetType(), this->m_func);
  6794. IR::Instr* argOut = IR::Instr::New(LowererMDArch::GetAssignOp(src->GetType()), roundedFloat, src, this->m_func);
  6795. instr->InsertBefore(argOut);
  6796. bool negZeroCheckDone = false;
  6797. IR::LabelInstr * bailoutLabel = nullptr;
  6798. bool sharedBailout = false;
  6799. if (instr->GetDst()->IsInt32())
  6800. {
  6801. sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
  6802. bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);
  6803. }
  6804. IR::Opnd * zero;
  6805. if (src->IsFloat64())
  6806. {
  6807. zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6808. }
  6809. else
  6810. {
  6811. Assert(src->IsFloat32());
  6812. zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatZeroAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6813. }
  6814. IR::AutoReuseOpnd autoReuseZero(zero, this->m_func);
  6815. IR::LabelInstr * skipRoundSd = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6816. if(instr->m_opcode == Js::OpCode::InlineMathRound)
  6817. {
  6818. IR::LabelInstr * addHalfToRoundSrcLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6819. IR::LabelInstr * ltHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6820. IR::LabelInstr * setZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6821. IR::LabelInstr * ltNegHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6822. IR::Opnd * pointFive;
  6823. IR::Opnd * negPointFive;
  6824. if (src->IsFloat64())
  6825. {
  6826. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6827. negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegPointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6828. }
  6829. else
  6830. {
  6831. Assert(src->IsFloat32());
  6832. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6833. negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6834. }
  6835. // CMP 0.5, roundedFloat
  6836. // JA $ltHalf
  6837. this->m_lowerer->InsertCompareBranch(pointFive, roundedFloat, Js::OpCode::BrGt_A, ltHalf, instr);
  6838. if (instr->GetDst()->IsInt32())
  6839. {
  6840. // if we are specializing dst to int, we will bailout on overflow so don't need upperbound check
  6841. // Also, we will bailout on NaN, so it doesn't need special handling either
  6842. // J $addHalfToRoundSrcLabel
  6843. this->m_lowerer->InsertBranch(Js::OpCode::Br, addHalfToRoundSrcLabel, instr);
  6844. }
  6845. else
  6846. {
  6847. IR::Opnd * twoToFraction;
  6848. if (src->IsFloat64())
  6849. {
  6850. twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6851. }
  6852. else
  6853. {
  6854. Assert(src->IsFloat32());
  6855. twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6856. }
  6857. // CMP 2^fraction, roundedFloat
  6858. // JA $addHalfToRoundSrcLabel
  6859. this->m_lowerer->InsertCompareBranch(twoToFraction, roundedFloat, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
  6860. // J $skipRoundSd (NaN also handled here)
  6861. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6862. }
  6863. // $ltHalf:
  6864. instr->InsertBefore(ltHalf);
  6865. // CMP roundedFloat, -0.5
  6866. // JL $ltNegHalf
  6867. this->m_lowerer->InsertCompareBranch(roundedFloat, negPointFive, Js::OpCode::BrLt_A, ltNegHalf, instr);
  6868. if (instr->ShouldCheckForNegativeZero())
  6869. {
  6870. // CMP roundedFloat, 0
  6871. // JA $setZero
  6872. this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGt_A, setZero, instr);
  6873. // $negZeroTest [helper]
  6874. m_lowerer->InsertLabel(true, instr);
  6875. // JB $bailoutLabel
  6876. this->m_lowerer->InsertBranch(Js::OpCode::JB, bailoutLabel, instr);
  6877. // if isNegZero(src) J $bailoutLabel else J $skipRoundSd
  6878. NegZeroBranching(src, instr, bailoutLabel, skipRoundSd);
  6879. negZeroCheckDone = true;
  6880. }
  6881. // $setZero:
  6882. instr->InsertBefore(setZero);
  6883. // MOVSD_ZERO roundedFloat
  6884. LoadFloatZero(roundedFloat, instr);
  6885. // J $skipRoundSd
  6886. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6887. // $ltNegHalf:
  6888. instr->InsertBefore(ltNegHalf);
  6889. if (!instr->GetDst()->IsInt32())
  6890. {
  6891. // if we are specializing dst to int, we will bailout on overflow so don't need lowerbound check
  6892. IR::Opnd * negTwoToFraction;
  6893. if (src->IsFloat64())
  6894. {
  6895. negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6896. }
  6897. else
  6898. {
  6899. Assert(src->IsFloat32());
  6900. negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6901. }
  6902. // CMP roundedFloat, negTwoToFraction
  6903. // JA $addHalfToRoundSrcLabel
  6904. this->m_lowerer->InsertCompareBranch(roundedFloat, negTwoToFraction, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
  6905. // J $skipRoundSd
  6906. this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
  6907. }
  6908. if (src->IsFloat64())
  6909. {
  6910. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  6911. }
  6912. else
  6913. {
  6914. Assert(src->IsFloat32());
  6915. pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  6916. }
  6917. // $addHalfToRoundSrcLabel
  6918. instr->InsertBefore(addHalfToRoundSrcLabel);
  6919. // ADDSD roundedFloat, 0.5
  6920. IR::Instr * addInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS, roundedFloat, roundedFloat, pointFive, this->m_func);
  6921. instr->InsertBefore(addInstr);
  6922. Legalize(addInstr);
  6923. }
  6924. if (instr->m_opcode == Js::OpCode::InlineMathFloor && instr->GetDst()->IsInt32())
  6925. {
  6926. this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGe_A, skipRoundSd, instr);
  6927. }
  6928. // ROUNDSD srcCopy, srcCopy, round_mode
  6929. IR::Opnd * roundMode = nullptr;
  6930. switch (instr->m_opcode)
  6931. {
  6932. #ifdef ENABLE_WASM
  6933. case Js::OpCode::Trunc_A:
  6934. roundMode = IR::IntConstOpnd::New(0x03, TyInt32, this->m_func);
  6935. break;
  6936. case Js::OpCode::Nearest_A:
  6937. roundMode = IR::IntConstOpnd::New(0x00, TyInt32, this->m_func);
  6938. break;
  6939. #endif //ENABLE_WASM
  6940. case Js::OpCode::InlineMathRound:
  6941. case Js::OpCode::InlineMathFloor:
  6942. roundMode = IR::IntConstOpnd::New(0x01, TyInt32, this->m_func);
  6943. break;
  6944. case Js::OpCode::InlineMathCeil:
  6945. roundMode = IR::IntConstOpnd::New(0x02, TyInt32, this->m_func);
  6946. break;
  6947. }
  6948. IR::Instr* roundInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ROUNDSD : Js::OpCode::ROUNDSS, roundedFloat, roundedFloat, roundMode, this->m_func);
  6949. instr->InsertBefore(roundInstr);
  6950. if (instr->m_opcode == Js::OpCode::InlineMathRound)
  6951. {
  6952. instr->InsertBefore(skipRoundSd);
  6953. }
  6954. if (instr->GetDst()->IsInt32())
  6955. {
  6956. if (instr->m_opcode == Js::OpCode::InlineMathFloor)
  6957. {
  6958. instr->InsertBefore(skipRoundSd);
  6959. }
  6960. //negZero bailout
  6961. if(instr->ShouldCheckForNegativeZero() && !negZeroCheckDone)
  6962. {
  6963. IR::LabelInstr * convertToInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6964. IR::Opnd * checkNegZeroOpnd = isNotCeil ? src : roundedFloat;
  6965. this->m_lowerer->InsertCompareBranch(checkNegZeroOpnd, zero, Js::OpCode::BrNeq_A, convertToInt, instr);
  6966. m_lowerer->InsertLabel(true, instr);
  6967. NegZeroBranching(checkNegZeroOpnd, instr, bailoutLabel, convertToInt);
  6968. instr->InsertBefore(convertToInt);
  6969. }
  6970. IR::Opnd * originalDst = instr->UnlinkDst();
  6971. // CVT(T)SD2SI dst, srcCopy
  6972. IR::Instr* convertToIntInstr;
  6973. if (isNotCeil)
  6974. {
  6975. convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, originalDst, roundedFloat, this->m_func);
  6976. }
  6977. else
  6978. {
  6979. convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTSD2SI : Js::OpCode::CVTSS2SI, originalDst, roundedFloat, this->m_func);
  6980. }
  6981. instr->InsertBefore(convertToIntInstr);
  6982. IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  6983. IR::Opnd * intOverflowValue = IR::IntConstOpnd::New(INT32_MIN, IRType::TyInt32, this->m_func, true);
  6984. this->m_lowerer->InsertCompareBranch(originalDst, intOverflowValue, Js::OpCode::BrNeq_A, fallthrough, instr);
  6985. instr->InsertAfter(fallthrough);
  6986. if (!sharedBailout)
  6987. {
  6988. instr->InsertBefore(bailoutLabel);
  6989. }
  6990. // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
  6991. // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
  6992. this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
  6993. }
  6994. else
  6995. {
  6996. IR::Opnd * originalDst = instr->UnlinkDst();
  6997. Assert(originalDst->IsFloat());
  6998. Assert(originalDst->GetType() == roundedFloat->GetType());
  6999. IR::Instr * movInstr = IR::Instr::New(originalDst->IsFloat64() ? Js::OpCode::MOVSD : Js::OpCode::MOVSS, originalDst, roundedFloat, this->m_func);
  7000. instr->InsertBefore(movInstr);
  7001. instr->Remove();
  7002. }
  7003. break;
  7004. }
  7005. case Js::OpCode::InlineMathMin:
  7006. case Js::OpCode::InlineMathMax:
  7007. {
  7008. IR::Opnd* src1 = instr->GetSrc1();
  7009. IR::Opnd* src2 = instr->GetSrc2();
  7010. IR::Opnd* dst = instr->GetDst();
  7011. IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  7012. IR::LabelInstr* labelNaNHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  7013. IR::LabelInstr* labelNegZeroAndNaNCheckHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  7014. IR::Instr* branchInstr;
  7015. bool min = instr->m_opcode == Js::OpCode::InlineMathMin ? true : false;
  7016. bool dstEqualsSrc1 = dst->IsEqual(src1);
  7017. bool dstEqualsSrc2 = dst->IsEqual(src2);
  7018. IR::Opnd * otherSrc = src2;
  7019. IR::Opnd * compareSrc1 = src1;
  7020. IR::Opnd * compareSrc2 = src2;
  7021. if (dstEqualsSrc2)
  7022. {
  7023. otherSrc = src1;
  7024. compareSrc1 = src2;
  7025. compareSrc2 = src1;
  7026. }
  7027. if (!dstEqualsSrc1 && !dstEqualsSrc2)
  7028. {
  7029. //MOV dst, src1;
  7030. this->m_lowerer->InsertMove(dst, src1, instr);
  7031. }
  7032. // CMP src1, src2
  7033. if(dst->IsInt32())
  7034. {
  7035. if(min)
  7036. {
  7037. // JLT $continueLabel
  7038. branchInstr = IR::BranchInstr::New(Js::OpCode::BrLt_I4, doneLabel, compareSrc1, compareSrc2, instr->m_func);
  7039. instr->InsertBefore(branchInstr);
  7040. LowererMDArch::EmitInt4Instr(branchInstr);
  7041. }
  7042. else
  7043. {
  7044. // JGT $continueLabel
  7045. branchInstr = IR::BranchInstr::New(Js::OpCode::BrGt_I4, doneLabel, compareSrc1, compareSrc2, instr->m_func);
  7046. instr->InsertBefore(branchInstr);
  7047. LowererMDArch::EmitInt4Instr(branchInstr);
  7048. }
  7049. // MOV dst, src1
  7050. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7051. }
  7052. else if(dst->IsFloat())
  7053. {
  7054. // COMISD/COMISS src1 (src2), src2 (src1)
  7055. // JA $doneLabel
  7056. // JEQ $labelNegZeroAndNaNCheckHelper
  7057. // MOVSD/MOVSS dst, src2
  7058. // JMP $doneLabel
  7059. //
  7060. // $labelNegZeroAndNaNCheckHelper
  7061. // JP $labelNaNHelper
  7062. // if(min)
  7063. // {
  7064. // if(src2 == -0.0)
  7065. // MOVSD/MOVSS dst, src2
  7066. // }
  7067. // else
  7068. // {
  7069. // if(src1 == -0.0)
  7070. // MOVSD/MOVSS dst, src2
  7071. // }
  7072. // JMP $doneLabel
  7073. //
  7074. // $labelNaNHelper
  7075. // MOVSD/MOVSS dst, NaN
  7076. //
  7077. // $doneLabel
  7078. if(min)
  7079. {
  7080. this->m_lowerer->InsertCompareBranch(compareSrc1, compareSrc2, Js::OpCode::BrLt_A, doneLabel, instr); // Lowering of BrLt_A for floats is done to JA with operands swapped
  7081. }
  7082. else
  7083. {
  7084. this->m_lowerer->InsertCompareBranch(compareSrc1, compareSrc2, Js::OpCode::BrGt_A, doneLabel, instr);
  7085. }
  7086. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, labelNegZeroAndNaNCheckHelper, instr->m_func));
  7087. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7088. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));
  7089. instr->InsertBefore(labelNegZeroAndNaNCheckHelper);
  7090. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, labelNaNHelper, instr->m_func));
  7091. IR::LabelInstr *isNeg0Label = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  7092. NegZeroBranching(min ? compareSrc2 : compareSrc1, instr, isNeg0Label, doneLabel);
  7093. instr->InsertBefore(isNeg0Label);
  7094. this->m_lowerer->InsertMove(dst, otherSrc, instr);
  7095. instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));
  7096. instr->InsertBefore(labelNaNHelper);
  7097. IR::Opnd * opndNaN = nullptr;
  7098. if (dst->IsFloat32())
  7099. {
  7100. opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), IRType::TyFloat32, this->m_func);
  7101. }
  7102. else
  7103. {
  7104. opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), IRType::TyFloat64, this->m_func);
  7105. }
  7106. this->m_lowerer->InsertMove(dst, opndNaN, instr);
  7107. }
  7108. instr->InsertBefore(doneLabel);
  7109. instr->Remove();
  7110. break;
  7111. }
  7112. default:
  7113. AssertMsg(FALSE, "Unknown inline built-in opcode");
  7114. break;
  7115. }
  7116. }
  7117. void LowererMD::GenerateFastInlineBuiltInMathAbs(IR::Instr* inlineInstr)
  7118. {
  7119. IR::Opnd* src = inlineInstr->GetSrc1()->Copy(this->m_func);
  7120. IR::Opnd* dst = inlineInstr->UnlinkDst();
  7121. Assert(src);
  7122. IR::Instr* tmpInstr;
  7123. IR::Instr* nextInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  7124. IR::Instr* continueInstr = m_lowerer->LowerBailOnIntMin(inlineInstr);
  7125. continueInstr->InsertAfter(nextInstr);
  7126. IRType srcType = src->GetType();
  7127. if (srcType == IRType::TyInt32)
  7128. {
  7129. // Note: if execution gets so far, we always get (untagged) int32 here.
  7130. // Since -x = ~x + 1, abs(x) = x, abs(-x) = -x, sign-extend(x) = 0, sign_extend(-x) = -1, where 0 <= x.
  7131. // Then: abs(x) = sign-extend(x) XOR x - sign-extend(x)
  7132. // Expected input (otherwise bailout):
  7133. // - src1 is (untagged) int, not equal to int_min (abs(int_min) would produce overflow, as there's no corresponding positive int).
  7134. // MOV EAX, src
  7135. IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
  7136. regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
  7137. tmpInstr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
  7138. nextInstr->InsertBefore(tmpInstr);
  7139. IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
  7140. regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());
  7141. // CDQ (sign-extend EAX into EDX, producing 64bit EDX:EAX value)
  7142. // Note: put EDX on dst to give of def to the EDX lifetime
  7143. tmpInstr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
  7144. nextInstr->InsertBefore(tmpInstr);
  7145. // XOR EAX, EDX
  7146. tmpInstr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
  7147. nextInstr->InsertBefore(tmpInstr);
  7148. // SUB EAX, EDX
  7149. tmpInstr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
  7150. nextInstr->InsertBefore(tmpInstr);
  7151. // MOV dst, EAX
  7152. tmpInstr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
  7153. nextInstr->InsertBefore(tmpInstr);
  7154. }
  7155. else if (srcType == IRType::TyFloat64)
  7156. {
  7157. if (!dst->IsRegOpnd())
  7158. {
  7159. // MOVSD tempRegOpnd, src
  7160. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyMachDouble, this->m_func);
  7161. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  7162. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, tempRegOpnd, src, this->m_func);
  7163. nextInstr->InsertBefore(tmpInstr);
  7164. // This saves the result in the same register.
  7165. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  7166. // MOVSD dst, tempRegOpnd
  7167. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempRegOpnd, this->m_func);
  7168. nextInstr->InsertBefore(tmpInstr);
  7169. }
  7170. else
  7171. {
  7172. // MOVSD dst, src
  7173. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, src, this->m_func);
  7174. nextInstr->InsertBefore(tmpInstr);
  7175. // This saves the result in the same register.
  7176. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  7177. }
  7178. }
  7179. else if (srcType == IRType::TyFloat32)
  7180. {
  7181. if (!dst->IsRegOpnd())
  7182. {
  7183. // MOVSS tempRegOpnd, src
  7184. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyFloat32, this->m_func);
  7185. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  7186. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, tempRegOpnd, src, this->m_func);
  7187. nextInstr->InsertBefore(tmpInstr);
  7188. // This saves the result in the same register.
  7189. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  7190. // MOVSS dst, tempRegOpnd
  7191. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, tempRegOpnd, this->m_func);
  7192. nextInstr->InsertBefore(tmpInstr);
  7193. }
  7194. else
  7195. {
  7196. // MOVSS dst, src
  7197. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, src, this->m_func);
  7198. nextInstr->InsertBefore(tmpInstr);
  7199. // This saves the result in the same register.
  7200. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  7201. }
  7202. }
  7203. else
  7204. {
  7205. AssertMsg(FALSE, "GenerateFastInlineBuiltInMathAbs: unexpected type of the src!");
  7206. }
  7207. }
  7208. void LowererMD::GenerateFastInlineBuiltInMathPow(IR::Instr* instr)
  7209. {
  7210. #ifdef _M_IX86
  7211. AssertMsg(!instr->GetSrc2()->IsFloat(), "Math.pow(*, double) needs customized lowering!");
  7212. #endif
  7213. IR::JnHelperMethod directPowHelper = (IR::JnHelperMethod)0;
  7214. IR::Opnd* bailoutOpnd = nullptr;
  7215. if (!instr->GetSrc2()->IsFloat())
  7216. {
  7217. LoadHelperArgument(instr, instr->UnlinkSrc2());
  7218. if (instr->GetSrc1()->IsFloat())
  7219. {
  7220. directPowHelper = IR::HelperDirectMath_PowDoubleInt;
  7221. LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  7222. }
  7223. else
  7224. {
  7225. directPowHelper = IR::HelperDirectMath_PowIntInt;
  7226. LoadHelperArgument(instr, instr->UnlinkSrc1());
  7227. if (!this->m_func->tempSymBool)
  7228. {
  7229. this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
  7230. this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
  7231. }
  7232. IR::SymOpnd* boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
  7233. IR::RegOpnd* boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  7234. this->m_lowerer->InsertLea(boolRefOpnd, boolOpnd, instr);
  7235. LoadHelperArgument(instr, boolRefOpnd);
  7236. bailoutOpnd = boolOpnd;
  7237. }
  7238. }
  7239. #ifndef _M_IX86
  7240. else
  7241. {
  7242. AssertMsg(instr->GetSrc1()->IsFloat(), "Math.Pow(int, double) should not generated by GlobOpt!");
  7243. directPowHelper = IR::HelperDirectMath_Pow;
  7244. LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
  7245. LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  7246. }
  7247. #endif
  7248. ChangeToHelperCall(instr, directPowHelper, nullptr, bailoutOpnd);
  7249. }
// Branches to isNeg0Label when `opnd` holds the floating point value -0.0,
// otherwise to isNotNeg0Label. The check is bit-exact: the float is
// reinterpreted as an unsigned integer and compared against the
// negative-zero bit pattern (sign bit set, all other bits clear), so +0.0
// does NOT match. Branches are inserted before `instr`; returns the trailing
// unconditional JMP to isNotNeg0Label.
IR::Instr *
LowererMD::NegZeroBranching(IR::Opnd* opnd, IR::Instr* instr, IR::LabelInstr* isNeg0Label, IR::LabelInstr* isNotNeg0Label)
{
    Assert(opnd->IsFloat());
    bool is32Bits = opnd->IsFloat32();
    IRType regType = is32Bits ? TyUint32 : TyUint64;
    // Use an unsigned-integer comparison between the opnd to check and the
    // negative zero constant. For this we have to reinterpret the float bits
    // as an integer of the same width.
    // MOV intOpnd, src
    IR::RegOpnd *intOpnd = IR::RegOpnd::New(regType, this->m_func);
    EmitReinterpretFloatToInt(intOpnd, opnd, instr);
#if LOWER_SPLIT_INT64
    if (!is32Bits)
    {
        // For 64bits comparisons on x86 we need to check 2 registers
        // (the 64-bit value is split into a high/low 32-bit register pair):
        // CMP intOpnd.high, (k_NegZero >> 32).i32
        // BRNEQ isNotNeg0Label
        // CMP intOpnd.low, k_NegZero.i32
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(intOpnd);
        const uint32 high64NegZero = Js::NumberConstants::k_NegZero >> 32;
        const uint32 low64NegZero = Js::NumberConstants::k_NegZero & UINT32_MAX;
        IR::IntConstOpnd *negZeroHighOpnd = IR::IntConstOpnd::New(high64NegZero, TyUint32, m_func);
        IR::IntConstOpnd *negZeroLowOpnd = IR::IntConstOpnd::New(low64NegZero, TyUint32, m_func);
        m_lowerer->InsertCompareBranch(dstPair.high, negZeroHighOpnd, Js::OpCode::BrNeq_A, isNotNeg0Label, instr);
        m_lowerer->InsertCompareBranch(dstPair.low, negZeroLowOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    else
#endif
    {
#if _M_IX86
        // NOTE(review): on x86 the 64-bit case is presumably always taken by
        // the LOWER_SPLIT_INT64 path above, so only the float32 constant is
        // needed here — confirm LOWER_SPLIT_INT64 is defined for _M_IX86.
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(Js::NumberConstants::k_Float32NegZero, regType, m_func);
#else
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(is32Bits ? Js::NumberConstants::k_Float32NegZero : Js::NumberConstants::k_NegZero, regType, m_func);
#endif
        // CMP intOpnd, k_NegZero
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        m_lowerer->InsertCompareBranch(intOpnd, negZeroOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    IR::Instr* jmpNotNegZero = IR::BranchInstr::New(Js::OpCode::JMP, isNotNeg0Label, m_func);
    instr->InsertBefore(jmpNotNegZero);
    return jmpNotNegZero;
}
// Final machine-specific lowering pass; delegates to the per-architecture
// lowerer.
void
LowererMD::FinalLower()
{
    this->lowererMDArch.FinalLower();
}
// Lowers an integer Div_I4 into an IDIV and branches to bailOutLabel when
// the division has a non-zero remainder (i.e. the quotient is not exact).
// Returns the final "dst = MOV EAX" instruction so callers can insert
// additional checks before the result is committed to dst.
//
// We don't have to save the operands for bailout because the lowering of
// IDIV doesn't overwrite their values.
// (EDX) = CDQ
// EAX = numerator
// (EDX:EAX)= IDIV (EAX), denominator
// TEST EDX, EDX            ; EDX holds the remainder after IDIV
// JNE bailout
// <Caller insert more checks here>
// dst = MOV EAX <-- assignInstr
IR::Instr *
LowererMD::LowerDivI4AndBailOnReminder(IR::Instr * instr, IR::LabelInstr * bailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Div_I4);
    Assert(!instr->HasBailOutInfo());
    // EmitInt4Instr rewrites the Div_I4 in place into the machine-level
    // CDQ/IDIV/MOV sequence asserted below.
    EmitInt4Instr(instr);
    Assert(instr->m_opcode == Js::OpCode::IDIV);
    IR::Instr * prev = instr->m_prev;
    Assert(prev->m_opcode == Js::OpCode::CDQ);
#ifdef _M_IX86
    Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegEDX);
#else
    Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegRDX);
#endif
    // The CDQ's dst register (EDX/RDX) is where IDIV leaves the remainder.
    IR::Opnd * reminderOpnd = prev->GetDst();
    // Insert all checks before the assignment to the actual dst.
    IR::Instr * insertBeforeInstr = instr->m_next;
    Assert(insertBeforeInstr->m_opcode == Js::OpCode::MOV);
#ifdef _M_IX86
    Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegEAX);
#else
    Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegRAX);
#endif
    // Jump to bailout if the remainder is not 0 (not an int result)
    this->m_lowerer->InsertTestBranch(reminderOpnd, reminderOpnd, Js::OpCode::BrNeq_A, bailOutLabel, insertBeforeInstr);
    return insertBeforeInstr;
}
// Lowers a Typeof instruction. Fast path: load the result string from the
// library's typeDisplayStrings table indexed by type id (tagged ints map
// straight to "number"); fall back to the Op_Typeof helper when the table
// entry is null.
void
LowererMD::LowerTypeof(IR::Instr * typeOfInstr)
{
    Func * func = typeOfInstr->m_func;
    IR::Opnd * src1 = typeOfInstr->GetSrc1();
    IR::Opnd * dst = typeOfInstr->GetDst();
    Assert(src1->IsRegOpnd() && dst->IsRegOpnd());
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * taggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    // MOV typeDisplayStringsArray, &javascriptLibrary->typeDisplayStrings
    IR::RegOpnd * typeDisplayStringsArrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowerer->InsertMove(typeDisplayStringsArrayOpnd, IR::AddrOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetTypeDisplayStringsOffset(), IR::AddrOpndKindConstantAddress, this->m_func), typeOfInstr);
    // Tagged ints skip the object-type inspection entirely.
    GenerateObjectTest(src1, typeOfInstr, taggedIntLabel);
    // MOV typeId, TypeIds_Object
    // MOV typeRegOpnd, [src1 + offset(Type)]
    // MOV objTypeId, [typeRegOpnd + offsetof(typeId)]
    // CMP objTypeId, TypeIds_Limit /*external object test*/
    // CMOVB typeId, objTypeId
    // TEST [typeRegOpnd + offsetof(flags)], TypeFlagMask_IsFalsy /*test for falsy*/
    // CMOVNE typeId, TypeIds_Undefined
    // MOV dst, typeDisplayStrings[typeId]
    // TEST dst, dst
    // JE $helper
    // JMP $done
    IR::RegOpnd * typeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Object, TyUint32, func), typeOfInstr);
    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, func);
    m_lowerer->InsertMove(typeRegOpnd,
        IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func),
        typeOfInstr);
    IR::RegOpnd * objTypeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(objTypeIdOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, func), typeOfInstr);
    // Ids >= TypeIds_Limit (external objects) keep the default TypeIds_Object.
    m_lowerer->InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Limit, TyUint32, func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVB, typeIdOpnd, objTypeIdOpnd, typeOfInstr);
    // Insert MOV reg, 0 before the TEST because MOV reg, 0 will be peeped to XOR reg, reg and that may affect the zero flags that CMOVE depends on
    IR::RegOpnd* typeIdUndefinedOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdUndefinedOpnd, IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyUint32, func), typeOfInstr);
    IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
    // Falsy objects (e.g. document.all) report "undefined".
    m_lowerer->InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVNE, typeIdOpnd, typeIdUndefinedOpnd, typeOfInstr);
    if (dst->IsEqual(src1))
    {
        // dst aliases src1: hoist src1 into its own register so the writes to
        // dst below don't clobber the operand the helper path still reads.
        ChangeToAssign(typeOfInstr->HoistSrc1(Js::OpCode::Ld_A));
    }
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, typeIdOpnd, this->GetDefaultIndirScale(), TyMachPtr, func), typeOfInstr);
    // A null table entry means no precomputed string; go to the helper.
    m_lowerer->InsertTestBranch(dst, dst, Js::OpCode::BrEq_A, helperLabel, typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
    // $taggedInt:
    // MOV dst, typeDisplayStrings[TypeIds_Number]
    // JMP $done
    typeOfInstr->InsertBefore(taggedIntLabel);
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Number * sizeof(Js::Var), TyMachPtr, func), typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);
    // $helper
    // CALL OP_TypeOf
    // $done
    typeOfInstr->InsertBefore(helperLabel);
    typeOfInstr->InsertAfter(doneLabel);
    m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
}
  7397. void
  7398. LowererMD::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore)
  7399. {
  7400. if ((isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForStores)) || (!isForStore && CONFIG_FLAG_RELEASE(PoisonObjectsForLoads)))
  7401. {
  7402. Js::OpCode opcode;
  7403. if (branchInstr->m_opcode == Js::OpCode::JNE)
  7404. {
  7405. opcode = Js::OpCode::CMOVNE;
  7406. }
  7407. else
  7408. {
  7409. AssertOrFailFastMsg(branchInstr->m_opcode == Js::OpCode::JEQ, "Unexpected branch type in InsertObjectPoison preceeding instruction");
  7410. opcode = Js::OpCode::CMOVE;
  7411. }
  7412. AssertOrFailFast(branchInstr->m_prev->m_opcode == Js::OpCode::CMP || branchInstr->m_prev->m_opcode == Js::OpCode::TEST);
  7413. IR::RegOpnd* regZero = IR::RegOpnd::New(TyMachPtr, insertInstr->m_func);
  7414. Lowerer::InsertMove(regZero, IR::IntConstOpnd::New(0, TyMachPtr, insertInstr->m_func), branchInstr->m_prev);
  7415. InsertCmovCC(opcode, poisonedOpnd, regZero, insertInstr);
  7416. }
  7417. }
  7418. IR::Instr*
  7419. LowererMD::InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc)
  7420. {
  7421. Assert(opCode > Js::OpCode::MDStart);
  7422. Func* func = insertBeforeInstr->m_func;
  7423. IR::Opnd* src2 = nullptr;
  7424. if (!postRegAlloc)
  7425. {
  7426. src2 = src1;
  7427. src1 = dst;
  7428. }
  7429. IR::Instr * instr = IR::Instr::New(opCode, dst, src1, src2, func);
  7430. insertBeforeInstr->InsertBefore(instr);
  7431. LowererMD::Legalize(instr);
  7432. return instr;
  7433. }
// Architecture dispatch: emit a compare of `compareSrc` against the
// missing-item sentinel `missingItemOpnd` and a conditional branch
// (`opcode`) to `target`, inserted before `insertBeforeInstr`. The actual
// instruction selection lives in the per-arch lowerer.
IR::BranchInstr*
LowererMD::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
{
    return this->lowererMDArch.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr);
}