LowerMDShared.cpp 340 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
797789779
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Language/JavascriptFunctionArgIndex.h"
// Machine-dependent opcode aliases consumed by the shared (MD-independent) lowerer.
const Js::OpCode LowererMD::MDUncondBranchOpcode = Js::OpCode::JMP;
const Js::OpCode LowererMD::MDTestOpcode = Js::OpCode::TEST;
const Js::OpCode LowererMD::MDOrOpcode = Js::OpCode::OR;
const Js::OpCode LowererMD::MDXorOpcode = Js::OpCode::XOR;
#if _M_X64
// MOVQ (GPR <-> XMM) only exists as a 64-bit transfer on x64.
const Js::OpCode LowererMD::MDMovUint64ToFloat64Opcode = Js::OpCode::MOVQ;
#endif
const Js::OpCode LowererMD::MDOverflowBranchOpcode = Js::OpCode::JO;
const Js::OpCode LowererMD::MDNotOverflowBranchOpcode = Js::OpCode::JNO;
const Js::OpCode LowererMD::MDConvertFloat32ToFloat64Opcode = Js::OpCode::CVTSS2SD;
const Js::OpCode LowererMD::MDConvertFloat64ToFloat32Opcode = Js::OpCode::CVTSD2SS;
const Js::OpCode LowererMD::MDCallOpcode = Js::OpCode::CALL;
const Js::OpCode LowererMD::MDImulOpcode = Js::OpCode::IMUL2;
// IEEE-754 single-precision bit patterns used as integer immediates:
// TWO_31_FLOAT  = bit pattern of 2147483648.0f (2^31)
// FLOAT_INT_MIN = bit pattern of -2147483648.0f (-2^31, i.e. (float)INT32_MIN)
static const int TWO_31_FLOAT = 0x4f000000;
static const int FLOAT_INT_MIN = 0xcf000000;
  22. //
  23. // Static utility fn()
  24. //
  25. bool
  26. LowererMD::IsAssign(IR::Instr *instr)
  27. {
  28. return instr->GetDst() && instr->m_opcode == LowererMDArch::GetAssignOp(instr->GetDst()->GetType());
  29. }
  30. ///----------------------------------------------------------------------------
  31. ///
  32. /// LowererMD::IsCall
  33. ///
  34. ///----------------------------------------------------------------------------
  35. bool
  36. LowererMD::IsCall(IR::Instr *instr)
  37. {
  38. return instr->m_opcode == Js::OpCode::CALL;
  39. }
  40. ///----------------------------------------------------------------------------
  41. ///
  42. /// LowererMD::IsUnconditionalBranch
  43. ///
  44. ///----------------------------------------------------------------------------
  45. bool
  46. LowererMD::IsUnconditionalBranch(const IR::Instr *instr)
  47. {
  48. return (instr->m_opcode == Js::OpCode::JMP);
  49. }
// GenerateMemRef: Return an opnd that can be used to access the given address.
// On x86/x64 an absolute address can be encoded directly in a MemRefOpnd, so
// 'instr' and 'dontEncode' are unused here — presumably kept for signature
// parity with other (RISC) targets where materializing the address may
// require emitting instructions; TODO confirm against the ARM lowerer.
IR::Opnd *
LowererMD::GenerateMemRef(intptr_t addr, IRType type, IR::Instr *instr, bool dontEncode)
{
    return IR::MemRefOpnd::New(addr, type, this->m_func);
}
// Emit an initialization of [opnd + offset] with 'value', inserted before
// insertBeforeInstr. 'isZeroed' indicates the memory is known pre-zeroed so
// zero stores can be skipped by the callee.
void
LowererMD::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
#if _M_X64
    // 64-bit: size_t may not fit a 32-bit immediate; let the arch lowerer handle it.
    lowererMDArch.GenerateMemInit(opnd, offset, value, insertBeforeInstr, isZeroed);
#else
    // 32-bit: the value fits in 32 bits, so the shared lowerer emits it directly.
    m_lowerer->GenerateMemInit(opnd, offset, (uint32)value, insertBeforeInstr, isZeroed);
#endif
}
  65. ///----------------------------------------------------------------------------
  66. ///
  67. /// LowererMD::InvertBranch
  68. ///
  69. ///----------------------------------------------------------------------------
  70. void
  71. LowererMD::InvertBranch(IR::BranchInstr *branchInstr)
  72. {
  73. switch (branchInstr->m_opcode)
  74. {
  75. case Js::OpCode::JA:
  76. branchInstr->m_opcode = Js::OpCode::JBE;
  77. break;
  78. case Js::OpCode::JAE:
  79. branchInstr->m_opcode = Js::OpCode::JB;
  80. break;
  81. case Js::OpCode::JB:
  82. branchInstr->m_opcode = Js::OpCode::JAE;
  83. break;
  84. case Js::OpCode::JBE:
  85. branchInstr->m_opcode = Js::OpCode::JA;
  86. break;
  87. case Js::OpCode::JEQ:
  88. branchInstr->m_opcode = Js::OpCode::JNE;
  89. break;
  90. case Js::OpCode::JNE:
  91. branchInstr->m_opcode = Js::OpCode::JEQ;
  92. break;
  93. case Js::OpCode::JGE:
  94. branchInstr->m_opcode = Js::OpCode::JLT;
  95. break;
  96. case Js::OpCode::JGT:
  97. branchInstr->m_opcode = Js::OpCode::JLE;
  98. break;
  99. case Js::OpCode::JLT:
  100. branchInstr->m_opcode = Js::OpCode::JGE;
  101. break;
  102. case Js::OpCode::JLE:
  103. branchInstr->m_opcode = Js::OpCode::JGT;
  104. break;
  105. case Js::OpCode::JO:
  106. branchInstr->m_opcode = Js::OpCode::JNO;
  107. break;
  108. case Js::OpCode::JNO:
  109. branchInstr->m_opcode = Js::OpCode::JO;
  110. break;
  111. case Js::OpCode::JP:
  112. branchInstr->m_opcode = Js::OpCode::JNP;
  113. break;
  114. case Js::OpCode::JNP:
  115. branchInstr->m_opcode = Js::OpCode::JP;
  116. break;
  117. case Js::OpCode::JSB:
  118. branchInstr->m_opcode = Js::OpCode::JNSB;
  119. break;
  120. case Js::OpCode::JNSB:
  121. branchInstr->m_opcode = Js::OpCode::JSB;
  122. break;
  123. default:
  124. AssertMsg(UNREACHED, "JCC missing in InvertBranch()");
  125. }
  126. }
  127. void
  128. LowererMD::ReverseBranch(IR::BranchInstr *branchInstr)
  129. {
  130. switch (branchInstr->m_opcode)
  131. {
  132. case Js::OpCode::JA:
  133. branchInstr->m_opcode = Js::OpCode::JB;
  134. break;
  135. case Js::OpCode::JAE:
  136. branchInstr->m_opcode = Js::OpCode::JBE;
  137. break;
  138. case Js::OpCode::JB:
  139. branchInstr->m_opcode = Js::OpCode::JA;
  140. break;
  141. case Js::OpCode::JBE:
  142. branchInstr->m_opcode = Js::OpCode::JAE;
  143. break;
  144. case Js::OpCode::JGE:
  145. branchInstr->m_opcode = Js::OpCode::JLE;
  146. break;
  147. case Js::OpCode::JGT:
  148. branchInstr->m_opcode = Js::OpCode::JLT;
  149. break;
  150. case Js::OpCode::JLT:
  151. branchInstr->m_opcode = Js::OpCode::JGT;
  152. break;
  153. case Js::OpCode::JLE:
  154. branchInstr->m_opcode = Js::OpCode::JGE;
  155. break;
  156. case Js::OpCode::JEQ:
  157. case Js::OpCode::JNE:
  158. case Js::OpCode::JO:
  159. case Js::OpCode::JNO:
  160. case Js::OpCode::JP:
  161. case Js::OpCode::JNP:
  162. case Js::OpCode::JSB:
  163. case Js::OpCode::JNSB:
  164. break;
  165. default:
  166. AssertMsg(UNREACHED, "JCC missing in ReverseBranch()");
  167. }
  168. }
// Lower a helper call whose arguments arrive as a chain of ArgOut_A
// instructions (or ExtendArg_A for HelperOP_InitCachedScope) linked through
// src2. Returns the call instruction so lowering can continue from it.
IR::Instr *
LowererMD::LowerCallHelper(IR::Instr *instrCall)
{
    IR::Opnd *argOpnd = instrCall->UnlinkSrc2();
    IR::Instr *prevInstr = nullptr;
    IR::JnHelperMethod helperMethod = instrCall->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
    instrCall->FreeSrc1();
#ifndef _M_X64
    // x86: the CALL is lowered first; the argument pushes below are then
    // inserted ahead of the already-lowered call. The returned instr is not
    // needed — insertion continues from instrCall (next line).
    prevInstr = ChangeToHelperCall(instrCall, helperMethod);
#endif
    prevInstr = instrCall;
    // Walk the argument chain, loading each src into a helper argument slot
    // and retiring the consumed ArgOut_A instruction.
    while (argOpnd)
    {
        Assert(argOpnd->IsRegOpnd());
        IR::RegOpnd *regArg = argOpnd->AsRegOpnd();
        Assert(regArg->m_sym->m_isSingleDef);
        IR::Instr *instrArg = regArg->m_sym->m_instrDef;
        Assert(instrArg->m_opcode == Js::OpCode::ArgOut_A ||
            (helperMethod == IR::JnHelperMethod::HelperOP_InitCachedScope && instrArg->m_opcode == Js::OpCode::ExtendArg_A));
        prevInstr = LoadHelperArgument(prevInstr, instrArg->GetSrc1());
        argOpnd = instrArg->GetSrc2();
        // NOTE(review): LoadHelperArgument can apparently return instrArg
        // itself; step back so the insertion point survives the Remove() below.
        if (prevInstr == instrArg)
        {
            prevInstr = prevInstr->m_prev;
        }
        if (instrArg->m_opcode == Js::OpCode::ArgOut_A)
        {
            instrArg->UnlinkSrc1();
            if (argOpnd)
            {
                instrArg->UnlinkSrc2();
            }
            regArg->Free(this->m_func);
            instrArg->Remove();
        }
    }
    // The script context is the final helper argument.
    prevInstr = m_lowerer->LoadScriptContext(prevInstr);
#ifdef _M_X64
    // x64: args go in registers, so their order must be flipped, and the CALL
    // can only be lowered once all arguments are known.
    FlipHelperCallArgsOrder();
    ChangeToHelperCall(instrCall, helperMethod);
#else
    this->lowererMDArch.ResetHelperArgsCount();
#endif
    // There might be ToVar in between the ArgOut, need to continue lower from the call still
    return instrCall;
}
  215. //
  216. // forwarding functions
  217. //
  218. IR::Instr *
  219. LowererMD::LowerCall(IR::Instr * callInstr, Js::ArgSlot argCount)
  220. {
  221. return this->lowererMDArch.LowerCall(callInstr, argCount);
  222. }
  223. IR::Instr *
  224. LowererMD::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
  225. {
  226. return this->lowererMDArch.LowerCallI(callInstr, callFlags, isHelper, insertBeforeInstrForCFG);
  227. }
// Lower an asm.js internal (known-target) call via the arch lowerer.
IR::Instr *
LowererMD::LowerAsmJsCallI(IR::Instr * callInstr)
{
#if DBG
    // Debug aid: under the AsmjsCallDebugBreak phase, insert a debug break
    // right after the call so execution traps on return.
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(callInstr->m_next);
    }
#endif
    return this->lowererMDArch.LowerAsmJsCallI(callInstr);
}
// Lower an asm.js external call via the arch lowerer.
IR::Instr *
LowererMD::LowerAsmJsCallE(IR::Instr * callInstr)
{
#if DBG
    // Debug aid: under the AsmjsCallDebugBreak phase, insert a debug break
    // right after the call so execution traps on return.
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(callInstr->m_next);
    }
#endif
    return this->lowererMDArch.LowerAsmJsCallE(callInstr);
}
  250. IR::Instr *
  251. LowererMD::LowerWasmMemOp(IR::Instr * instr, IR::Opnd *addrOpnd)
  252. {
  253. return this->lowererMDArch.LowerWasmMemOp(instr, addrOpnd);
  254. }
  255. IR::Instr *
  256. LowererMD::LowerAsmJsLdElemHelper(IR::Instr * callInstr)
  257. {
  258. return this->lowererMDArch.LowerAsmJsLdElemHelper(callInstr);
  259. }
  260. IR::Instr *
  261. LowererMD::LowerAsmJsStElemHelper(IR::Instr * callInstr)
  262. {
  263. return this->lowererMDArch.LowerAsmJsStElemHelper(callInstr);
  264. }
  265. IR::Instr *
  266. LowererMD::LowerCallPut(IR::Instr * callInstr)
  267. {
  268. int32 argCount = this->lowererMDArch.LowerCallArgs(callInstr, Js::CallFlags_None, 2);
  269. // load native entry point from script function into eax
  270. IR::Opnd * functionWrapOpnd = callInstr->UnlinkSrc1();
  271. AssertMsg(functionWrapOpnd->IsRegOpnd() && functionWrapOpnd->AsRegOpnd()->m_sym->IsStackSym(),
  272. "Expected call src to be stackSym");
  273. this->LoadHelperArgument(callInstr, functionWrapOpnd);
  274. this->m_lowerer->LoadScriptContext(callInstr);
  275. IR::HelperCallOpnd *helperCallOpnd = IR::HelperCallOpnd::New(IR::HelperOp_InvokePut, this->m_func);
  276. callInstr->SetSrc1(helperCallOpnd);
  277. return this->lowererMDArch.LowerCall(callInstr, argCount);
  278. }
  279. IR::Instr *
  280. LowererMD::LoadInt64HelperArgument(IR::Instr * instr, IR::Opnd* opnd)
  281. {
  282. return this->lowererMDArch.LoadInt64HelperArgument(instr, opnd);
  283. }
  284. IR::Instr *
  285. LowererMD::LoadHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  286. {
  287. return this->lowererMDArch.LoadHelperArgument(instr, opndArg);
  288. }
  289. IR::Instr *
  290. LowererMD::LoadDoubleHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  291. {
  292. return this->lowererMDArch.LoadDoubleHelperArgument(instr, opndArg);
  293. }
  294. IR::Instr *
  295. LowererMD::LoadFloatHelperArgument(IR::Instr * instr, IR::Opnd * opndArg)
  296. {
  297. return this->lowererMDArch.LoadFloatHelperArgument(instr, opndArg);
  298. }
  299. IR::Instr *
  300. LowererMD::LowerEntryInstr(IR::EntryInstr * entryInstr)
  301. {
  302. return this->lowererMDArch.LowerEntryInstr(entryInstr);
  303. }
  304. IR::Instr *
  305. LowererMD::LowerExitInstr(IR::ExitInstr * exitInstr)
  306. {
  307. return this->lowererMDArch.LowerExitInstr(exitInstr);
  308. }
  309. IR::Instr *
  310. LowererMD::LowerEntryInstrAsmJs(IR::EntryInstr * entryInstr)
  311. {
  312. return this->lowererMDArch.LowerEntryInstrAsmJs(entryInstr);
  313. }
  314. IR::Instr *
  315. LowererMD::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  316. {
  317. return this->lowererMDArch.LowerExitInstrAsmJs(exitInstr);
  318. }
  319. IR::Instr *
  320. LowererMD::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  321. {
  322. return this->lowererMDArch.LoadNewScObjFirstArg(instr, dst, extraArgs);
  323. }
// Lower a TryCatch/TryFinally entry: push the helper's arguments (try address,
// handler address, frame pointer, sizes on x64, bailout offset, script
// context), call the EH helper, then JMP to the continuation address the
// helper returns. The argument loads are inserted in reverse push order.
IR::Instr *
LowererMD::LowerTry(IR::Instr *tryInstr, IR::JnHelperMethod helperMethod)
{
    // Mark the entry to the try
    IR::Instr *instr = tryInstr->GetNextRealInstrOrLabel();
    AssertMsg(instr->IsLabelInstr(), "No label at the entry to a try?");
    IR::LabelInstr *tryAddr = instr->AsLabelInstr();
    // Arg 5: ScriptContext
    this->m_lowerer->LoadScriptContext(tryAddr);
    if (tryInstr->m_opcode == Js::OpCode::TryCatch || this->m_func->DoOptimizeTry())
    {
        // Arg 4 : hasBailedOutOffset
        IR::Opnd * hasBailedOutOffset = IR::IntConstOpnd::New(this->m_func->m_hasBailedOutSym->m_offset, TyInt32, this->m_func);
        this->LoadHelperArgument(tryAddr, hasBailedOutOffset);
    }
#ifdef _M_X64
    // x64 passes the frame's argument and spill areas' sizes so the helper can
    // recreate the frame; LdArgSize/LdSpillSize are resolved in final lower.
    // Arg: args size
    IR::RegOpnd *argsSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdArgSize, argsSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, argsSizeOpnd);
    // Arg: spill size
    IR::RegOpnd *spillSizeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    tryAddr->InsertBefore(IR::Instr::New(Js::OpCode::LdSpillSize, spillSizeOpnd, this->m_func));
    this->LoadHelperArgument(tryAddr, spillSizeOpnd);
#endif
    // Arg 3: frame pointer
    IR::RegOpnd *ebpOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegBlockPointer(), TyMachReg, this->m_func);
    this->LoadHelperArgument(tryAddr, ebpOpnd);
    // Arg 2: handler address
    IR::LabelInstr *helperAddr = tryInstr->AsBranchInstr()->GetTarget();
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(helperAddr, this->m_func));
    // Arg 1: try address
    this->LoadHelperArgument(tryAddr, IR::LabelOpnd::New(tryAddr, this->m_func));
    // Call the helper; its return value (in the return register) is the
    // address where execution should continue.
    IR::RegOpnd *continuationAddr =
        IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr *callInstr = IR::Instr::New(
        Js::OpCode::Call, continuationAddr, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    tryAddr->InsertBefore(callInstr);
    this->LowerCall(callInstr, 0);
#ifdef _M_X64
    {
        // Emit some instruction to separate the CALL from the JMP following it. The OS stack unwinder
        // mistakes the JMP for the start of the epilog otherwise.
        IR::Instr *nop = IR::Instr::New(Js::OpCode::NOP, m_func);
        tryAddr->InsertBefore(nop);
    }
#endif
    // Jump to the continuation address supplied by the helper
    IR::BranchInstr *branchInstr = IR::MultiBranchInstr::New(Js::OpCode::JMP, continuationAddr, this->m_func);
    tryAddr->InsertBefore(branchInstr);
    return tryInstr->m_prev;
}
  377. IR::Instr *
  378. LowererMD::LowerLeave(IR::Instr *leaveInstr, IR::LabelInstr *targetInstr, bool fromFinalLower, bool isOrphanedLeave)
  379. {
  380. if (isOrphanedLeave)
  381. {
  382. Assert(this->m_func->IsLoopBodyInTry());
  383. leaveInstr->m_opcode = Js::OpCode::JMP;
  384. return leaveInstr->m_prev;
  385. }
  386. IR::Instr *instrPrev = leaveInstr->m_prev;
  387. IR::LabelOpnd *labelOpnd = IR::LabelOpnd::New(targetInstr, this->m_func);
  388. lowererMDArch.LowerEHRegionReturn(leaveInstr, labelOpnd);
  389. if (fromFinalLower)
  390. {
  391. instrPrev = leaveInstr->m_prev; // Need to lower LdArgSize and LdSpillSize
  392. }
  393. leaveInstr->Remove();
  394. return instrPrev;
  395. }
  396. IR::Instr *
  397. LowererMD::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  398. {
  399. return lowererMDArch.LowerEHRegionReturn(insertBeforeInstr, targetOpnd);
  400. }
// Lower the end of a finally region that has no explicit continuation:
// return a null continuation address to the EH helper (XOR ret,ret), after
// setting up the x64 fake-frame return machinery.
IR::Instr *
LowererMD::LowerLeaveNull(IR::Instr *finallyEndInstr)
{
    IR::Instr *instrPrev = finallyEndInstr->m_prev;
    IR::Instr *instr = nullptr;
    // Return a null continuation address to the helper: execution will resume at the point determined by the try
    // or the exception handler.
    IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg,this->m_func), lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    // XOR ret, ret — zero the return register.
    instr = IR::Instr::New(Js::OpCode::XOR, retReg, this->m_func);
    IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(nullptr, lowererMDArch.GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    instr->SetSrc1(eaxOpnd);
    instr->SetSrc2(eaxOpnd);
    finallyEndInstr->InsertBefore(instr);
#if _M_X64
    {
        // amd64_ReturnFromCallWithFakeFrame expects to find the spill size and args size
        // in REG_EH_SPILL_SIZE and REG_EH_ARGS_SIZE.
        // MOV REG_EH_SPILL_SIZE, spillSize
        IR::Instr *movR8 = IR::Instr::New(Js::OpCode::LdSpillSize,
            IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR8);
        // MOV REG_EH_ARGS_SIZE, argsSize
        IR::Instr *movR9 = IR::Instr::New(Js::OpCode::LdArgSize,
            IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movR9);
        // Push the fake-frame return thunk's address so RET transfers there.
        IR::Opnd *targetOpnd = IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
        IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
            targetOpnd,
            IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
            m_func);
        finallyEndInstr->InsertBefore(movTarget);
        IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
        push->SetSrc1(targetOpnd);
        finallyEndInstr->InsertBefore(push);
    }
#endif
    // RET 0, with retReg as src2 to model the (null) value being returned.
    IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::RET, this->m_func);
    instr->SetSrc1(intSrc);
    instr->SetSrc2(retReg);
    finallyEndInstr->InsertBefore(instr);
    finallyEndInstr->Remove();
    return instrPrev;
}
  447. ///----------------------------------------------------------------------------
  448. ///
  449. /// LowererMD::Init
  450. ///
  451. ///----------------------------------------------------------------------------
void
LowererMD::Init(Lowerer *lowerer)
{
    // Wire this MD lowerer to its owning shared lowerer, then initialize the
    // architecture-specific lowering state.
    m_lowerer = lowerer;
    this->lowererMDArch.Init(this);
#ifdef ENABLE_SIMDJS
    // Build the SIMD128 opcode lowering map.
    Simd128InitOpcodeMap();
#endif
}
  461. ///----------------------------------------------------------------------------
  462. ///
  463. /// LowererMD::LoadInputParamCount
  464. ///
  465. /// Load the passed-in parameter count from the appropriate EBP slot.
  466. ///
  467. ///----------------------------------------------------------------------------
// Load the actual (passed-in) argument count from the callinfo, adjusted by
// 'adjust' and with the hidden "extra arg" (frame display passed to eval)
// subtracted out. Returns the final SBB instruction, whose dst holds the
// count. 'needFlags' is unused here — presumably kept for signature parity
// with other targets; TODO confirm.
IR::Instr *
LowererMD::LoadInputParamCount(IR::Instr * instrInsert, int adjust, bool needFlags)
{
    IR::Instr * instr;
    IR::RegOpnd * dstOpnd;
    IR::SymOpnd * srcOpnd;
    srcOpnd = Lowerer::LoadCallInfo(instrInsert);
    dstOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, dstOpnd, srcOpnd, this->m_func);
    instrInsert->InsertBefore(instr);
    // Copy the callinfo before masking off the param count
    Assert(Js::CallInfo::ksizeofCount == 24);
    // Mask off call flags from callinfo
    // Keeps the low 24 count bits plus the CallFlags_ExtraArg flag bit
    // (shifted above the count) so it can be tested and removed below.
    instr = IR::Instr::New(Js::OpCode::AND, dstOpnd, dstOpnd,
        IR::IntConstOpnd::New((Js::CallFlags_ExtraArg << static_cast<unsigned>(Js::CallInfo::ksizeofCount)) | 0x00FFFFFF, TyMachReg, this->m_func, true), this->m_func);
    instrInsert->InsertBefore(instr);
    // Shift and mask the "calling eval" bit and subtract it from the incoming count.
    // ("Calling eval" means the last param is the frame display, which only the eval built-in should see.)
    // BTR copies the ExtraArg bit into CF and clears it in dstOpnd.
    instr = IR::Instr::New(Js::OpCode::BTR, dstOpnd, dstOpnd, IR::IntConstOpnd::New(Math::Log2(Js::CallFlags_ExtraArg) + Js::CallInfo::ksizeofCount, TyInt8, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    // SBB dst, -adjust computes dst - (-adjust) - CF = count + adjust - extraArgBit.
    instr = IR::Instr::New(Js::OpCode::SBB, dstOpnd, dstOpnd, IR::IntConstOpnd::New(-adjust, TyMachReg, this->m_func), this->m_func);
    instrInsert->InsertBefore(instr);
    return instr;
}
  492. IR::Instr *
  493. LowererMD::LoadStackArgPtr(IR::Instr * instr)
  494. {
  495. if (this->m_func->IsLoopBody())
  496. {
  497. // Get the first user param from the interpreter frame instance that was passed in.
  498. // These args don't include the func object and callinfo; we just need to advance past "this".
  499. // t1 = MOV [prm1 + m_inParams]
  500. // dst = LEA &[t1 + sizeof(var)]
  501. Assert(this->m_func->m_loopParamSym);
  502. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  503. size_t offset = Js::InterpreterStackFrame::GetOffsetOfInParams();
  504. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
  505. IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  506. IR::Instr *instrLdParams = IR::Instr::New(Js::OpCode::MOV, tmpOpnd, indirOpnd, this->m_func);
  507. instr->InsertBefore(instrLdParams);
  508. indirOpnd = IR::IndirOpnd::New(tmpOpnd, sizeof(Js::Var), TyMachReg, this->m_func);
  509. instr->SetSrc1(indirOpnd);
  510. instr->m_opcode = Js::OpCode::LEA;
  511. return instr->m_prev;
  512. }
  513. else
  514. {
  515. return this->lowererMDArch.LoadStackArgPtr(instr);
  516. }
  517. }
  518. IR::Instr *
  519. LowererMD::LoadArgumentsFromFrame(IR::Instr * instr)
  520. {
  521. if (this->m_func->IsLoopBody())
  522. {
  523. // Get the arguments ptr from the interpreter frame instance that was passed in.
  524. Assert(this->m_func->m_loopParamSym);
  525. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  526. int32 offset = (int32)Js::InterpreterStackFrame::GetOffsetOfArguments();
  527. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, offset, TyMachReg, this->m_func));
  528. }
  529. else
  530. {
  531. instr->SetSrc1(this->CreateStackArgumentsSlotOpnd());
  532. }
  533. instr->m_opcode = Js::OpCode::MOV;
  534. return instr->m_prev;
  535. }
  536. // load argument count as I4
  537. IR::Instr *
  538. LowererMD::LoadArgumentCount(IR::Instr * instr)
  539. {
  540. if (this->m_func->IsLoopBody())
  541. {
  542. // Pull the arg count from the interpreter frame instance that was passed in.
  543. // (The callinfo in the loop body's frame just shows the single parameter, the interpreter frame.)
  544. Assert(this->m_func->m_loopParamSym);
  545. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(this->m_func->m_loopParamSym, TyMachReg, this->m_func);
  546. size_t offset = Js::InterpreterStackFrame::GetOffsetOfInSlotsCount();
  547. instr->SetSrc1(IR::IndirOpnd::New(baseOpnd, (int32)offset, TyInt32, this->m_func));
  548. }
  549. else
  550. {
  551. StackSym *sym = StackSym::New(TyVar, this->m_func);
  552. this->m_func->SetArgOffset(sym, (Js::JavascriptFunctionArgIndex_CallInfo - Js::JavascriptFunctionArgIndex_Frame) * sizeof(Js::Var));
  553. instr->SetSrc1(IR::SymOpnd::New(sym, TyMachReg, this->m_func));
  554. }
  555. instr->m_opcode = Js::OpCode::MOV;
  556. return instr->m_prev;
  557. }
  558. IR::Instr *
  559. LowererMD::LoadHeapArguments(IR::Instr * instrArgs)
  560. {
  561. return this->lowererMDArch.LoadHeapArguments(instrArgs);
  562. }
  563. IR::Instr *
  564. LowererMD::LoadHeapArgsCached(IR::Instr * instrArgs)
  565. {
  566. return this->lowererMDArch.LoadHeapArgsCached(instrArgs);
  567. }
  568. ///----------------------------------------------------------------------------
  569. ///
  570. /// LowererMD::ChangeToHelperCall
  571. ///
  572. /// Change the current instruction to a call to the given helper.
  573. ///
  574. ///----------------------------------------------------------------------------
// Change the current instruction to a call to the given helper. If the
// instruction carries bailout info, the bailout is split off (either into a
// dedicated BailOnNotPrimitive / BailOnPowIntIntOverflow instruction, or via
// SplitBailOnImplicitCall) and lowered after the call, since its lowering may
// itself load helper arguments.
IR::Instr *
LowererMD::ChangeToHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod, IR::LabelInstr *labelBailOut,
    IR::Opnd *opndBailOutArg, IR::PropertySymOpnd *propSymOpnd, bool isHelperContinuation)
{
    IR::Instr * bailOutInstr = callInstr;
    if (callInstr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = callInstr->GetBailOutKind();
        if (bailOutKind == IR::BailOutOnNotPrimitive ||
            bailOutKind == IR::BailOutOnPowIntIntOverflow)
        {
            // Move the operation to a fresh instruction; the original becomes
            // the bailout check (testing opndBailOutArg) preceding the call.
            callInstr = IR::Instr::New(callInstr->m_opcode, callInstr->m_func);
            bailOutInstr->TransferTo(callInstr);
            bailOutInstr->InsertBefore(callInstr);
            bailOutInstr->m_opcode = bailOutKind == IR::BailOutOnNotPrimitive
                ? Js::OpCode::BailOnNotPrimitive
                : Js::OpCode::BailOnPowIntIntOverflow;
            bailOutInstr->SetSrc1(opndBailOutArg);
        }
        else
        {
            bailOutInstr = this->m_lowerer->SplitBailOnImplicitCall(callInstr);
        }
    }
    callInstr->m_opcode = Js::OpCode::CALL;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->lowererMDArch.GetHelperArgsCount(), m_func);
    if (helperCallOpnd->IsDiagHelperCallOpnd())
    {
        // Load arguments for the wrapper.
        this->LoadHelperArgument(callInstr, IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKindDynamicMisc, m_func));
        this->m_lowerer->LoadScriptContext(callInstr);
    }
    callInstr->SetSrc1(helperCallOpnd);
    IR::Instr * instrRet = this->lowererMDArch.LowerCall(callInstr, 0);
    if (bailOutInstr != callInstr)
    {
        // The bailout needs to be lowered after we lower the helper call because the helper argument
        // has already been loaded. We need to drain them on AMD64 before starting another helper call
        if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotObject)
        {
            this->m_lowerer->LowerBailOnNotObject(bailOutInstr, nullptr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOnNotPrimitive ||
            bailOutInstr->m_opcode == Js::OpCode::BailOnPowIntIntOverflow)
        {
            this->m_lowerer->LowerBailOnTrue(bailOutInstr, labelBailOut);
        }
        else if (bailOutInstr->m_opcode == Js::OpCode::BailOut)
        {
            this->m_lowerer->GenerateBailOut(bailOutInstr, nullptr, labelBailOut);
        }
        else
        {
            this->m_lowerer->LowerBailOnEqualOrNotEqual(bailOutInstr, nullptr, labelBailOut, propSymOpnd, isHelperContinuation);
        }
    }
#if DBG
    // Debug aid: optionally trap right after the lowered call.
    if (PHASE_ON(Js::AsmjsCallDebugBreakPhase, this->m_func))
    {
        this->GenerateDebugBreak(instrRet->m_next);
    }
#endif
    return instrRet;
}
  639. IR::Instr* LowererMD::ChangeToHelperCallMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  640. {
  641. this->m_lowerer->LoadScriptContext(instr);
  642. return this->ChangeToHelperCall(instr, helperMethod);
  643. }
  644. ///----------------------------------------------------------------------------
  645. ///
  646. /// LowererMD::ChangeToAssign
  647. ///
  648. /// Change to a MOV.
  649. ///
  650. ///----------------------------------------------------------------------------
  651. IR::Instr *
  652. LowererMD::ChangeToAssignNoBarrierCheck(IR::Instr * instr)
  653. {
  654. return ChangeToAssign(instr, instr->GetDst()->GetType());
  655. }
  656. IR::Instr *
  657. LowererMD::ChangeToAssign(IR::Instr * instr)
  658. {
  659. return ChangeToWriteBarrierAssign(instr, instr->m_func);
  660. }
  661. IR::Instr *
  662. LowererMD::ChangeToAssign(IR::Instr * instr, IRType type)
  663. {
  664. Assert(!instr->HasBailOutInfo() || instr->GetBailOutKind() == IR::BailOutExpectingString);
  665. instr->m_opcode = LowererMDArch::GetAssignOp(type);
  666. Legalize(instr);
  667. return instr;
  668. }
  669. ///----------------------------------------------------------------------------
  670. ///
  671. /// LowererMD::ChangeToLea
  672. ///
  673. /// Change to an LEA.
  674. ///
  675. ///----------------------------------------------------------------------------
  676. IR::Instr *
  677. LowererMD::ChangeToLea(IR::Instr * instr, bool postRegAlloc)
  678. {
  679. Assert(instr);
  680. Assert(instr->GetDst());
  681. Assert(instr->GetDst()->IsRegOpnd());
  682. Assert(instr->GetSrc1());
  683. Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd());
  684. Assert(!instr->GetSrc2());
  685. instr->m_opcode = Js::OpCode::LEA;
  686. return instr;
  687. }
  688. ///----------------------------------------------------------------------------
  689. ///
  690. /// LowererMD::CreateAssign
  691. ///
  692. /// Create a MOV.
  693. ///
  694. ///----------------------------------------------------------------------------
  695. IR::Instr *
  696. LowererMD::CreateAssign(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsertPt, bool generateWriteBarrier)
  697. {
  698. return Lowerer::InsertMove(dst, src, instrInsertPt, generateWriteBarrier);
  699. }
  700. ///----------------------------------------------------------------------------
  701. ///
  702. /// LowererMD::LowerRet
  703. ///
  704. /// Lower Ret to "MOV EAX, src"
  705. /// The real RET is inserted at the exit of the function when emitting the
  706. /// epilog.
  707. ///
  708. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerRet(IR::Instr * retInstr)
{
    // Moves the return value into the architectural return register and makes
    // the Ret instruction consume that pinned register. The actual RET opcode
    // is emitted later, as part of the epilog.
    IR::RegOpnd * retReg;
#ifdef ASMJS_PLAT
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody()) // for loop body ret is the bytecodeoffset
    {
        // Asm.js/wasm: pick the return register's IR type from the function's
        // declared asm.js return type.
        Js::AsmJsRetType::Which asmType = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType();
        IRType regType = TyInt32;
        switch (asmType)
        {
        case Js::AsmJsRetType::Double:
            regType = TyFloat64;
            break;
        case Js::AsmJsRetType::Float:
            regType = TyFloat32;
            break;
        case Js::AsmJsRetType::Int64:
        {
            regType = TyInt64;
#if LOWER_SPLIT_INT64
            // 32-bit target: an int64 return is split into a low/high 32-bit
            // pair, returned in EAX (low, via src1) and EDX (high, via src2).
            regType = TyInt32;
            {
                IR::Opnd* lowOpnd = nullptr;
                IR::Opnd* highOpnd = nullptr;
                if (retInstr->GetSrc1()->IsRegOpnd())
                {
                    Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(retInstr->GetSrc1()->AsRegOpnd());
                    lowOpnd = srcPair.low;
                    highOpnd = srcPair.high;
                }
                else if (retInstr->GetSrc1()->IsImmediateOpnd())
                {
                    // Constant return value: split it into its 32-bit halves.
                    int64 value = retInstr->GetSrc1()->GetImmediateValue(m_func);
                    lowOpnd = IR::IntConstOpnd::New(value & UINT_MAX, regType, m_func);
                    highOpnd = IR::IntConstOpnd::New(value >> 32, regType, m_func);
                }
                else
                {
                    Assert(UNREACHED);
                }
                retInstr->UnlinkSrc1();
                retInstr->SetSrc1(lowOpnd);
                // Mov high bits to edx
                IR::RegOpnd* regEdx = IR::RegOpnd::New(regType, this->m_func);
                regEdx->SetReg(RegEDX);
                Lowerer::InsertMove(regEdx, highOpnd, retInstr);
                retInstr->SetSrc2(regEdx);
            }
#endif
            break;
        }
        case Js::AsmJsRetType::Signed:
        case Js::AsmJsRetType::Void:
            regType = TyInt32;
            break;
        // SIMD return types map to their corresponding 128-bit IR types.
        case Js::AsmJsRetType::Float32x4:
            regType = TySimd128F4;
            break;
        case Js::AsmJsRetType::Int32x4:
            regType = TySimd128I4;
            break;
        case Js::AsmJsRetType::Float64x2:
            regType = TySimd128D2;
            break;
        case Js::AsmJsRetType::Int16x8:
            regType = TySimd128I8;
            break;
        case Js::AsmJsRetType::Int8x16:
            regType = TySimd128I16;
            break;
        case Js::AsmJsRetType::Uint32x4:
            regType = TySimd128U4;
            break;
        case Js::AsmJsRetType::Uint16x8:
            regType = TySimd128U8;
            break;
        case Js::AsmJsRetType::Uint8x16:
            regType = TySimd128U16;
            break;
        case Js::AsmJsRetType::Bool32x4:
            regType = TySimd128B4;
            break;
        case Js::AsmJsRetType::Bool16x8:
            regType = TySimd128B8;
            break;
        case Js::AsmJsRetType::Bool8x16:
            regType = TySimd128B16;
            break;
        default:
            Assert(UNREACHED);
        }
        retReg = IR::RegOpnd::New(regType, m_func);
        retReg->SetReg(lowererMDArch.GetRegReturnAsmJs(regType));
    }
    else
#endif
    {
        // JS mode: the return value is a machine-word Var in the standard
        // return register.
        retReg = IR::RegOpnd::New(TyMachReg, m_func);
        retReg->SetReg(lowererMDArch.GetRegReturn(TyMachReg));
    }
    // retReg = <original src>; then the Ret consumes the pinned register.
    Lowerer::InsertMove(retReg, retInstr->UnlinkSrc1(), retInstr);
    retInstr->SetSrc1(retReg);
    return retInstr;
}
  814. ///----------------------------------------------------------------------------
  815. ///
  816. /// LowererMD::LowerUncondBranch
  817. ///
  818. ///----------------------------------------------------------------------------
  819. IR::Instr *
  820. LowererMD::LowerUncondBranch(IR::Instr * instr)
  821. {
  822. instr->m_opcode = Js::OpCode::JMP;
  823. return instr;
  824. }
  825. ///----------------------------------------------------------------------------
  826. ///
  827. /// LowererMD::LowerMultiBranch
  828. ///
  829. ///----------------------------------------------------------------------------
  830. IR::Instr *
  831. LowererMD::LowerMultiBranch(IR::Instr * instr)
  832. {
  833. return LowerUncondBranch(instr);
  834. }
  835. ///----------------------------------------------------------------------------
  836. ///
  837. /// LowererMD::LowerCondBranch
  838. ///
  839. ///----------------------------------------------------------------------------
IR::Instr *
LowererMD::LowerCondBranch(IR::Instr * instr)
{
    // Lowers an HIR conditional branch into a flags-setting instruction
    // (TEST / CMP / COMISD / COMISS) inserted before it, then rewrites the
    // branch itself into the matching Jcc. Returns the inserted compare/test
    // instruction (nullptr if none was inserted).
    AssertMsg(instr->GetSrc1() != nullptr, "Expected src opnds on conditional branch");
    Assert(!instr->HasBailOutInfo());
    IR::Opnd * opndSrc1 = instr->UnlinkSrc1();
    IR::Instr * instrPrev = nullptr;
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrTrue_A:
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrNotNull_A:
    case Js::OpCode::BrOnObject_A:
    case Js::OpCode::BrOnClassConstructor:
    case Js::OpCode::BrOnBaseConstructorKind:
        // Single-operand "truthiness" branches: TEST src, src; then branch on
        // the zero flag (JEQ only for BrFalse, JNE for all the others).
        Assert(!opndSrc1->IsFloat64());
        AssertMsg(instr->GetSrc2() == nullptr, "Expected 1 src on boolean branch");
        instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instrPrev->SetSrc1(opndSrc1);
        instrPrev->SetSrc2(opndSrc1);
        instr->InsertBefore(instrPrev);
        if (instr->m_opcode != Js::OpCode::BrFalse_A)
        {
            instr->m_opcode = Js::OpCode::JNE;
        }
        else
        {
            instr->m_opcode = Js::OpCode::JEQ;
        }
        break;
    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        // These are handled by the machine-independent lowerer.
        AssertMsg(0, "BrOnEmpty opcodes should not be passed to MD lowerer");
        break;
    default:
        // Two-operand compare-and-branch.
        IR::Opnd * opndSrc2 = instr->UnlinkSrc2();
        AssertMsg(opndSrc2 != nullptr, "Expected 2 src's on non-boolean branch");
        if (opndSrc1->IsFloat())
        {
            // Floating-point comparison sets flags via (U)COMIS[D|S].
            Assert(opndSrc1->GetType() == opndSrc2->GetType());
            instrPrev = IR::Instr::New(opndSrc1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS, m_func);
            instrPrev->SetSrc1(opndSrc1);
            instrPrev->SetSrc2(opndSrc2);
            instr->InsertBefore(instrPrev);
        }
        else
        {
            // This check assumes src1 is a variable.
            if (opndSrc2->IsIntConstOpnd() && opndSrc2->AsIntConstOpnd()->GetValue() == 0)
            {
                // Compare against zero: TEST src1, src1 encodes shorter than
                // CMP src1, 0 and sets the same zero/sign flags.
                instrPrev = IR::Instr::New(Js::OpCode::TEST, this->m_func);
                instrPrev->SetSrc1(opndSrc1);
                instrPrev->SetSrc2(opndSrc1);
                instr->InsertBefore(instrPrev);
                opndSrc2->Free(this->m_func);
            }
            else
            {
                instrPrev = IR::Instr::New(Js::OpCode::CMP, this->m_func);
                //
                // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
                // relevant only on AMD64.
                //
                opndSrc1 = instrPrev->SetSrc1(opndSrc1);
                opndSrc2 = instrPrev->SetSrc2(opndSrc2);
                instr->InsertBefore(instrPrev);
                LowererMD::Legalize(instrPrev);
            }
        }
        instr->m_opcode = LowererMD::MDBranchOpcode(instr->m_opcode);
        break;
    }
    return instrPrev;
}
  914. ///----------------------------------------------------------------------------
  915. ///
  916. /// LowererMD::MDBranchOpcode
  917. ///
  918. /// Map HIR branch opcode to machine-dependent equivalent.
  919. ///
  920. ///----------------------------------------------------------------------------
  921. Js::OpCode
  922. LowererMD::MDBranchOpcode(Js::OpCode opcode)
  923. {
  924. switch (opcode)
  925. {
  926. case Js::OpCode::BrSrEq_A:
  927. case Js::OpCode::BrEq_A:
  928. case Js::OpCode::BrSrNotNeq_A:
  929. case Js::OpCode::BrNotNeq_A:
  930. case Js::OpCode::BrAddr_A:
  931. return Js::OpCode::JEQ;
  932. case Js::OpCode::BrSrNeq_A:
  933. case Js::OpCode::BrNeq_A:
  934. case Js::OpCode::BrSrNotEq_A:
  935. case Js::OpCode::BrNotEq_A:
  936. case Js::OpCode::BrNotAddr_A:
  937. return Js::OpCode::JNE;
  938. case Js::OpCode::BrLt_A:
  939. case Js::OpCode::BrNotGe_A:
  940. return Js::OpCode::JLT;
  941. case Js::OpCode::BrLe_A:
  942. case Js::OpCode::BrNotGt_A:
  943. return Js::OpCode::JLE;
  944. case Js::OpCode::BrGt_A:
  945. case Js::OpCode::BrNotLe_A:
  946. return Js::OpCode::JGT;
  947. case Js::OpCode::BrGe_A:
  948. case Js::OpCode::BrNotLt_A:
  949. return Js::OpCode::JGE;
  950. default:
  951. AssertMsg(0, "Branch opcode has no MD mapping");
  952. return opcode;
  953. }
  954. }
  955. Js::OpCode
  956. LowererMD::MDConvertFloat64ToInt32Opcode(const RoundMode roundMode)
  957. {
  958. switch (roundMode)
  959. {
  960. case RoundModeTowardZero:
  961. return Js::OpCode::CVTTSD2SI;
  962. case RoundModeTowardInteger:
  963. return Js::OpCode::Nop;
  964. case RoundModeHalfToEven:
  965. return Js::OpCode::CVTSD2SI;
  966. default:
  967. AssertMsg(0, "RoundMode has no MD mapping.");
  968. return Js::OpCode::Nop;
  969. }
  970. }
  971. Js::OpCode
  972. LowererMD::MDUnsignedBranchOpcode(Js::OpCode opcode)
  973. {
  974. switch (opcode)
  975. {
  976. case Js::OpCode::BrEq_A:
  977. case Js::OpCode::BrSrEq_A:
  978. case Js::OpCode::BrSrNotNeq_A:
  979. case Js::OpCode::BrNotNeq_A:
  980. case Js::OpCode::BrAddr_A:
  981. return Js::OpCode::JEQ;
  982. case Js::OpCode::BrNeq_A:
  983. case Js::OpCode::BrSrNeq_A:
  984. case Js::OpCode::BrSrNotEq_A:
  985. case Js::OpCode::BrNotEq_A:
  986. case Js::OpCode::BrNotAddr_A:
  987. return Js::OpCode::JNE;
  988. case Js::OpCode::BrLt_A:
  989. case Js::OpCode::BrNotGe_A:
  990. return Js::OpCode::JB;
  991. case Js::OpCode::BrLe_A:
  992. case Js::OpCode::BrNotGt_A:
  993. return Js::OpCode::JBE;
  994. case Js::OpCode::BrGt_A:
  995. case Js::OpCode::BrNotLe_A:
  996. return Js::OpCode::JA;
  997. case Js::OpCode::BrGe_A:
  998. case Js::OpCode::BrNotLt_A:
  999. return Js::OpCode::JAE;
  1000. default:
  1001. AssertMsg(0, "Branch opcode has no MD mapping");
  1002. return opcode;
  1003. }
  1004. }
  1005. Js::OpCode LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode opcode)
  1006. {
  1007. Assert(opcode == Js::OpCode::BrLt_A || opcode == Js::OpCode::BrGe_A);
  1008. return opcode == Js::OpCode::BrLt_A ? Js::OpCode::JSB : Js::OpCode::JNSB;
  1009. }
  1010. void LowererMD::ChangeToAdd(IR::Instr *const instr, const bool needFlags)
  1011. {
  1012. Assert(instr);
  1013. Assert(instr->GetDst());
  1014. Assert(instr->GetSrc1());
  1015. Assert(instr->GetSrc2());
  1016. if(instr->GetDst()->IsFloat64())
  1017. {
  1018. Assert(instr->GetSrc1()->IsFloat64());
  1019. Assert(instr->GetSrc2()->IsFloat64());
  1020. Assert(!needFlags);
  1021. instr->m_opcode = Js::OpCode::ADDSD;
  1022. return;
  1023. }
  1024. else if (instr->GetDst()->IsFloat32())
  1025. {
  1026. Assert(instr->GetSrc1()->IsFloat32());
  1027. Assert(instr->GetSrc2()->IsFloat32());
  1028. Assert(!needFlags);
  1029. instr->m_opcode = Js::OpCode::ADDSS;
  1030. return;
  1031. }
  1032. instr->m_opcode = Js::OpCode::ADD;
  1033. MakeDstEquSrc1(instr);
  1034. if (!needFlags)
  1035. {
  1036. // Prefer INC for add by one
  1037. if ((instr->GetDst()->IsEqual(instr->GetSrc1()) &&
  1038. instr->GetSrc2()->IsIntConstOpnd() &&
  1039. instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1) ||
  1040. (instr->GetDst()->IsEqual(instr->GetSrc2()) &&
  1041. instr->GetSrc1()->IsIntConstOpnd() &&
  1042. instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 1))
  1043. {
  1044. if (instr->GetSrc1()->IsIntConstOpnd())
  1045. {
  1046. // Swap the operands, such that we would create (dst = INC src2)
  1047. instr->SwapOpnds();
  1048. }
  1049. instr->FreeSrc2();
  1050. instr->m_opcode = Js::OpCode::INC;
  1051. }
  1052. }
  1053. }
  1054. void LowererMD::ChangeToSub(IR::Instr *const instr, const bool needFlags)
  1055. {
  1056. Assert(instr);
  1057. Assert(instr->GetDst());
  1058. Assert(instr->GetSrc1());
  1059. Assert(instr->GetSrc2());
  1060. if(instr->GetDst()->IsFloat64())
  1061. {
  1062. Assert(instr->GetSrc1()->IsFloat64());
  1063. Assert(instr->GetSrc2()->IsFloat64());
  1064. Assert(!needFlags);
  1065. instr->m_opcode = Js::OpCode::SUBSD;
  1066. return;
  1067. }
  1068. // Prefer DEC for sub by one
  1069. if(instr->GetDst()->IsEqual(instr->GetSrc1()) &&
  1070. instr->GetSrc2()->IsIntConstOpnd() &&
  1071. instr->GetSrc2()->AsIntConstOpnd()->GetValue() == 1)
  1072. {
  1073. instr->FreeSrc2();
  1074. instr->m_opcode = Js::OpCode::DEC;
  1075. return;
  1076. }
  1077. instr->m_opcode = Js::OpCode::SUB;
  1078. }
  1079. void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
  1080. {
  1081. Assert(instr);
  1082. Assert(instr->GetDst());
  1083. Assert(instr->GetSrc1());
  1084. Assert(instr->GetSrc2());
  1085. switch(instr->m_opcode)
  1086. {
  1087. case Js::OpCode::Shl_A:
  1088. case Js::OpCode::Shl_I4:
  1089. instr->m_opcode = Js::OpCode::SHL;
  1090. break;
  1091. case Js::OpCode::Shr_A:
  1092. case Js::OpCode::Shr_I4:
  1093. instr->m_opcode = Js::OpCode::SAR;
  1094. break;
  1095. case Js::OpCode::ShrU_A:
  1096. case Js::OpCode::ShrU_I4:
  1097. instr->m_opcode = Js::OpCode::SHR;
  1098. break;
  1099. case Js::OpCode::Rol_I4:
  1100. instr->m_opcode = Js::OpCode::ROL;
  1101. break;
  1102. case Js::OpCode::Ror_I4:
  1103. instr->m_opcode = Js::OpCode::ROR;
  1104. break;
  1105. default:
  1106. Assert(false);
  1107. __assume(false);
  1108. }
  1109. if(instr->GetSrc2()->IsIntConstOpnd())
  1110. {
  1111. // Only values between 0-31 mean anything
  1112. IntConstType value = instr->GetSrc2()->AsIntConstOpnd()->GetValue();
  1113. value &= 0x1f;
  1114. instr->GetSrc2()->AsIntConstOpnd()->SetValue(value);
  1115. }
  1116. }
void LowererMD::ChangeToIMul(IR::Instr *const instr, bool hasOverflowCheck)
{
    // If non-32 bit overflow check is needed, we have to use the IMUL form.
    if (hasOverflowCheck && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
    {
        // Route the multiply through the IMUL destination register (EAX) —
        // presumably so the wide result is available for the non-32-bit
        // overflow check; confirm against the overflow-check lowering.
        IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, instr->m_func);
        IR::Opnd *temp2 = nullptr;
        // MOV eax, src1
        regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, regEAX, instr->GetSrc1(), instr->m_func));
        if (instr->GetSrc2()->IsImmediateOpnd())
        {
            // MOV reg, imm
            // Materialize the immediate src2 in a register, propagating the
            // original operand's dontEncode restriction onto the new constant.
            temp2 = IR::RegOpnd::New(TyInt32, instr->m_func);
            IR::Opnd * src2 = instr->GetSrc2();
            bool dontEncode = false;
            if (src2->IsHelperCallOpnd())
            {
                dontEncode = true;
            }
            else if (src2->IsIntConstOpnd() || src2->IsAddrOpnd())
            {
                dontEncode = src2->IsIntConstOpnd() ? src2->AsIntConstOpnd()->m_dontEncode : src2->AsAddrOpnd()->m_dontEncode;
            }
            else if (src2->IsInt64ConstOpnd())
            {
                dontEncode = false;
            }
            else
            {
                AssertMsg(false, "Unexpected immediate opnd");
                throw Js::OperationAbortedException();
            }
            instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, temp2,
                IR::IntConstOpnd::New((IntConstType)instr->GetSrc2()->GetImmediateValue(instr->m_func), TyInt32, instr->m_func, dontEncode),
                instr->m_func));
        }
        // eax = IMUL eax, reg
        instr->m_opcode = Js::OpCode::IMUL;
        instr->ReplaceSrc1(regEAX);
        if (temp2 != nullptr)
            instr->ReplaceSrc2(temp2);
        // Copy the original dst before retargeting the multiply to EAX.
        auto *dst = instr->GetDst()->Copy(instr->m_func);
        instr->ReplaceDst(regEAX);
        // MOV dst, eax
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, dst, regEAX, instr->m_func));
    }
    else
        EmitInt4Instr(instr); // IMUL2
}
const uint16
LowererMD::GetFormalParamOffset()
{
    // On x86/x64 the formal parameters sit above the saved EBP chain, the
    // return address, and the 2 non-user parameters: 4 slots from EBP total.
    return 4;
}
  1173. IR::Instr *
  1174. LowererMD::LowerCatch(IR::Instr * instr)
  1175. {
  1176. // t1 = catch => t2(eax) = catch
  1177. // => t1 = t2(eax)
  1178. IR::Opnd *catchObj = instr->UnlinkDst();
  1179. IR::RegOpnd *catchParamReg = IR::RegOpnd::New(TyMachPtr, this->m_func);
  1180. catchParamReg->SetReg(this->lowererMDArch.GetRegReturn(TyMachReg));
  1181. instr->SetDst(catchParamReg);
  1182. instr->InsertAfter(IR::Instr::New(Js::OpCode::MOV, catchObj, catchParamReg, this->m_func));
  1183. return instr->m_prev;
  1184. }
  1185. ///----------------------------------------------------------------------------
  1186. ///
  1187. /// LowererMD::ForceDstToReg
  1188. ///
  1189. ///----------------------------------------------------------------------------
  1190. void
  1191. LowererMD::ForceDstToReg(IR::Instr *instr)
  1192. {
  1193. IR::Opnd * dst = instr->GetDst();
  1194. if (dst->IsRegOpnd())
  1195. {
  1196. return;
  1197. }
  1198. if(dst->IsFloat64())
  1199. {
  1200. instr->SinkDst(Js::OpCode::MOVSD);
  1201. return;
  1202. }
  1203. instr->SinkDst(Js::OpCode::MOV);
  1204. }
  1205. template <bool verify>
  1206. void
  1207. LowererMD::Legalize(IR::Instr *const instr, bool fPostRegAlloc)
  1208. {
  1209. Assert(instr);
  1210. Assert(!instr->isInlineeEntryInstr
  1211. || (instr->m_opcode == Js::OpCode::MOV && instr->GetSrc1()->IsIntConstOpnd()));
  1212. switch(instr->m_opcode)
  1213. {
  1214. case Js::OpCode::MOV:
  1215. {
  1216. Assert(instr->GetSrc2() == nullptr);
  1217. IR::Opnd *const dst = instr->GetDst();
  1218. const IRType dstType = dst->GetType();
  1219. IR::Opnd *const src = instr->GetSrc1();
  1220. const IRType srcType = src->GetType();
  1221. if(TySize[dstType] > TySize[srcType])
  1222. {
  1223. if (verify)
  1224. {
  1225. return;
  1226. }
  1227. #if DBG
  1228. switch(dstType)
  1229. {
  1230. case TyInt32:
  1231. case TyUint32:
  1232. #ifdef _M_X64
  1233. case TyInt64:
  1234. case TyUint64:
  1235. #endif
  1236. case TyVar:
  1237. break;
  1238. default:
  1239. Assert(false);
  1240. }
  1241. #endif
  1242. IR::IntConstOpnd *const intConstantSrc = src->IsIntConstOpnd() ? src->AsIntConstOpnd() : nullptr;
  1243. const auto UpdateIntConstantSrc = [&](const size_t extendedValue)
  1244. {
  1245. Assert(intConstantSrc);
  1246. #ifdef _M_X64
  1247. if(TySize[dstType] > sizeof(IntConstType))
  1248. {
  1249. instr->ReplaceSrc1(
  1250. IR::AddrOpnd::New(
  1251. reinterpret_cast<void *>(extendedValue),
  1252. IR::AddrOpndKindConstantVar,
  1253. instr->m_func,
  1254. intConstantSrc->m_dontEncode));
  1255. }
  1256. else
  1257. #endif
  1258. {
  1259. intConstantSrc->SetType(dstType);
  1260. intConstantSrc->SetValue(static_cast<IntConstType>(extendedValue));
  1261. }
  1262. };
  1263. switch(srcType)
  1264. {
  1265. case TyInt8:
  1266. if(intConstantSrc)
  1267. {
  1268. UpdateIntConstantSrc(static_cast<int8>(intConstantSrc->GetValue())); // sign-extend
  1269. break;
  1270. }
  1271. instr->m_opcode = Js::OpCode::MOVSX;
  1272. break;
  1273. case TyUint8:
  1274. if(intConstantSrc)
  1275. {
  1276. UpdateIntConstantSrc(static_cast<uint8>(intConstantSrc->GetValue())); // zero-extend
  1277. break;
  1278. }
  1279. instr->m_opcode = Js::OpCode::MOVZX;
  1280. break;
  1281. case TyInt16:
  1282. if(intConstantSrc)
  1283. {
  1284. UpdateIntConstantSrc(static_cast<int16>(intConstantSrc->GetValue())); // sign-extend
  1285. break;
  1286. }
  1287. instr->m_opcode = Js::OpCode::MOVSXW;
  1288. break;
  1289. case TyUint16:
  1290. if(intConstantSrc)
  1291. {
  1292. UpdateIntConstantSrc(static_cast<uint16>(intConstantSrc->GetValue())); // zero-extend
  1293. break;
  1294. }
  1295. instr->m_opcode = Js::OpCode::MOVZXW;
  1296. break;
  1297. #ifdef _M_X64
  1298. case TyInt32:
  1299. if(intConstantSrc)
  1300. {
  1301. UpdateIntConstantSrc(static_cast<int32>(intConstantSrc->GetValue())); // sign-extend
  1302. break;
  1303. }
  1304. instr->m_opcode = Js::OpCode::MOVSXD;
  1305. break;
  1306. case TyUint32:
  1307. if(intConstantSrc)
  1308. {
  1309. UpdateIntConstantSrc(static_cast<uint32>(intConstantSrc->GetValue())); // zero-extend
  1310. break;
  1311. }
  1312. switch(dst->GetKind())
  1313. {
  1314. case IR::OpndKindReg:
  1315. // (mov r0.u32, r1.u32) clears the upper 32 bits of r0
  1316. dst->SetType(TyUint32);
  1317. instr->m_opcode = Js::OpCode::MOV_TRUNC;
  1318. break;
  1319. case IR::OpndKindSym:
  1320. case IR::OpndKindIndir:
  1321. case IR::OpndKindMemRef:
  1322. // Even if the src is a reg, we don't know if the upper 32 bits are zero. Copy the value to a
  1323. // reg first to zero-extend it to 64 bits, and then copy the 64-bit value to the original dst.
  1324. instr->HoistSrc1(Js::OpCode::MOV_TRUNC);
  1325. instr->GetSrc1()->SetType(dstType);
  1326. break;
  1327. default:
  1328. Assert(false);
  1329. __assume(false);
  1330. }
  1331. break;
  1332. #endif
  1333. default:
  1334. Assert(false);
  1335. __assume(false);
  1336. }
  1337. }
  1338. else if (TySize[dstType] < TySize[srcType])
  1339. {
  1340. instr->GetSrc1()->SetType(dst->GetType());
  1341. }
  1342. if(instr->m_opcode == Js::OpCode::MOV)
  1343. {
  1344. uint src1Forms = L_Reg | L_Mem | L_Ptr; // Allow 64 bit values in x64 as well
  1345. if (dst->IsMemoryOpnd())
  1346. {
  1347. #if _M_X64
  1348. // Only allow <= 32 bit values
  1349. src1Forms = L_Reg | L_Imm32;
  1350. #else
  1351. src1Forms = L_Reg | L_Ptr;
  1352. #endif
  1353. }
  1354. LegalizeOpnds<verify>(
  1355. instr,
  1356. L_Reg | L_Mem,
  1357. src1Forms,
  1358. L_None);
  1359. }
  1360. else
  1361. {
  1362. LegalizeOpnds<verify>(
  1363. instr,
  1364. L_Reg,
  1365. L_Reg | L_Mem,
  1366. L_None);
  1367. }
  1368. break;
  1369. }
  1370. case Js::OpCode::CMOVA:
  1371. case Js::OpCode::CMOVAE:
  1372. case Js::OpCode::CMOVB:
  1373. case Js::OpCode::CMOVBE:
  1374. case Js::OpCode::CMOVE:
  1375. case Js::OpCode::CMOVG:
  1376. case Js::OpCode::CMOVGE:
  1377. case Js::OpCode::CMOVL:
  1378. case Js::OpCode::CMOVLE:
  1379. case Js::OpCode::CMOVNE:
  1380. case Js::OpCode::CMOVNO:
  1381. case Js::OpCode::CMOVNP:
  1382. case Js::OpCode::CMOVNS:
  1383. case Js::OpCode::CMOVO:
  1384. case Js::OpCode::CMOVP:
  1385. case Js::OpCode::CMOVS:
  1386. if (instr->GetSrc2())
  1387. {
  1388. Assert(instr->GetDst()->GetSize() == instr->GetSrc2()->GetSize());
  1389. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1390. // 0 shouldn't be the src2 of a CMOVcc.
  1391. // CMOVcc doesn't support moving a constant and the legalizer will hoist the load of the constant
  1392. // to a register. If the constant was 0, Peeps will turn it into a XOR which, in turn, may change
  1393. // the zero flags and hence the result of CMOVcc. If you do want to CMOVcc 0, you should load 0
  1394. // into a register before the instruction whose result the CMOVcc depends on.
  1395. Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() != 0);
  1396. // sometimes we have fake src1 to help reg alloc
  1397. LegalizeOpnds<verify>(
  1398. instr,
  1399. L_Reg,
  1400. L_Reg,
  1401. L_Reg | L_Mem);
  1402. }
  1403. else
  1404. {
  1405. Assert(instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  1406. LegalizeOpnds<verify>(
  1407. instr,
  1408. L_Reg,
  1409. L_Reg | L_Mem,
  1410. L_None);
  1411. }
  1412. break;
  1413. case Js::OpCode::MOVSD:
  1414. Assert(AutoSystemInfo::Data.SSE2Available());
  1415. case Js::OpCode::MOVSS:
  1416. {
  1417. Assert(instr->GetDst()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD? TyFloat64 : TyFloat32) || instr->GetDst()->IsSimd128());
  1418. Assert(instr->GetSrc1()->GetType() == (instr->m_opcode == Js::OpCode::MOVSD ? TyFloat64 : TyFloat32) || instr->GetSrc1()->IsSimd128());
  1419. LegalizeOpnds<verify>(
  1420. instr,
  1421. L_Reg | L_Mem,
  1422. instr->GetDst()->IsMemoryOpnd()?
  1423. L_Reg : L_Reg | L_Mem, // LegalizeOpnds doesn't check if dst/src1 are both memopnd, check it here.
  1424. L_None);
  1425. break;
  1426. }
  1427. case Js::OpCode::MOVUPS:
  1428. case Js::OpCode::MOVAPS:
  1429. {
  1430. LegalizeOpnds<verify>(
  1431. instr,
  1432. L_Reg | L_Mem,
  1433. instr->GetDst()->IsMemoryOpnd()?
  1434. L_Reg : L_Reg | L_Mem, // LegalizeOpnds doesn't check if dst/src1 are both memopnd, check it here.
  1435. L_None);
  1436. break;
  1437. }
  1438. case Js::OpCode::CMP:
  1439. LegalizeOpnds<verify>(
  1440. instr,
  1441. L_None,
  1442. L_Reg | L_Mem,
  1443. L_Reg | L_Mem | L_Imm32);
  1444. break;
  1445. case Js::OpCode::TEST:
  1446. if((instr->GetSrc1()->IsImmediateOpnd() && !instr->GetSrc2()->IsImmediateOpnd()) ||
  1447. (instr->GetSrc2()->IsMemoryOpnd() && !instr->GetSrc1()->IsMemoryOpnd()))
  1448. {
  1449. if (verify)
  1450. {
  1451. AssertMsg(false, "Missing legalization");
  1452. return;
  1453. }
  1454. instr->SwapOpnds();
  1455. }
  1456. LegalizeOpnds<verify>(
  1457. instr,
  1458. L_None,
  1459. L_Reg | L_Mem,
  1460. L_Reg | L_Imm32);
  1461. break;
  1462. case Js::OpCode::COMISD:
  1463. case Js::OpCode::UCOMISD:
  1464. Assert(AutoSystemInfo::Data.SSE2Available());
  1465. case Js::OpCode::COMISS:
  1466. case Js::OpCode::UCOMISS:
  1467. LegalizeOpnds<verify>(
  1468. instr,
  1469. L_None,
  1470. L_Reg,
  1471. L_Reg | L_Mem);
  1472. break;
  1473. case Js::OpCode::INC:
  1474. case Js::OpCode::DEC:
  1475. case Js::OpCode::NEG:
  1476. MakeDstEquSrc1<verify>(instr);
  1477. LegalizeOpnds<verify>(
  1478. instr,
  1479. L_Reg | L_Mem,
  1480. L_Reg | L_Mem,
  1481. L_None);
  1482. break;
  1483. #ifdef _M_IX86
  1484. case Js::OpCode::ADC:
  1485. #endif
  1486. case Js::OpCode::ADD:
  1487. case Js::OpCode::SUB:
  1488. case Js::OpCode::SBB:
  1489. case Js::OpCode::AND:
  1490. case Js::OpCode::OR:
  1491. case Js::OpCode::XOR:
  1492. MakeDstEquSrc1<verify>(instr);
  1493. LegalizeOpnds<verify>(
  1494. instr,
  1495. L_Reg | L_Mem,
  1496. L_Reg | L_Mem,
  1497. L_Reg | L_Mem | L_Imm32);
  1498. break;
  1499. case Js::OpCode::ADDSD:
  1500. case Js::OpCode::ADDPD:
  1501. case Js::OpCode::SUBSD:
  1502. case Js::OpCode::ANDPD:
  1503. case Js::OpCode::ANDNPD:
  1504. case Js::OpCode::DIVPD:
  1505. case Js::OpCode::MAXPD:
  1506. case Js::OpCode::MINPD:
  1507. case Js::OpCode::MULPD:
  1508. case Js::OpCode::SUBPD:
  1509. Assert(AutoSystemInfo::Data.SSE2Available());
  1510. case Js::OpCode::ADDPS:
  1511. case Js::OpCode::ADDSS:
  1512. case Js::OpCode::SUBSS:
  1513. case Js::OpCode::ANDPS:
  1514. case Js::OpCode::ANDNPS:
  1515. case Js::OpCode::DIVPS:
  1516. case Js::OpCode::MAXPS:
  1517. case Js::OpCode::MINPS:
  1518. case Js::OpCode::MULPS:
  1519. case Js::OpCode::ORPS:
  1520. case Js::OpCode::PADDB:
  1521. case Js::OpCode::PADDSB:
  1522. case Js::OpCode::PADDD:
  1523. case Js::OpCode::PADDW:
  1524. case Js::OpCode::PADDSW:
  1525. case Js::OpCode::PADDUSB:
  1526. case Js::OpCode::PADDUSW:
  1527. case Js::OpCode::PAND:
  1528. case Js::OpCode::PANDN:
  1529. case Js::OpCode::PCMPEQB:
  1530. case Js::OpCode::PCMPEQD:
  1531. case Js::OpCode::PCMPEQW:
  1532. case Js::OpCode::PCMPGTB:
  1533. case Js::OpCode::PCMPGTW:
  1534. case Js::OpCode::PCMPGTD:
  1535. case Js::OpCode::PMAXSW:
  1536. case Js::OpCode::PMAXUB:
  1537. case Js::OpCode::PMINSW:
  1538. case Js::OpCode::PMINUB:
  1539. case Js::OpCode::PMULLW:
  1540. case Js::OpCode::PMULUDQ:
  1541. case Js::OpCode::POR:
  1542. case Js::OpCode::PSUBB:
  1543. case Js::OpCode::PSUBSB:
  1544. case Js::OpCode::PSUBD:
  1545. case Js::OpCode::PSUBW:
  1546. case Js::OpCode::PSUBSW:
  1547. case Js::OpCode::PSUBUSB:
  1548. case Js::OpCode::PSUBUSW:
  1549. case Js::OpCode::PXOR:
  1550. case Js::OpCode::SUBPS:
  1551. case Js::OpCode::XORPS:
  1552. case Js::OpCode::CMPLTPS:
  1553. case Js::OpCode::CMPLEPS:
  1554. case Js::OpCode::CMPEQPS:
  1555. case Js::OpCode::CMPNEQPS:
  1556. case Js::OpCode::CMPLTPD:
  1557. case Js::OpCode::CMPLEPD:
  1558. case Js::OpCode::CMPEQPD:
  1559. case Js::OpCode::CMPNEQPD:
  1560. case Js::OpCode::CMPUNORDPS:
  1561. case Js::OpCode::PUNPCKLBW:
  1562. case Js::OpCode::PUNPCKLDQ:
  1563. case Js::OpCode::PUNPCKLWD:
  1564. MakeDstEquSrc1<verify>(instr);
  1565. LegalizeOpnds<verify>(
  1566. instr,
  1567. L_Reg,
  1568. L_Reg,
  1569. L_Reg | L_Mem);
  1570. break;
  1571. case Js::OpCode::SHL:
  1572. case Js::OpCode::SHR:
  1573. case Js::OpCode::SAR:
  1574. case Js::OpCode::ROL:
  1575. case Js::OpCode::ROR:
  1576. if (verify)
  1577. {
  1578. Assert(instr->GetSrc2()->IsIntConstOpnd()
  1579. || instr->GetSrc2()->AsRegOpnd()->GetReg() == LowererMDArch::GetRegShiftCount());
  1580. }
  1581. else
  1582. {
  1583. if(!instr->GetSrc2()->IsIntConstOpnd())
  1584. {
  1585. IR::Instr *const newInstr = instr->HoistSrc2(Js::OpCode::MOV);
  1586. newInstr->GetDst()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1587. instr->GetSrc2()->AsRegOpnd()->SetReg(LowererMDArch::GetRegShiftCount());
  1588. }
  1589. instr->GetSrc2()->SetType(TyUint8);
  1590. }
  1591. MakeDstEquSrc1<verify>(instr);
  1592. LegalizeOpnds<verify>(
  1593. instr,
  1594. L_Reg | L_Mem,
  1595. L_Reg | L_Mem,
  1596. L_Reg | L_Imm32);
  1597. break;
  1598. case Js::OpCode::IMUL2:
  1599. MakeDstEquSrc1<verify>(instr); // the encoder does not support IMUL3 r, r/m, imm
  1600. LegalizeOpnds<verify>(
  1601. instr,
  1602. L_Reg,
  1603. L_Reg,
  1604. L_Reg | L_Mem | L_Imm32); // for L_Imm32, the encoder converts it into an IMUL3
  1605. break;
  1606. case Js::OpCode::TZCNT:
  1607. case Js::OpCode::LZCNT:
  1608. Assert(
  1609. (instr->m_opcode == Js::OpCode::LZCNT && AutoSystemInfo::Data.LZCntAvailable()) ||
  1610. (instr->m_opcode == Js::OpCode::TZCNT && AutoSystemInfo::Data.TZCntAvailable())
  1611. );
  1612. case Js::OpCode::BSF:
  1613. case Js::OpCode::BSR:
  1614. LegalizeOpnds<verify>(
  1615. instr,
  1616. L_Reg,
  1617. L_Reg | L_Mem,
  1618. L_None);
  1619. break;
  1620. case Js::OpCode::LEA:
  1621. Assert(instr->GetDst()->IsRegOpnd());
  1622. Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd()
  1623. || instr->GetSrc1()->IsMemRefOpnd()); // We may convert IndirOpnd to MemRefOpnd
  1624. Assert(!instr->GetSrc2());
  1625. break;
  1626. case Js::OpCode::PSRLDQ:
  1627. case Js::OpCode::PSLLDQ:
  1628. case Js::OpCode::PSRLW:
  1629. case Js::OpCode::PSRLD:
  1630. case Js::OpCode::PSRAW:
  1631. case Js::OpCode::PSRAD:
  1632. case Js::OpCode::PSLLW:
  1633. case Js::OpCode::PSLLD:
  1634. Assert(AutoSystemInfo::Data.SSE2Available());
  1635. MakeDstEquSrc1<verify>(instr);
  1636. LegalizeOpnds<verify>(
  1637. instr,
  1638. L_Reg,
  1639. L_Reg,
  1640. L_Reg | L_Imm32);
  1641. break;
  1642. case Js::OpCode::ROUNDSD:
  1643. case Js::OpCode::ROUNDSS:
  1644. Assert(AutoSystemInfo::Data.SSE4_1Available());
  1645. break;
  1646. case Js::OpCode::CVTDQ2PD:
  1647. case Js::OpCode::CVTDQ2PS:
  1648. case Js::OpCode::CVTPD2PS:
  1649. case Js::OpCode::CVTPS2PD:
  1650. case Js::OpCode::CVTSD2SI:
  1651. case Js::OpCode::CVTSD2SS:
  1652. case Js::OpCode::CVTSI2SD:
  1653. case Js::OpCode::CVTSS2SD:
  1654. case Js::OpCode::CVTTPD2DQ:
  1655. case Js::OpCode::CVTTPS2DQ:
  1656. case Js::OpCode::CVTTSD2SI:
  1657. case Js::OpCode::DIVSD:
  1658. case Js::OpCode::SQRTPD:
  1659. case Js::OpCode::SQRTSD:
  1660. case Js::OpCode::SHUFPD:
  1661. Assert(AutoSystemInfo::Data.SSE2Available());
  1662. break;
  1663. }
  1664. #if DBG
  1665. // Asserting general rules
  1666. // There should be at most 1 memory opnd in an instruction
  1667. if (instr->GetDst() && instr->GetDst()->IsMemoryOpnd())
  1668. {
  1669. // All memref address need to fit in a dword
  1670. Assert(!instr->GetDst()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetDst()->AsMemRefOpnd()->GetMemLoc()));
  1671. if (instr->GetSrc1())
  1672. {
  1673. Assert(instr->GetSrc1()->IsEqual(instr->GetDst()) || !instr->GetSrc1()->IsMemoryOpnd());
  1674. if (instr->GetSrc2())
  1675. {
  1676. Assert(!instr->GetSrc2()->IsMemoryOpnd());
  1677. }
  1678. }
  1679. }
  1680. else if (instr->GetSrc1() && instr->GetSrc1()->IsMemoryOpnd())
  1681. {
  1682. // All memref address need to fit in a dword
  1683. Assert(!instr->GetSrc1()->IsMemRefOpnd() || Math::FitsInDWord((size_t)instr->GetSrc1()->AsMemRefOpnd()->GetMemLoc()));
  1684. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsMemoryOpnd());
  1685. }
  1686. else if (instr->GetSrc2() && instr->GetSrc2()->IsMemRefOpnd())
  1687. {
  1688. // All memref address need to fit in a dword
  1689. Assert(Math::FitsInDWord((size_t)instr->GetSrc2()->AsMemRefOpnd()->GetMemLoc()));
  1690. }
  1691. // Non-MOV (second operand) immediate need to fit in DWORD for AMD64
  1692. Assert(!instr->GetSrc2() || !instr->GetSrc2()->IsImmediateOpnd()
  1693. || (TySize[instr->GetSrc2()->GetType()] != 8) || Math::FitsInDWord(instr->GetSrc2()->GetImmediateValue(instr->m_func)));
  1694. #endif
  1695. }
  1696. template <bool verify>
  1697. void LowererMD::LegalizeOpnds(IR::Instr *const instr, const uint dstForms, const uint src1Forms, uint src2Forms)
  1698. {
  1699. Assert(instr);
  1700. Assert(!instr->GetDst() == !dstForms);
  1701. Assert(!instr->GetSrc1() == !src1Forms);
  1702. Assert(!instr->GetSrc2() == !src2Forms);
  1703. Assert(src1Forms || !src2Forms);
  1704. const auto NormalizeForms = [](uint forms) -> uint
  1705. {
  1706. #ifdef _M_X64
  1707. if(forms & L_Ptr)
  1708. {
  1709. forms |= L_Imm32;
  1710. }
  1711. #else
  1712. if(forms & (L_Imm32 | L_Ptr))
  1713. {
  1714. forms |= L_Imm32 | L_Ptr;
  1715. }
  1716. #endif
  1717. return forms;
  1718. };
  1719. if(dstForms)
  1720. {
  1721. LegalizeDst<verify>(instr, NormalizeForms(dstForms));
  1722. }
  1723. if(!src1Forms)
  1724. {
  1725. return;
  1726. }
  1727. LegalizeSrc<verify>(instr, instr->GetSrc1(), NormalizeForms(src1Forms));
  1728. if(src2Forms & L_Mem && instr->GetSrc1()->IsMemoryOpnd())
  1729. {
  1730. src2Forms ^= L_Mem;
  1731. }
  1732. if(src2Forms)
  1733. {
  1734. LegalizeSrc<verify>(instr, instr->GetSrc2(), NormalizeForms(src2Forms));
  1735. }
  1736. }
// Ensures that the dst operand of 'instr' is in one of the operand forms
// permitted by the 'forms' mask (L_Reg / L_Mem / ...). When the template
// parameter 'verify' is true, the IR is left untouched and an assert fires
// wherever a rewrite would have been necessary (i.e. legalization was missed).
template <bool verify>
void LowererMD::LegalizeDst(IR::Instr *const instr, const uint forms)
{
    Assert(instr);
    Assert(forms);
    IR::Opnd *dst = instr->GetDst();
    Assert(dst);
#ifndef _M_X64
    AssertMsg(!dst->IsInt64(), "Int64 supported only on x64");
#endif
    switch(dst->GetKind())
    {
    case IR::OpndKindReg:
        // Registers require no rewriting; they just must be allowed.
        Assert(forms & L_Reg);
        return;
    case IR::OpndKindMemRef:
    {
        IR::MemRefOpnd *const memRefOpnd = dst->AsMemRefOpnd();
        if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
        {
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // The literal address can't be encoded; load it into a register and
            // rewrite the dst as an indir through that register.
            dst = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
        }
        // fall through
    }
    case IR::OpndKindSym:
    case IR::OpndKindIndir:
        if(forms & L_Mem)
        {
            return;
        }
        break;
    default:
        Assert(false);
        __assume(false);
    }
    if (verify)
    {
        AssertMsg(false, "Missing legalization");
        return;
    }
    // Use a reg dst, then store that reg into the original dst
    Assert(forms & L_Reg);
    const IRType irType = dst->GetType();
    IR::RegOpnd *const regOpnd = IR::RegOpnd::New(irType, instr->m_func);
    regOpnd->SetValueType(dst->GetValueType());
    instr->UnlinkDst();
    instr->SetDst(regOpnd);
    instr->InsertAfter(IR::Instr::New(GetStoreOp(irType), dst, regOpnd, instr->m_func));
    // If the original dst is the same as one of the srcs, hoist a src into the same reg and replace the same srcs with the reg
    const bool equalsSrc1 = instr->GetSrc1() && dst->IsEqual(instr->GetSrc1());
    const bool equalsSrc2 = instr->GetSrc2() && dst->IsEqual(instr->GetSrc2());
    if(!(equalsSrc1 || equalsSrc2))
    {
        return;
    }
    const Js::OpCode loadOpCode = GetLoadOp(irType);
    if(equalsSrc1)
    {
        // Load the (dst-equal) src1 into the new reg; src2, if it matched the
        // dst as well, can then simply reuse that reg.
        instr->HoistSrc1(loadOpCode, RegNOREG, regOpnd->m_sym);
        if(equalsSrc2)
        {
            instr->ReplaceSrc2(regOpnd);
        }
    }
    else
    {
        instr->HoistSrc2(loadOpCode, RegNOREG, regOpnd->m_sym);
    }
}
  1811. bool LowererMD::HoistLargeConstant(IR::IndirOpnd *indirOpnd, IR::Opnd *src, IR::Instr *instr) {
  1812. if (indirOpnd != nullptr)
  1813. {
  1814. if (indirOpnd->GetOffset() == 0)
  1815. {
  1816. instr->ReplaceSrc(src, indirOpnd->GetBaseOpnd());
  1817. }
  1818. else
  1819. {
  1820. // Hoist the address load as LEA [reg + offset]
  1821. // with the reg = MOV <some address within 32-bit range at the start of the function
  1822. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
  1823. Lowerer::InsertLea(regOpnd, indirOpnd, instr);
  1824. instr->ReplaceSrc(src, regOpnd);
  1825. }
  1826. return true;
  1827. }
  1828. return false;
  1829. }
// Ensures that the given src operand of 'instr' is in one of the operand forms
// permitted by the 'forms' mask. When 'verify' is true, the IR is not changed;
// an assert fires wherever a rewrite would have been required. Operands that
// cannot be made legal in place are hoisted into a register (or, on x64, into
// a load from the function's constant-address table).
template <bool verify>
void LowererMD::LegalizeSrc(IR::Instr *const instr, IR::Opnd *src, const uint forms)
{
    Assert(instr);
    Assert(src);
    Assert(src == instr->GetSrc1() || src == instr->GetSrc2());
    Assert(forms);
#ifndef _M_X64
    AssertMsg(!src->IsInt64(), "Int64 supported only on x64");
#endif
    switch(src->GetKind())
    {
    case IR::OpndKindReg:
        Assert(forms & L_Reg);
        return;
    case IR::OpndKindIntConst:
        if(forms & L_Ptr)
        {
            return;
        }
#ifdef _M_X64
        {
            IR::IntConstOpnd * intOpnd = src->AsIntConstOpnd();
            if ((TySize[intOpnd->GetType()] != 8) ||
                (!instr->isInlineeEntryInstr && Math::FitsInDWord(intOpnd->GetValue())))
            {
                if (forms & L_Imm32)
                {
                    // the constant fits in 32-bit, no need to hoist
                    return;
                }
                break;
            }
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // The actual value for inlinee entry instr isn't determined until encoder
            // So it need to be hoisted conventionally.
            if (!instr->isInlineeEntryInstr)
            {
                Assert(forms & L_Reg);
                // 64-bit constant that doesn't fit in imm32: reference it through
                // the function's constant-address table instead.
                IR::IntConstOpnd * newIntOpnd = intOpnd->Copy(instr->m_func)->AsIntConstOpnd();
                IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd(intOpnd->GetValue(), newIntOpnd, IR::AddrOpndKindConstantAddress, TyMachPtr, Js::OpCode::MOV);
                if (HoistLargeConstant(indirOpnd, src, instr))
                {
                    return;
                }
            }
        }
#endif
        break;
    case IR::OpndKindFloatConst:
        break; // assume for now that it always needs to be hoisted
    case IR::OpndKindInt64Const:
        if (forms & L_Ptr)
        {
            return;
        }
#ifdef _M_X64
        {
            IR::Int64ConstOpnd * int64Opnd = src->AsInt64ConstOpnd();
            if ((forms & L_Imm32) && ((src->GetSize() != 8) ||
                (!instr->isInlineeEntryInstr && Math::FitsInDWord(int64Opnd->GetValue()))))
            {
                // the immediate fits in 32-bit, no need to hoist
                return;
            }
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // Load the 64-bit immediate into a fresh register before 'instr'.
            IR::Opnd* regOpnd = IR::RegOpnd::New(src->GetType(), instr->m_func);
            IR::Instr* moveToReg = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, instr->m_func);
            instr->InsertBefore(moveToReg);
            instr->ReplaceSrc(src, regOpnd);
            return;
        }
#endif
        break;
    case IR::OpndKindAddr:
        if (forms & L_Ptr)
        {
            return;
        }
#ifdef _M_X64
        {
            IR::AddrOpnd * addrOpnd = src->AsAddrOpnd();
            if ((forms & L_Imm32) && ((TySize[addrOpnd->GetType()] != 8) ||
                (!instr->isInlineeEntryInstr && Math::FitsInDWord((size_t)addrOpnd->m_address))))
            {
                // the address fits in 32-bit, no need to hoist
                return;
            }
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            Assert(!instr->isInlineeEntryInstr);
            Assert(forms & L_Reg);
            // TODO: michhol, remove cast after making m_address intptr
            IR::AddrOpnd * newAddrOpnd = addrOpnd->Copy(instr->m_func)->AsAddrOpnd();
            IR::IndirOpnd * indirOpnd = instr->m_func->GetTopFunc()->GetConstantAddressIndirOpnd((intptr_t)addrOpnd->m_address, newAddrOpnd, addrOpnd->GetAddrOpndKind(), TyMachPtr, Js::OpCode::MOV);
            if (HoistLargeConstant(indirOpnd, src, instr))
            {
                return;
            }
        }
#endif
        break;
    case IR::OpndKindMemRef:
    {
        IR::MemRefOpnd *const memRefOpnd = src->AsMemRefOpnd();
        if(!LowererMDArch::IsLegalMemLoc(memRefOpnd))
        {
            if (verify)
            {
                AssertMsg(false, "Missing legalization");
                return;
            }
            // Address can't be encoded directly: load it into a register and
            // refer to the value through an indir.
            src = instr->HoistMemRefAddress(memRefOpnd, Js::OpCode::MOV);
        }
        // fall through
    }
    case IR::OpndKindSym:
    case IR::OpndKindIndir:
        if(forms & L_Mem)
        {
            return;
        }
        break;
    case IR::OpndKindHelperCall:
    case IR::OpndKindLabel:
        Assert(!instr->isInlineeEntryInstr);
        Assert(forms & L_Ptr);
        return;
    default:
        Assert(false);
        __assume(false);
    }
    if (verify)
    {
        AssertMsg(false, "Missing legalization");
        return;
    }
    // Hoist the src into a reg
    Assert(forms & L_Reg);
    Assert(!(instr->GetDst() && instr->GetDst()->IsEqual(src)));
    const Js::OpCode loadOpCode = GetLoadOp(src->GetType());
    if(src == instr->GetSrc2())
    {
        instr->HoistSrc2(loadOpCode);
        return;
    }
    // If src2 is the same operand as src1, make it share the hoisted register.
    const bool equalsSrc2 = instr->GetSrc2() && src->IsEqual(instr->GetSrc2());
    IR::Instr * hoistInstr = instr->HoistSrc1(loadOpCode);
    if(equalsSrc2)
    {
        instr->ReplaceSrc2(hoistInstr->GetDst());
    }
    // The inlinee-entry marker must travel with the instruction that now
    // carries the constant (the hoisted load).
    hoistInstr->isInlineeEntryInstr = instr->isInlineeEntryInstr;
    instr->isInlineeEntryInstr = false;
}
// Explicit template instantiations. The non-verifying (mutating) legalizers
// are always emitted; the verifying variants are only needed in debug builds.
template void LowererMD::Legalize<false>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<false>(IR::Instr *const instr, const uint dstForms, const uint src1Forms, uint src2Forms);
template void LowererMD::LegalizeDst<false>(IR::Instr *const instr, const uint forms);
template void LowererMD::LegalizeSrc<false>(IR::Instr *const instr, IR::Opnd *src, const uint forms);
template void LowererMD::MakeDstEquSrc1<false>(IR::Instr *const instr);
#if DBG
template void LowererMD::Legalize<true>(IR::Instr *const instr, bool fPostRegAlloc);
template void LowererMD::LegalizeOpnds<true>(IR::Instr *const instr, const uint dstForms, const uint src1Forms, uint src2Forms);
template void LowererMD::LegalizeDst<true>(IR::Instr *const instr, const uint forms);
template void LowererMD::LegalizeSrc<true>(IR::Instr *const instr, IR::Opnd *src, const uint forms);
template void LowererMD::MakeDstEquSrc1<true>(IR::Instr *const instr);
#endif
  2008. IR::Instr *
  2009. LowererMD::LoadFunctionObjectOpnd(IR::Instr *instr, IR::Opnd *&functionObjOpnd)
  2010. {
  2011. IR::Opnd * src1 = instr->GetSrc1();
  2012. IR::Instr * instrPrev = instr->m_prev;
  2013. if (src1 == nullptr)
  2014. {
  2015. IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  2016. StackSym *paramSym = StackSym::New(TyMachPtr, m_func);
  2017. IR::SymOpnd *paramOpnd = IR::SymOpnd::New(paramSym, TyMachPtr, m_func);
  2018. this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
  2019. IR::Instr * mov1 = IR::Instr::New(Js::OpCode::MOV, regOpnd, paramOpnd, m_func);
  2020. instr->InsertBefore(mov1);
  2021. functionObjOpnd = mov1->GetDst()->AsRegOpnd();
  2022. instrPrev = mov1;
  2023. instr->m_func->SetHasImplicitParamLoad();
  2024. }
  2025. else
  2026. {
  2027. // Inlinee, use the function object opnd on the instruction
  2028. functionObjOpnd = instr->UnlinkSrc1();
  2029. if (!functionObjOpnd->IsRegOpnd())
  2030. {
  2031. Assert(functionObjOpnd->IsAddrOpnd());
  2032. }
  2033. }
  2034. return instrPrev;
  2035. }
  2036. IR::Instr *
  2037. LowererMD::LowerLdSuper(IR::Instr *instr, IR::JnHelperMethod helperOpCode)
  2038. {
  2039. IR::Opnd * functionObjOpnd;
  2040. IR::Instr * instrPrev = LoadFunctionObjectOpnd(instr, functionObjOpnd);
  2041. m_lowerer->LoadScriptContext(instr);
  2042. LoadHelperArgument(instr, functionObjOpnd);
  2043. ChangeToHelperCall(instr, helperOpCode);
  2044. return instrPrev;
  2045. }
// Emits an inline fast path for Div_A when the divisor is a known power-of-2
// tagged int. Falls back to $helper (left for the caller to fill in) when the
// dividend is not a tagged int evenly divisible by the divisor or its half.
void
LowererMD::GenerateFastDivByPow2(IR::Instr *instr)
{
    //
    // Given:
    // dst = Div_A src1, src2
    // where src2 == power of 2
    //
    // Generate:
    // MOV s1, src1
    // AND s1, 0xFFFF000000000000 | (src2Value-1) ----- test for tagged int and divisibility by src2Value [int32]
    // AND s1, 0x00000001 | ((src2Value-1)<<1) [int31]
    // CMP s1, AtomTag_IntPtr
    // JNE $divbyhalf
    // MOV s1, src1
    // SAR s1, log2(src2Value) ------ perform the divide
    // OR s1, 1
    // MOV dst, s1
    // JMP $done
    // $divbyhalf:
    // AND s1, 0xFFFF000000000000 | (src2Value-1>>1) ----- test for tagged int and divisibility by src2Value /2 [int32]
    // AND s1, 0x00000001 | ((src2Value-1)) [int31]
    // CMP s1, AtomTag_IntPtr
    // JNE $helper
    // MOV s1, src1
    // SAR s1, log2(src2Value) [int32]
    // SAR s1, log2(src2Value) + 1 ------ removes the tag and divides [int31]
    // PUSH s1
    // PUSH 0xXXXXXXXX (ScriptContext)
    // CALL Op_FinishOddDivByPow2
    // MOV dst, eax
    // JMP $done
    // $helper:
    // ...
    // $done:
    //
    if (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->IsNotInt())
        return; // known non-int dividend: the fast path can never be taken
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
    IR::LabelInstr *divbyhalf = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyVar, m_func);
    AnalysisAssert(src2);
    Assert(src2->IsVar() && Js::TaggedInt::Is(src2->m_address) && (Math::IsPow2(Js::TaggedInt::ToInt32(src2->m_address))));
    int32 src2Value = Js::TaggedInt::ToInt32(src2->m_address);
    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));
#if INT32VAR
    // dontEncode as src2 is a power of 2.
    IR::Opnd *constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | (src2Value - 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    IR::Opnd *constant = IR::IntConstOpnd::New((0x00000001 | ((src2Value - 1) << 1)), TyInt32, m_func);
#endif
    // AND s1, constant -- one mask checks both the tag bits and divisibility
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }
    // CMP s1, AtomTag_IntPtr -- equal iff tagged int AND evenly divisible
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }
    // JNE $divbyhalf
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, divbyhalf, m_func));
    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));
    s1 = s1->UseWithNewType(TyInt32, m_func)->AsRegOpnd();
    // SAR s1, log2(src2Value) -- the actual divide
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func), m_func));
    if(s1->GetSize() != MachPtr)
    {
        s1 = s1->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
    }
#if INT32VAR
    GenerateInt32ToVarConversion(s1, instr);
#else
    // OR s1, 1 -- re-apply the tagged-int bit
    instr->InsertBefore(IR::Instr::New(Js::OpCode::OR, s1, s1, IR::IntConstOpnd::New(1, TyInt32, m_func), m_func));
#endif
    // MOV dst, s1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, s1, m_func));
    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
    // $divbyhalf: -- dividend may be divisible by src2Value/2 only (odd quotient)
    instr->InsertBefore(divbyhalf);
#if INT32VAR
    constant = IR::AddrOpnd::New((Js::Var)(0xFFFF000000000000 | ((src2Value-1) >> 1)), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true);
#else
    constant = IR::IntConstOpnd::New((0x00000001 | (src2Value-1)), TyInt32, m_func);
#endif
    // AND s1, constant
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, s1, s1, constant, m_func);
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }
    // CMP s1, AtomTag_IntPtr
    {
        IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(s1);
        cmp->SetSrc2(IR::AddrOpnd::New((Js::Var)(Js::AtomTag_IntPtr), IR::AddrOpndKindConstantVar, m_func, /* dontEncode = */ true));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }
    // JNE $helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, helper, m_func));
    // MOV s1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, s1, src1, m_func));
    s1 = s1->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();
#if INT32VAR
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value), TyInt32, m_func);
#else
    // +1 also shifts out the tag bit on 32-bit vars.
    IR::Opnd* shiftOpnd = IR::IntConstOpnd::New(Math::Log2(src2Value) + 1, TyInt32, m_func);
#endif
    // SAR s1, shiftOpnd
    instr->InsertBefore(IR::Instr::New(Js::OpCode::SAR, s1, s1, shiftOpnd, m_func));
    // Call the helper to finish the odd-quotient division:
    // PUSH s1
    // PUSH ScriptContext
    // CALL Op_FinishOddDivByPow2
    {
        IR::JnHelperMethod helperMethod;
        if (instr->dstIsTempNumber)
        {
            // Result may be a heap number; pass a stack slot for in-place allocation.
            IR::Opnd *tempOpnd;
            helperMethod = IR::HelperOp_FinishOddDivByPow2InPlace;
            Assert(dst->IsRegOpnd());
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
            IR::Instr *load = this->LoadStackAddress(tempNumberSym);
            instr->InsertBefore(load);
            tempOpnd = load->GetDst();
            this->lowererMDArch.LoadHelperArgument(instr, tempOpnd);
        }
        else
        {
            helperMethod = IR::HelperOp_FinishOddDivByPow2;
        }
        m_lowerer->LoadScriptContext(instr);
        lowererMDArch.LoadHelperArgument(instr, s1);
        IR::Instr *call = IR::Instr::New(Js::OpCode::Call, dst, IR::HelperCallOpnd::New(helperMethod, m_func), m_func);
        instr->InsertBefore(call);
        lowererMDArch.LowerCall(call, 0);
    }
    // JMP $done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
    // $helper: -- the caller emits the generic division sequence here
    instr->InsertBefore(helper);
    // $done:
    instr->InsertAfter(done);
}
  2203. ///----------------------------------------------------------------------------
  2204. ///
  2205. /// LowererMD::GenerateFastCmSrEqConst
  2206. ///
  2207. ///----------------------------------------------------------------------------
// Emits an inline strict-equality compare when src2 is a known constant
// register operand (holding 'null', 'true' or 'false'), producing the library
// true/false object in the dst. Always succeeds (returns true); the original
// instruction is rewritten in place as the "true" branch MOV.
bool
LowererMD::GenerateFastCmSrEqConst(IR::Instr *instr)
{
    //
    // Given:
    // s1 = CmSrEq_A s2, s3
    // where either s2 or s3 is 'null', 'true' or 'false'
    //
    // Generate:
    //
    // CMP s2, s3
    // JEQ $mov_true
    // MOV s1, Library.GetFalse()
    // JMP $done
    // $mov_true:
    // MOV s1, Library.GetTrue()
    // $done:
    //
    Assert(m_lowerer->IsConstRegOpnd(instr->GetSrc2()->AsRegOpnd()));
    IR::Opnd *opnd = instr->GetSrc1();
    IR::RegOpnd *opndReg = instr->GetSrc2()->AsRegOpnd();
    IR::LabelInstr *labelMovTrue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    if (!opnd->IsRegOpnd())
    {
        // Load the lhs into a register first so the compare can use it directly.
        IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
        IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, lhsReg, opnd, m_func);
        instr->InsertBefore(mov);
        opnd = lhsReg;
    }
    Assert(opnd->IsRegOpnd());
    // CMP s2, s3
    // JEQ $mov_true
    this->m_lowerer->InsertCompareBranch(opnd, opndReg->m_sym->GetConstOpnd(), Js::OpCode::BrEq_A, labelMovTrue, instr);
    // MOV s1, 'false'
    IR::Instr *instrMov = IR::Instr::New(Js::OpCode::MOV,
        instr->GetDst(),
        m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
        m_func);
    instr->InsertBefore(instrMov);
    // JMP $done
    IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    instr->InsertBefore(jmp);
    // $mov_true:
    instr->InsertBefore(labelMovTrue);
    // MOV s1, 'true' -- reuse the original instruction as the true-branch MOV
    instr->m_opcode = Js::OpCode::MOV;
    instr->UnlinkSrc1();
    instr->UnlinkSrc2();
    instr->SetSrc1(m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue));
    instr->ClearBailOutInfo();
    Legalize(instr);
    // $done:
    instr->InsertAfter(labelDone);
    return true;
}
  2264. ///----------------------------------------------------------------------------
  2265. ///
  2266. /// LowererMD::GenerateFastCmXxTaggedInt
  2267. ///
  2268. ///----------------------------------------------------------------------------
// Emits an inline, branch-free compare for Cmxx_A when both sources can be
// proven (or tested at runtime) to be tagged ints. Returns true when the
// compare was fully inlined (both sources statically tagged) and the original
// instruction removed; returns false when a helper fallback path is still
// required (the caller emits the helper call after the $helper label).
bool LowererMD::GenerateFastCmXxTaggedInt(IR::Instr *instr, bool isInHelper /* = false */)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints (i.e., are vars with the low bit set).
    //
    // Given:
    //
    // Cmxx_A dst, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    // MOV r1, src1
    // if (==, !=, !== or ===)
    // SUB r1, src2
    // NEG r1 // Sets CF if r1 != 0
    // SBB r1, r1 // CF == 1 ? r1 = -1 : r1 = 0
    // else
    // MOV r2, 0
    // CMP r1, src2
    // SETcc r2
    // DEC r2
    // set r1 to r2
    // AND r1, (notEqualResult - equalResult)
    // ADD r1, equalResult
    // MOV dst, r1
    // JMP $fallthru
    // $helper:
    // (caller will generate normal helper call sequence)
    // $fallthru:
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Opnd * dst = instr->GetDst();
    IR::RegOpnd * r1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func, isInHelper);
    Assert(src1 && src2 && dst);
    // Not tagged ints? Known non-int operands make the fast path useless.
    if (src1->IsRegOpnd() && src1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (src2->IsRegOpnd() && src2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    bool isNeqOp = instr->m_opcode == Js::OpCode::CmSrNeq_A || instr->m_opcode == Js::OpCode::CmNeq_A;
    // The two possible library results, selected arithmetically below.
    intptr_t notEqualResult = isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();
    intptr_t equalResult = !isNeqOp ? m_func->GetScriptContextInfo()->GetTrueAddr() : m_func->GetScriptContextInfo()->GetFalseAddr();
    // Tagged ints?
    bool isTaggedInts = false;
    if (src1->IsTaggedInt())
    {
        if (src2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }
    if (!isTaggedInts)
    {
        // Runtime check: branch to $helper unless both are tagged ints.
        this->GenerateSmIntPairTest(instr, src1, src2, helper);
    }
    // MOV r1, src1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, m_func));
    Js::OpCode setCC_Opcode = Js::OpCode::Nop;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmEq_A:
        break;
    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmNeq_A:
        break;
    case Js::OpCode::CmGe_A:
        setCC_Opcode = Js::OpCode::SETGE;
        break;
    case Js::OpCode::CmGt_A:
        setCC_Opcode = Js::OpCode::SETG;
        break;
    case Js::OpCode::CmLe_A:
        setCC_Opcode = Js::OpCode::SETLE;
        break;
    case Js::OpCode::CmLt_A:
        setCC_Opcode = Js::OpCode::SETL;
        break;
    default:
        Assume(UNREACHED);
    }
    if (setCC_Opcode == Js::OpCode::Nop)
    {
        // (In)equality: compute r1 = (src1 == src2) ? 0 : -1 without a branch.
        // SUB r1, src2
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, r1, r1, src2, m_func);
        instr->InsertBefore(subInstr);
        Legalize(subInstr); // src2 may need legalizing
        // NEG r1
        instr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, r1, r1, m_func));
        // SBB r1, r1
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SBB, r1, r1, r1, m_func));
    }
    else
    {
        // Relational compare: r2 = SETcc(compare) - 1, i.e. 0 if cc holds, -1 otherwise.
        IR::Instr *instrNew;
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyMachPtr, this->m_func);
        // MOV r2, 0
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r2, IR::IntConstOpnd::New(0, TyMachReg, this->m_func), m_func));
        // CMP r1, src2 (32-bit compare of the tagged values)
        IR::Opnd *r1_32 = r1->UseWithNewType(TyInt32, this->m_func);
        IR::Opnd *src2_32 = src2->UseWithNewType(TyInt32, this->m_func);
        instrNew = IR::Instr::New(Js::OpCode::CMP, m_func);
        instrNew->SetSrc1(r1_32);
        instrNew->SetSrc2(src2_32);
        instr->InsertBefore(instrNew);
        // SETcc r2
        IR::RegOpnd *r2_i8 = (IR::RegOpnd*) r2->UseWithNewType(TyInt8, this->m_func);
        instrNew = IR::Instr::New(setCC_Opcode, r2_i8, r2_i8, m_func);
        instr->InsertBefore(instrNew);
        // DEC r2
        instr->InsertBefore(IR::Instr::New(Js::OpCode::DEC, r2, r2, m_func));
        // r1 <- r2
        r1 = r2;
    }
    // AND r1, (notEqualResult - equalResult)
    // r1 is 0 or -1 here, so the mask keeps either 0 or the address delta.
    {
        IR::Instr * andInstr = IR::Instr::New(Js::OpCode::AND, r1, r1, m_func);
        andInstr->SetSrc2(IR::AddrOpnd::New((void*)((size_t)notEqualResult - (size_t)equalResult), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func));
        instr->InsertBefore(andInstr);
        Legalize(andInstr);
    }
    // ADD r1, equalResult -- yields equalResult or notEqualResult
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::ADD, r1, r1, m_func);
        add->SetSrc2(IR::AddrOpnd::New(equalResult, IR::AddrOpndKind::AddrOpndKindDynamicVar, this->m_func));
        instr->InsertBefore(add);
        Legalize(add);
    }
    // MOV dst, r1
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, r1, m_func));
    if (isTaggedInts)
    {
        // Fully inlined; the original compare is no longer needed.
        instr->Remove();
        return true;
    }
    // JMP $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, fallthru, m_func));
    instr->InsertBefore(helper);
    instr->InsertAfter(fallthru);
    return false;
}
// Float-typed compare: shares the int/float inline-compare emitter.
void LowererMD::GenerateFastCmXxR8(IR::Instr *instr)
{
    GenerateFastCmXx(instr);
}
// Int32-typed compare: shares the int/float inline-compare emitter.
void LowererMD::GenerateFastCmXxI4(IR::Instr *instr)
{
    GenerateFastCmXx(instr);
}
// Lower an inline typed compare (CmXx_A / CmXx_I4) whose sources are known
// ints, int64s, or floats, producing either a machine-int (0/1) dst or a
// Var boolean (false/true) dst.
//
// For float src:
//      dst = MOV 0/1
//            (U)COMISD src1, src2
//            JP $done             ; unordered (NaN) keeps the preloaded value
//      dst.i8 = SetCC dst.i8
// $done:
//
// for int src:
//            CMP src1, src2
//      dst = MOV 0 / false
//      dst.i8 = SetCC dst.i8 / CMOVcc true
void LowererMD::GenerateFastCmXx(IR::Instr *instr)
{
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();

    // tmp receives the compare result; it differs from dst only when dst
    // aliases one of the sources (see below).
    IR::Opnd * tmp = dst;
    bool isIntDst = dst->AsRegOpnd()->m_sym->IsInt32();
    bool isFloatSrc = src1->IsFloat();
    bool isInt64Src = src1->IsInt64();
    Assert(!isFloatSrc || src2->IsFloat());
    Assert(!isFloatSrc || isIntDst);
    Assert(!isInt64Src || src2->IsInt64());
    Assert(!isInt64Src || isIntDst);
    Assert(!isFloatSrc || AutoSystemInfo::Data.SSE2Available());
    IR::Opnd *opnd;
    IR::Instr *newInstr;

    Assert(src1->IsRegOpnd());

#if LOWER_SPLIT_INT64
    // On targets where int64 is split, the sources live in register pairs:
    // compare the high halves first; the low halves are handled further down.
    Int64RegPair src1Pair, src2Pair;
    if (isInt64Src)
    {
        src1Pair = this->m_func->FindOrCreateInt64Pair(src1);
        src2Pair = this->m_func->FindOrCreateInt64Pair(src2);
        src1 = src1Pair.high;
        src2 = src2Pair.high;
    }
#endif

    // For float sources we need a real label so the JP (unordered) can skip
    // the SETcc; otherwise everything is inserted directly before instr.
    IR::Instr * done;
    if (isFloatSrc)
    {
        done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        instr->InsertBefore(done);
    }
    else
    {
        done = instr;
    }

    if (isIntDst)
    {
        // reg = MOV 0 will get peeped to XOR reg, reg which sets the flags.
        // Put the MOV before the CMP, but use a tmp if dst == src1/src2
        if (dst->IsEqual(src1) || dst->IsEqual(src2))
        {
            tmp = IR::RegOpnd::New(dst->GetType(), this->m_func);
        }
        // dst = MOV 0
        // For float CmNeq, an unordered (NaN) compare must produce 1, so
        // preload 1 instead; the JP below then skips the SETcc.
        if (isFloatSrc && instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            opnd = IR::IntConstOpnd::New(1, TyInt32, this->m_func);
        }
        else
        {
            opnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        }
        m_lowerer->InsertMove(tmp, opnd, done);
    }

    // Pick the compare opcode: unordered (UCOMIS*) for float Eq/Neq,
    // ordered (COMIS*) for float relational compares, plain CMP for ints.
    Js::OpCode cmpOp;
    if (isFloatSrc)
    {
        if (instr->m_opcode == Js::OpCode::CmEq_A || instr->m_opcode == Js::OpCode::CmNeq_A)
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        }
        else
        {
            cmpOp = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
        }
    }
    else
    {
        cmpOp = Js::OpCode::CMP;
    }
    // CMP src1, src2
    newInstr = IR::Instr::New(cmpOp, this->m_func);
    newInstr->SetSrc1(src1);
    newInstr->SetSrc2(src2);
    done->InsertBefore(newInstr);
    LowererMD::Legalize(newInstr);

    if (isFloatSrc)
    {
        // JP $done -- PF set means unordered; keep the preloaded 0/1 result.
        newInstr = IR::BranchInstr::New(Js::OpCode::JP, done->AsLabelInstr(), this->m_func);
        done->InsertBefore(newInstr);
    }

    if (!isIntDst)
    {
        // Var boolean result path: preload false, then CMOVcc true below.
        opnd = this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse);
        LowererMD::CreateAssign(tmp, opnd, done);
    }

    // Map the compare opcode to a condition code: SETcc for an int dst,
    // CMOVcc for a Var dst. The Un* / *_A forms use unsigned conditions.
    Js::OpCode useCC;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmEq_A:
        useCC = isIntDst ? Js::OpCode::SETE : Js::OpCode::CMOVE;
        break;
    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmNeq_A:
        useCC = isIntDst ? Js::OpCode::SETNE : Js::OpCode::CMOVNE;
        break;
    case Js::OpCode::CmGe_I4:
        useCC = isIntDst ? Js::OpCode::SETGE : Js::OpCode::CMOVGE;
        break;
    case Js::OpCode::CmGt_I4:
        useCC = isIntDst ? Js::OpCode::SETG : Js::OpCode::CMOVG;
        break;
    case Js::OpCode::CmLe_I4:
        useCC = isIntDst ? Js::OpCode::SETLE : Js::OpCode::CMOVLE;
        break;
    case Js::OpCode::CmLt_I4:
        useCC = isIntDst ? Js::OpCode::SETL : Js::OpCode::CMOVL;
        break;
    case Js::OpCode::CmUnGe_I4:
    case Js::OpCode::CmGe_A:
        useCC = isIntDst ? Js::OpCode::SETAE : Js::OpCode::CMOVAE;
        break;
    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmGt_A:
        useCC = isIntDst ? Js::OpCode::SETA : Js::OpCode::CMOVA;
        break;
    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmLe_A:
        useCC = isIntDst ? Js::OpCode::SETBE : Js::OpCode::CMOVBE;
        break;
    case Js::OpCode::CmUnLt_I4:
    case Js::OpCode::CmLt_A:
        useCC = isIntDst ? Js::OpCode::SETB : Js::OpCode::CMOVB;
        break;
    default:
        useCC = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    if (isIntDst)
    {
        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(useCC, tmp_i8, tmp_i8, this->m_func);
    }
    else
    {
        // regTrue = MOV true
        IR::Opnd *regTrue = IR::RegOpnd::New(TyMachPtr, this->m_func);
        Lowerer::InsertMove(regTrue, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), done);
        // tmp = CMOVcc tmp, regTrue
        newInstr = IR::Instr::New(useCC, tmp, tmp, regTrue, this->m_func);
    }
    done->InsertBefore(newInstr);

#ifndef _M_X64
    if (isInt64Src)
    {
        // 32-bit int64 compare, second half: if the high halves differed
        // (JNE on the flags from the high CMP; SETcc leaves flags intact),
        // the result is already correct, so skip the low-half compare.
        IR::LabelInstr* skipLow = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        newInstr = IR::BranchInstr::New(Js::OpCode::JNE, skipLow, this->m_func);
        done->InsertBefore(newInstr);
        newInstr = IR::Instr::New(cmpOp, this->m_func);
        newInstr->SetSrc1(src1Pair.low);
        newInstr->SetSrc2(src2Pair.low);
        done->InsertBefore(newInstr);

        Js::OpCode lowUseCC = useCC;
        // Need to do an unsigned compare for the lower part
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmGe_I4: lowUseCC = Js::OpCode::SETAE; break;
        case Js::OpCode::CmGt_I4: lowUseCC = Js::OpCode::SETA; break;
        case Js::OpCode::CmLe_I4: lowUseCC = Js::OpCode::SETBE; break;
        case Js::OpCode::CmLt_I4: lowUseCC = Js::OpCode::SETB; break;
        }
        // tmp.i8 = SetCC tmp.i8
        IR::Opnd *tmp_i8 = tmp->UseWithNewType(TyInt8, this->m_func);
        newInstr = IR::Instr::New(lowUseCC, tmp_i8, tmp_i8, this->m_func);
        done->InsertBefore(newInstr);
        done->InsertBefore(skipLow);
    }
#endif

    if (tmp != dst)
    {
        // dst aliased a source, so the result was built in tmp; copy it out now.
        newInstr = IR::Instr::New(Js::OpCode::MOV, dst, tmp, this->m_func);
        instr->InsertBefore(newInstr);
    }

    instr->Remove();
}
  2614. IR::Instr * LowererMD::GenerateConvBool(IR::Instr *instr)
  2615. {
  2616. // TEST src1, src1
  2617. // dst = MOV true
  2618. // rf = MOV false
  2619. // dst = CMOV dst, rf
  2620. IR::Instr *instrNew, *instrFirst;
  2621. IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
  2622. IR::RegOpnd *regFalse;
  2623. // TEST src1, src2
  2624. instrFirst = instrNew = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  2625. instrNew->SetSrc1(instr->GetSrc1());
  2626. instrNew->SetSrc2(instr->GetSrc1());
  2627. instr->InsertBefore(instrNew);
  2628. // dst = MOV true
  2629. Lowerer::InsertMove(dst, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
  2630. // rf = MOV false
  2631. regFalse = IR::RegOpnd::New(TyMachPtr, this->m_func);
  2632. Lowerer::InsertMove(regFalse, this->m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
  2633. // Add dst as src1 of CMOV to create a pseudo use of dst. Otherwise, the register allocator
  2634. // won't know the previous dst is needed. and needed in the same register as the dst of the CMOV.
  2635. // dst = CMOV dst, rf
  2636. instrNew = IR::Instr::New(Js::OpCode::CMOVE, dst, dst, regFalse, this->m_func);
  2637. instr->InsertBefore(instrNew);
  2638. instr->Remove();
  2639. return instrFirst;
  2640. }
  2641. ///----------------------------------------------------------------------------
  2642. ///
  2643. /// LowererMD::GenerateFastAdd
  2644. ///
  2645. /// NOTE: We assume that only the sum of two Int31's will have 0x2 set. This
  2646. /// is only true until we have a var type with tag == 0x2.
  2647. ///
  2648. ///----------------------------------------------------------------------------
// Emit the tagged-int fast path for an Add (or Incr_A), falling back to the
// caller-generated helper call when the operands aren't both Int31/Int32 vars.
// Returns true when the fast path was emitted (the caller still emits the
// helper call at $helper); false when no fast path applies at all.
bool
LowererMD::GenerateFastAdd(IR::Instr * instrAdd)
{
    // Given:
    //
    // dst = Add src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1          -- Get rid of one of the tag [Int31 only]
    // s1 = ADD s1, src2    -- try an inline add
    //      JO $helper      -- bail if the add overflowed
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *     instr;
    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelFallThru;
    IR::Opnd *      opndReg;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndSrc2;

    opndSrc1 = instrAdd->GetSrc1();
    opndSrc2 = instrAdd->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");

    // Generate fastpath for Incr_A anyway -
    // Incrementing strings representing integers can be inter-mixed with integers
    // e.g. "1"++ -> converts 1 to an int and thereafter, integer increment is expected.
    if (opndSrc1->IsRegOpnd() && (opndSrc1->AsRegOpnd()->IsNotInt() || opndSrc1->GetValueType().IsString()
        || (instrAdd->m_opcode != Js::OpCode::Incr_A && opndSrc1->GetValueType().IsLikelyString())))
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && (opndSrc2->AsRegOpnd()->IsNotInt() ||
        opndSrc2->GetValueType().IsLikelyString()))
    {
        return false;
    }

    // Tagged ints? If both are statically known tagged ints, the runtime
    // type check can be skipped entirely.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrAdd, opndSrc1, opndSrc2, labelHelper);
    }

    if (opndSrc1->IsAddrOpnd())
    {
        // If opnd1 is a constant, just swap them.
        IR::Opnd *opndTmp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = opndTmp;
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrAdd->InsertBefore(instr);

#if !INT32VAR
    // Int31 tagging: adding two tagged values double-counts the tag bit, so
    // fold the "minus one" into a constant src2 when possible, else DEC s1.
    if (opndSrc2->IsAddrOpnd())
    {
        Assert(opndSrc2->AsAddrOpnd()->GetAddrOpndKind() == IR::AddrOpndKindConstantVar);
        opndSrc2 = IR::IntConstOpnd::New(*((int *)&(opndSrc2->AsAddrOpnd()->m_address)) - 1, TyInt32, this->m_func, opndSrc2->AsAddrOpnd()->m_dontEncode);
        opndSrc2 = opndSrc2->Use(this->m_func);
    }
    else if (opndSrc2->IsIntConstOpnd())
    {
        Assert(opndSrc2->GetType() == TyInt32);
        opndSrc2 = opndSrc2->Use(this->m_func);
        opndSrc2->AsIntConstOpnd()->DecrValue(1);
    }
    else
    {
        // s1 = DEC s1
        opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::DEC, opndReg, opndReg, this->m_func);
        instrAdd->InsertBefore(instr);
    }
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
#else
    // INT32VAR tagging: the low 32 bits hold the untagged int, so a plain
    // 32-bit add works; prefer INC when adding the constant 1.
    if (opndSrc2->IsAddrOpnd())
    {
        // truncate to untag
        int value = ::Math::PointerCastToIntegralTruncate<int>(opndSrc2->AsAddrOpnd()->m_address);
        if (value == 1)
        {
            instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
        }
        else
        {
            opndSrc2 = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
            instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2, this->m_func);
        }
    }
    else
    {
        instr = IR::Instr::New(Js::OpCode::ADD, opndReg, opndReg, opndSrc2->UseWithNewType(TyInt32, this->m_func), this->m_func);
    }
#endif

    // s1 = ADD s1, src2
    instrAdd->InsertBefore(instr);
    Legalize(instr);

    // JO $helper -- signed overflow means the result doesn't fit an int32.
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrAdd->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrAdd);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrAdd->GetDst(), opndReg, this->m_func);
    instrAdd->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrAdd->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrAdd->InsertBefore(labelHelper);
    instrAdd->InsertAfter(labelFallThru);

    return true;
}
  2794. ///----------------------------------------------------------------------------
  2795. ///
  2796. /// LowererMD::GenerateFastSub
  2797. ///
  2798. ///
  2799. ///----------------------------------------------------------------------------
// Emit the tagged-int fast path for a Sub, falling back to the caller-generated
// helper call when the operands aren't both Int31/Int32 vars. Returns true when
// the fast path was emitted (the caller still emits the helper call at $helper);
// false when no fast path applies.
bool
LowererMD::GenerateFastSub(IR::Instr * instrSub)
{
    // Given:
    //
    // dst = Sub src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = SUB s1, src2    -- try an inline sub
    //      JO $helper      -- bail if the subtract overflowed
    //      JNE $helper
    // s1 = INC s1          -- restore the var tag on the result [Int31 only]
    // s1 = OR s1, AtomTag_IntPtr [Int32 only]
    // dst = MOV s1
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *     instr;
    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelFallThru;
    IR::Opnd *      opndReg;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndSrc2;

    opndSrc1 = instrSub->GetSrc1();
    opndSrc2 = instrSub->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Sub instruction");

    // Not tagged ints? Bail out of the fast path entirely.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return false;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return false;
    }

    // Tagged ints? If both are statically known tagged ints, skip the
    // runtime type check.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->GenerateSmIntPairTest(instrSub, opndSrc1, opndSrc2, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrSub->InsertBefore(instr);

    // s1 = SUB s1, src2
    // (Subtracting two tagged Int31 values cancels the tag bits.)
    instr = IR::Instr::New(Js::OpCode::SUB, opndReg, opndReg, opndSrc2, this->m_func);
    instrSub->InsertBefore(instr);

    // JO $helper -- signed overflow means the result doesn't fit an int32.
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrSub->InsertBefore(instr);

#if !INT32VAR
    // s1 = INC s1 -- restore the var tag on the result.
    instr = IR::Instr::New(Js::OpCode::INC, opndReg, opndReg, this->m_func);
    instrSub->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    // s1 = OR s1, AtomTag_IntPtr
    GenerateInt32ToVarConversion(opndReg, instrSub);
#endif

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrSub->GetDst(), opndReg, this->m_func);
    instrSub->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrSub->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrSub->InsertBefore(labelHelper);
    instrSub->InsertAfter(labelFallThru);

    return true;
}
  2900. ///----------------------------------------------------------------------------
  2901. ///
  2902. /// LowererMD::GenerateFastMul
  2903. ///
  2904. ///----------------------------------------------------------------------------
// Emit the tagged-int fast path for a Mul, including the -0 check (a zero
// product with a negative operand must produce the Var -0.0, not +0).
// Returns true in all cases; the caller emits the helper call at $helper.
// NOTE(review): unlike GenerateFastAdd/GenerateFastSub, the IsNotInt early-outs
// here return true rather than false — confirm the caller relies on that.
bool
LowererMD::GenerateFastMul(IR::Instr * instrMul)
{
    // Given:
    //
    // dst = Mul src1, src2
    //
    // Generate:
    //
    // (If not 2 Int31's, jump to $helper.)
    // s1 = MOV src1
    // s1 = DEC s1          -- clear the var tag from the value to be multiplied [Int31 only]
    // s2 = MOV src2
    // s2 = SAR s2, Js::VarTag_Shift  -- extract the real src2 amount from the var [Int31 only]
    // s1 = IMUL s1, s2     -- do the signed mul
    //      JO $helper      -- bail if the result overflowed
    // s3 = MOV s1
    //      TEST s3, s3     -- Check result is 0. might be -0. Result is -0 when a negative number is multiplied with 0.
    //      JEQ $zero
    //      JMP $nonzero
    // $zero:               -- result of mul was 0. try to check for -0
    // s2 = ADD s2, src1    -- Add src1 to s2
    //      JGT $nonzero    -- positive 0. [Int31 only]
    //      JGE $nonzero    -- positive 0. [Int32 only]
    // dst = ToVar(-0.0)    -- load negative 0
    //      JMP $fallthru
    // $nonzero:
    // s3 = INC s3          -- restore the var tag on the result [Int31 only]
    // s3 = OR s3, AtomTag_IntPtr [Int32 only]
    // dst = MOV s3
    //      JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelFallThru;
    IR::LabelInstr *labelNonZero;
    IR::Instr *     instr;
    IR::RegOpnd *   opndReg1;
    IR::RegOpnd *   opndReg2;
    IR::RegOpnd *   s3;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndSrc2;

    opndSrc1 = instrMul->GetSrc1();
    opndSrc2 = instrMul->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on mul instruction");

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // (If not 2 Int31's, jump to $helper.)
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    labelNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    this->GenerateSmIntPairTest(instrMul, opndSrc1, opndSrc2, labelHelper);

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);

    // Keep any immediate in the src2 position so the constant handling below applies.
    if (opndSrc1->IsImmediateOpnd())
    {
        IR::Opnd * temp = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = temp;
    }

    // s1 = MOV src1
    opndReg1 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);

#if !INT32VAR
    // s1 = DEC s1 -- clear the var tag bit before multiplying.
    instr = IR::Instr::New(Js::OpCode::DEC, opndReg1, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    if (opndSrc2->IsImmediateOpnd())
    {
        Assert(opndSrc2->IsAddrOpnd() && opndSrc2->AsAddrOpnd()->IsVar());

        // Untag the constant at compile time instead of emitting a SAR.
        IR::Opnd *opnd2 = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address), TyInt32, this->m_func);

        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opnd2, this->m_func);
        instrMul->InsertBefore(instr);
    }
    else
    {
        // s2 = MOV src2
        opndReg2 = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg2, opndSrc2, this->m_func);
        instrMul->InsertBefore(instr);

#if !INT32VAR
        // s2 = SAR s2, Js::VarTag_Shift -- extract the untagged int value.
        instr = IR::Instr::New(
            Js::OpCode::SAR, opndReg2, opndReg2,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
        instrMul->InsertBefore(instr);
#endif
    }

    // s1 = IMUL s1, s2
    instr = IR::Instr::New(Js::OpCode::IMUL2, opndReg1, opndReg1, opndReg2, this->m_func);
    instrMul->InsertBefore(instr);

    // JO $helper -- signed overflow: fall back to the helper.
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrMul->InsertBefore(instr);

    // MOV s3, s1
    s3 = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, s3, opndReg1, this->m_func);
    instrMul->InsertBefore(instr);

    // TEST s3, s3 -- a zero product may conceptually be -0.
    instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
    instr->SetSrc1(s3);
    instr->SetSrc2(s3);
    instrMul->InsertBefore(instr);

    // JEQ $zero
    IR::LabelInstr *labelZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelZero, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $nonzero
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // $zero: -- decide between +0 and -0 by checking the operands' signs.
    instrMul->InsertBefore(labelZero);

    // s2 = ADD s2, src1
    instr = IR::Instr::New(Js::OpCode::ADD, opndReg2, opndReg2, opndSrc1, this->m_func);
    instrMul->InsertBefore(instr);
    Legalize(instr);

    // JGT $nonzero (Int31) / JGE $nonzero (Int32): sum non-negative means
    // neither operand was negative, so the result is an ordinary +0.
#if INT32VAR
    Js::OpCode greaterOpCode = Js::OpCode::JGE;
#else
    Js::OpCode greaterOpCode = Js::OpCode::JGT;
#endif
    instr = IR::BranchInstr::New(greaterOpCode, labelNonZero, this->m_func);
    instrMul->InsertBefore(instr);

    // dst = ToVar(-0.0) -- load negative 0
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), m_lowerer->LoadLibraryValueOpnd(instrMul, LibraryValue::ValueNegativeZero), this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $nonzero:
    instrMul->InsertBefore(labelNonZero);

#if !INT32VAR
    // s3 = INC s3 -- restore the var tag on the result.
    instr = IR::Instr::New(Js::OpCode::INC, s3, s3, this->m_func);
    instrMul->InsertBefore(instr);
#endif

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    // Cast is fine. We know ChangeType returns IR::Opnd * but it
    // preserves the Type.
    //
    if(TyMachReg != s3->GetType())
    {
        s3 = static_cast<IR::RegOpnd *>(s3->UseWithNewType(TyMachPtr, this->m_func));
    }

#if INT32VAR
    // s3 = OR s3, AtomTag_IntPtr
    GenerateInt32ToVarConversion(s3, instrMul);
#endif

    // dst = MOV s3
    instr = IR::Instr::New(Js::OpCode::MOV, instrMul->GetDst(), s3, this->m_func);
    instrMul->InsertBefore(instr);

    // JMP $fallthru
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrMul->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrMul->InsertBefore(labelHelper);
    instrMul->InsertAfter(labelFallThru);

    return true;
}
// Emit the tagged-int fast path for a Neg. Constant sources are folded at
// compile time (including the -0.0 case). Returns true when a fast path with
// a helper fallback was emitted; false when the instruction was fully handled
// (constant fold) and the caller should skip the helper.
bool
LowererMD::GenerateFastNeg(IR::Instr * instrNeg)
{
    // Given:
    //
    // dst = Neg src
    //
    // Generate:
    //
    //       if not int, jump $helper
    //       if src == 0     -- test for zero (must be handled by the runtime to preserve
    //       JEQ $helper     -- difference btw +0 and -0)
    // dst = MOV src
    // dst = NEG dst         -- do an inline NEG
    // dst = ADD dst, 2      -- restore the var tag on the result [int31 only]
    //       JO $helper
    // dst = OR dst, AtomTag_Ptr [int32 only]
    //       JMP $fallthru
    // $helper:
    //      (caller generates helper call)
    // $fallthru:

    IR::Instr *     instr;
    IR::LabelInstr *labelHelper = nullptr;
    IR::LabelInstr *labelFallThru = nullptr;
    IR::Opnd *      opndSrc1;
    IR::Opnd *      opndDst;
    bool usingNewDst = false;
    opndSrc1 = instrNeg->GetSrc1();
    AssertMsg(opndSrc1, "Expected src opnd on Neg instruction");

    // NEG reads and writes dst, so use a scratch register when dst aliases src.
    if(opndSrc1->IsEqual(instrNeg->GetDst()))
    {
        usingNewDst = true;
        opndDst = IR::RegOpnd::New(TyInt32, this->m_func);
    }
    else
    {
        opndDst = instrNeg->GetDst()->UseWithNewType(TyInt32, this->m_func);
    }

    // Constant source: fold the negation at compile time and turn the
    // instruction into a plain assignment.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->m_sym->IsIntConst())
    {
        IR::Opnd *newOpnd;
        IntConstType value = opndSrc1->AsRegOpnd()->m_sym->GetIntConstValue();

        if (value == 0)
        {
            // If the negate operand is zero, the result is -0.0, which is a Number rather than an Int31.
            newOpnd = m_lowerer->LoadLibraryValueOpnd(instrNeg, LibraryValue::ValueNegativeZero);
        }
        else
        {
            // negation below can overflow because max negative int32 value > max positive value by 1.
            newOpnd = IR::AddrOpnd::NewFromNumber(-(int64)value, m_func);
        }

        instrNeg->ClearBailOutInfo();
        instrNeg->FreeSrc1();
        instrNeg->SetSrc1(newOpnd);
        instrNeg = this->ChangeToAssign(instrNeg);

        // Skip lowering call to helper
        return false;
    }

    bool isInt = (opndSrc1->IsTaggedInt());

    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isInt)
    {
        // if not int, jump $helper
        GenerateSmIntTest(opndSrc1, instrNeg, labelHelper);
    }

    //
    // For 32 bit arithmetic we copy them and set the size of operands to be 32 bits. This is
    // relevant only on AMD64.
    //
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    // Zero must go to the helper: negating it produces -0.0, a Number.
    GenerateTaggedZeroTest(opndSrc1, instrNeg, labelHelper);

    // dst = MOV src
    instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndSrc1, this->m_func);
    instrNeg->InsertBefore(instr);

    // dst = NEG dst
    instr = IR::Instr::New(Js::OpCode::NEG, opndDst, opndDst, this->m_func);
    instrNeg->InsertBefore(instr);

#if !INT32VAR
    // dst = ADD dst, 2 -- restore the var tag (NEG flipped it).
    instr = IR::Instr::New(Js::OpCode::ADD, opndDst, opndDst, IR::IntConstOpnd::New(2, TyInt32, this->m_func), this->m_func);
    instrNeg->InsertBefore(instr);
#endif

    // JO $helper -- negating INT_MIN overflows.
    instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
    instrNeg->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndDst->GetType())
    {
        opndDst = opndDst->UseWithNewType(TyMachPtr, this->m_func);
    }

#if INT32VAR
    GenerateInt32ToVarConversion(opndDst, instrNeg);
#endif

    if(usingNewDst)
    {
        // Copy the scratch result into the real (aliased) dst.
        instr = IR::Instr::New(Js::OpCode::MOV, instrNeg->GetDst(), opndDst, this->m_func);
        instrNeg->InsertBefore(instr);
    }

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrNeg->InsertBefore(instr);

    // $helper:
    //      (caller generates helper sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrNeg->InsertBefore(labelHelper);
    instrNeg->InsertAfter(labelFallThru);

    return true;
}
  3199. void
  3200. LowererMD::GenerateFastBrS(IR::BranchInstr *brInstr)
  3201. {
  3202. IR::Opnd *src1 = brInstr->UnlinkSrc1();
  3203. Assert(src1->IsIntConstOpnd() || src1->IsAddrOpnd() || src1->IsRegOpnd());
  3204. IR::Instr *cmpInstr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  3205. cmpInstr->SetSrc1(m_lowerer->LoadOptimizationOverridesValueOpnd(brInstr, OptimizationOverridesValue::OptimizationOverridesSideEffects));
  3206. cmpInstr->SetSrc2(src1);
  3207. brInstr->InsertBefore(cmpInstr);
  3208. Legalize(cmpInstr);
  3209. Js::OpCode opcode = Js::OpCode::InvalidOpCode;
  3210. switch(brInstr->m_opcode)
  3211. {
  3212. case Js::OpCode::BrHasSideEffects:
  3213. opcode = Js::OpCode::JNE;
  3214. break;
  3215. case Js::OpCode::BrNotHasSideEffects:
  3216. opcode = Js::OpCode::JEQ;
  3217. break;
  3218. default:
  3219. Assert(UNREACHED);
  3220. __assume(false);
  3221. }
  3222. brInstr->m_opcode = opcode;
  3223. }
  3224. ///----------------------------------------------------------------------------
  3225. ///
  3226. /// LowererMD::GenerateSmIntPairTest
  3227. ///
  3228. /// Generate code to test whether the given operands are both Int31 vars
  3229. /// and branch to the given label if not.
  3230. ///
  3231. ///----------------------------------------------------------------------------
  3232. #if !INT32VAR
// (!INT32VAR variant) Emit a runtime test that both operands are tagged Int31
// vars, branching to labelFail if not. Returns the instruction that preceded
// the inserted sequence (so callers can continue emitting from there).
IR::Instr *
LowererMD::GenerateSmIntPairTest(
    IR::Instr * instrInsert,
    IR::Opnd * opndSrc1,
    IR::Opnd * opndSrc2,
    IR::LabelInstr * labelFail)
{
    IR::Opnd *  opndReg;
    IR::Instr * instrPrev = instrInsert->m_prev;
    IR::Instr * instr;

    Assert(opndSrc1->GetType() == TyVar);
    Assert(opndSrc2->GetType() == TyVar);

    // Canonicalize: if src1 is statically known tagged, swap so that the
    // operand still needing a runtime check ends up in opndSrc1.
    if (opndSrc1->IsTaggedInt())
    {
        IR::Opnd *tempOpnd = opndSrc1;
        opndSrc1 = opndSrc2;
        opndSrc2 = tempOpnd;
    }

    if (opndSrc2->IsTaggedInt())
    {
        if (opndSrc1->IsTaggedInt())
        {
            // Both statically tagged: no runtime test needed.
            return instrPrev;
        }

        // Only src1 needs checking:
        // TEST src1, AtomTag
        // JEQ $fail
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc1);
        instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
        instrInsert->InsertBefore(instr);
    }
    else
    {
        // Both unknown: AND the two tag bits together — the TEST below sets ZF
        // (taking the fail branch) unless both values carry the AtomTag bit.
        // s1 = MOV src1
        // s1 = AND s1, 1
        //      TEST s1, src2
        //      JEQ $fail

        // s1 = MOV src1
        opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
        instrInsert->InsertBefore(instr);

        // s1 = AND s1, AtomTag
        instr = IR::Instr::New(
            Js::OpCode::AND, opndReg, opndReg, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
        instrInsert->InsertBefore(instr);

        // TEST s1, src2
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndReg);
        instr->SetSrc2(opndSrc2);
        instrInsert->InsertBefore(instr);
    }

    // JEQ $fail
    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelFail, this->m_func);
    instrInsert->InsertBefore(instr);

    return instrPrev;
}
  3289. #else
IR::Instr *
LowererMD::GenerateSmIntPairTest(
IR::Instr * instrInsert,
IR::Opnd * opndSrc1,
IR::Opnd * opndSrc2,
IR::LabelInstr * labelFail)
{
// INT32VAR (64-bit) variant: both operands' tag bits are shifted down into
// the low 32 bits of one register and compared against AtomTag_Pair in a
// single CMP/JNE. Returns the instruction preceding the generated sequence.
IR::Opnd * opndReg;
IR::Instr * instrPrev = instrInsert->m_prev;
IR::Instr * instr;
Assert(opndSrc1->GetType() == TyVar);
Assert(opndSrc2->GetType() == TyVar);
// Canonicalize: keep the operand that still needs a runtime check in src1.
if (opndSrc1->IsTaggedInt())
{
IR::Opnd *tempOpnd = opndSrc1;
opndSrc1 = opndSrc2;
opndSrc2 = tempOpnd;
}
if (opndSrc2->IsTaggedInt())
{
if (opndSrc1->IsTaggedInt())
{
// Both statically tagged: nothing to check at runtime.
return instrPrev;
}
// Only one operand is unknown: delegate to the single-operand test.
GenerateSmIntTest(opndSrc1, instrInsert, labelFail);
return instrPrev;
}
else
{
opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
#ifdef SHIFTLOAD
// SHLD shifts the high bits of each source into opndReg, accumulating
// both operands' tag bits in one register with two instructions.
instr = IR::Instr::New(Js::OpCode::SHLD, opndReg, opndSrc1, IR::IntConstOpnd::New(16, TyInt8, this->m_func), this->m_func);
instrInsert->InsertBefore(instr);
instr = IR::Instr::New(Js::OpCode::SHLD, opndReg, opndSrc2, IR::IntConstOpnd::New(16, TyInt8, this->m_func), this->m_func);
instrInsert->InsertBefore(instr);
#else
IR::Opnd * opndReg1;
// s1 = MOV src1
// s1 = SHR s1, VarTag_Shift
// s2 = MOV src2
// s2 = SHR s2, 32
// s1 = OR s1, s2 ------ move both tags to the lower 32 bits
// CMP s1, AtomTag_Pair ------ compare the tags together to the expected tag pair
// JNE $fail
// s1 = MOV src1
instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
instrInsert->InsertBefore(instr);
// s1 = SHR s1, VarTag_Shift
instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
instrInsert->InsertBefore(instr);
// s2 = MOV src2
opndReg1 = IR::RegOpnd::New(TyMachReg, this->m_func);
instr = IR::Instr::New(Js::OpCode::MOV, opndReg1, opndSrc2, this->m_func);
instrInsert->InsertBefore(instr);
// s2 = SHR s2, 32
instr = IR::Instr::New(Js::OpCode::SHR, opndReg1, opndReg1, IR::IntConstOpnd::New(32, TyInt8, this->m_func), this->m_func);
instrInsert->InsertBefore(instr);
// s1 = OR s1, s2
instr = IR::Instr::New(Js::OpCode::OR, opndReg, opndReg, opndReg1, this->m_func);
instrInsert->InsertBefore(instr);
#endif
// Only the combined tag bits in the low 32 bits matter for the compare.
opndReg = opndReg->UseWithNewType(TyInt32, this->m_func)->AsRegOpnd();
// CMP s1, AtomTag_Pair
instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
instr->SetSrc1(opndReg);
instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag_Pair, TyInt32, this->m_func, true));
instrInsert->InsertBefore(instr);
}
// JNE $fail
instr = IR::BranchInstr::New(Js::OpCode::JNE, labelFail, this->m_func);
instrInsert->InsertBefore(instr);
return instrPrev;
}
  3363. #endif
  3364. IR::BranchInstr *
  3365. LowererMD::GenerateLocalInlineCacheCheck(
  3366. IR::Instr * instrLdSt,
  3367. IR::RegOpnd * opndType,
  3368. IR::RegOpnd * inlineCache,
  3369. IR::LabelInstr * labelNext,
  3370. bool checkTypeWithoutProperty)
  3371. {
  3372. // Generate:
  3373. //
  3374. // CMP s1, [&(inlineCache->u.local.type/typeWithoutProperty)]
  3375. // JNE $next
  3376. IR::Instr * instr;
  3377. IR::Opnd* typeOpnd;
  3378. if (checkTypeWithoutProperty)
  3379. {
  3380. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.typeWithoutProperty), TyMachReg, instrLdSt->m_func);
  3381. }
  3382. else
  3383. {
  3384. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrLdSt->m_func);
  3385. }
  3386. // CMP type, [&(inlineCache->u.local.type/typeWithoutProperty)]
  3387. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3388. instr->SetSrc1(opndType);
  3389. instr->SetSrc2(typeOpnd);
  3390. instrLdSt->InsertBefore(instr);
  3391. // JNE $next
  3392. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelNext, instrLdSt->m_func);
  3393. instrLdSt->InsertBefore(branchInstr);
  3394. return branchInstr;
  3395. }
  3396. IR::BranchInstr *
  3397. LowererMD::GenerateProtoInlineCacheCheck(
  3398. IR::Instr * instrLdSt,
  3399. IR::RegOpnd * opndType,
  3400. IR::RegOpnd * inlineCache,
  3401. IR::LabelInstr * labelNext)
  3402. {
  3403. // Generate:
  3404. //
  3405. // CMP s1, [&(inlineCache->u.proto.type)]
  3406. // JNE $next
  3407. IR::Instr * instr;
  3408. IR::Opnd* typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.type), TyMachReg, instrLdSt->m_func);
  3409. // CMP s1, [&(inlineCache->u.proto.type)]
  3410. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3411. instr->SetSrc1(opndType);
  3412. instr->SetSrc2(typeOpnd);
  3413. instrLdSt->InsertBefore(instr);
  3414. // JNE $next
  3415. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelNext, instrLdSt->m_func);
  3416. instrLdSt->InsertBefore(branchInstr);
  3417. return branchInstr;
  3418. }
  3419. IR::BranchInstr *
  3420. LowererMD::GenerateFlagInlineCacheCheck(
  3421. IR::Instr * instrLdSt,
  3422. IR::RegOpnd * opndType,
  3423. IR::RegOpnd * opndInlineCache,
  3424. IR::LabelInstr * labelNext)
  3425. {
  3426. // Generate:
  3427. //
  3428. // CMP s1, [&(inlineCache->u.accessor.type)]
  3429. // JNE $next
  3430. IR::Instr * instr;
  3431. IR::Opnd* typeOpnd;
  3432. typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.type), TyMachReg, instrLdSt->m_func);
  3433. // CMP s1, [&(inlineCache->u.flag.type)]
  3434. instr = IR::Instr::New(Js::OpCode::CMP, instrLdSt->m_func);
  3435. instr->SetSrc1(opndType);
  3436. instr->SetSrc2(typeOpnd);
  3437. instrLdSt->InsertBefore(instr);
  3438. // JNE $next
  3439. IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelNext, instrLdSt->m_func);
  3440. instrLdSt->InsertBefore(branchInstr);
  3441. return branchInstr;
  3442. }
  3443. void
  3444. LowererMD::GenerateFlagInlineCacheCheckForGetterSetter(
  3445. IR::Instr * insertBeforeInstr,
  3446. IR::RegOpnd * opndInlineCache,
  3447. IR::LabelInstr * labelNext)
  3448. {
  3449. uint accessorFlagMask;
  3450. if (PHASE_OFF(Js::InlineGettersPhase, insertBeforeInstr->m_func))
  3451. {
  3452. accessorFlagMask = Js::InlineCache::GetSetterFlagMask();
  3453. }
  3454. else if (PHASE_OFF(Js::InlineSettersPhase, insertBeforeInstr->m_func))
  3455. {
  3456. accessorFlagMask = Js::InlineCache::GetGetterFlagMask();
  3457. }
  3458. else
  3459. {
  3460. accessorFlagMask = Js::InlineCache::GetGetterSetterFlagMask();
  3461. }
  3462. // Generate:
  3463. //
  3464. // TEST [&(inlineCache->u.accessor.flags)], Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag
  3465. // JEQ $next
  3466. IR::Instr * instr;
  3467. IR::Opnd* flagsOpnd;
  3468. flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
  3469. // TEST [&(inlineCache->u.accessor.flags)], InlineCacheGetterFlag | InlineCacheSetterFlag
  3470. instr = IR::Instr::New(Js::OpCode::TEST,this->m_func);
  3471. instr->SetSrc1(flagsOpnd);
  3472. instr->SetSrc2(IR::IntConstOpnd::New(accessorFlagMask, TyInt8, this->m_func));
  3473. insertBeforeInstr->InsertBefore(instr);
  3474. // JEQ $next
  3475. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelNext, this->m_func);
  3476. insertBeforeInstr->InsertBefore(instr);
  3477. }
  3478. void
  3479. LowererMD::GenerateLdFldFromLocalInlineCache(
  3480. IR::Instr * instrLdFld,
  3481. IR::RegOpnd * opndBase,
  3482. IR::Opnd * opndDst,
  3483. IR::RegOpnd * inlineCache,
  3484. IR::LabelInstr * labelFallThru,
  3485. bool isInlineSlot)
  3486. {
  3487. // Generate:
  3488. //
  3489. // s1 = MOV base->slots -- load the slot array
  3490. // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  3491. // dst = MOV [s1 + s2* Scale] -- load the value directly from the slot
  3492. // JMP $fallthru
  3493. IR::Instr * instr;
  3494. IR::Opnd* slotIndexOpnd;
  3495. IR::IndirOpnd * opndIndir;
  3496. IR::RegOpnd * opndSlotArray = nullptr;
  3497. if (!isInlineSlot)
  3498. {
  3499. opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  3500. opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
  3501. instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrLdFld->m_func);
  3502. instrLdFld->InsertBefore(instr);
  3503. }
  3504. // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  3505. IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  3506. slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrLdFld->m_func);
  3507. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndReg2, slotIndexOpnd, instrLdFld->m_func);
  3508. instrLdFld->InsertBefore(instr);
  3509. if (isInlineSlot)
  3510. {
  3511. // dst = MOV [base + s2* Scale] -- load the value directly from the slot
  3512. opndIndir = IR::IndirOpnd::New(opndBase, opndReg2, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  3513. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
  3514. instrLdFld->InsertBefore(instr);
  3515. }
  3516. else
  3517. {
  3518. // dst = MOV [s1 + s2* Scale] -- load the value directly from the slot
  3519. opndIndir = IR::IndirOpnd::New(opndSlotArray, opndReg2, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  3520. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
  3521. instrLdFld->InsertBefore(instr);
  3522. }
  3523. // JMP $fallthru
  3524. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrLdFld->m_func);
  3525. instrLdFld->InsertBefore(instr);
  3526. }
  3527. void
  3528. LowererMD::GenerateLdLocalFldFromFlagInlineCache(
  3529. IR::Instr * instrLdFld,
  3530. IR::RegOpnd * opndBase,
  3531. IR::Opnd * opndDst,
  3532. IR::RegOpnd * opndInlineCache,
  3533. IR::LabelInstr * labelFallThru,
  3534. bool isInlineSlot)
  3535. {
  3536. // Generate:
  3537. //
  3538. // s1 = MOV [&base->slots] -- load the slot array
  3539. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  3540. // dst = MOV [s1 + s2*4]
  3541. // JMP $fallthru
  3542. IR::Instr * instr;
  3543. IR::Opnd* slotIndexOpnd;
  3544. IR::IndirOpnd * opndIndir;
  3545. IR::RegOpnd * opndSlotArray = nullptr;
  3546. if (!isInlineSlot)
  3547. {
  3548. opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  3549. opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
  3550. instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrLdFld->m_func);
  3551. instrLdFld->InsertBefore(instr);
  3552. }
  3553. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  3554. IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  3555. slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, instrLdFld->m_func);
  3556. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrLdFld->m_func);
  3557. instrLdFld->InsertBefore(instr);
  3558. if (isInlineSlot)
  3559. {
  3560. // dst = MOV [s1 + s2*4]
  3561. opndIndir = IR::IndirOpnd::New(opndBase, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  3562. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
  3563. instrLdFld->InsertBefore(instr);
  3564. }
  3565. else
  3566. {
  3567. // dst = MOV [s1 + s2*4]
  3568. opndIndir = IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  3569. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
  3570. instrLdFld->InsertBefore(instr);
  3571. }
  3572. // JMP $fallthru
  3573. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrLdFld->m_func);
  3574. instrLdFld->InsertBefore(instr);
  3575. }
  3576. void
  3577. LowererMD::GenerateLdFldFromFlagInlineCache(
  3578. IR::Instr * insertBeforeInstr,
  3579. IR::RegOpnd * opndBase,
  3580. IR::Opnd * opndDst,
  3581. IR::RegOpnd * opndInlineCache,
  3582. IR::LabelInstr * labelFallThru,
  3583. bool isInlineSlot)
  3584. {
  3585. // Generate:
  3586. //
  3587. // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
  3588. // s1 = MOV [&s1->slots] -- load the slot array
  3589. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  3590. // dst = MOV [s1 + s2*4]
  3591. // JMP $fallthru
  3592. IR::Instr * instr;
  3593. IR::Opnd* inlineCacheObjOpnd;
  3594. IR::IndirOpnd * opndIndir;
  3595. IR::RegOpnd * opndObjSlots = nullptr;
  3596. inlineCacheObjOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);
  3597. // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
  3598. IR::RegOpnd *opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
  3599. instr = IR::Instr::New(Js::OpCode::MOV, opndObject, inlineCacheObjOpnd, this->m_func);
  3600. insertBeforeInstr->InsertBefore(instr);
  3601. if (!isInlineSlot)
  3602. {
  3603. // s1 = MOV [&s1->slots] -- load the slot array
  3604. opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
  3605. opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  3606. instr = IR::Instr::New(Js::OpCode::MOV, opndObjSlots, opndIndir, this->m_func);
  3607. insertBeforeInstr->InsertBefore(instr);
  3608. }
  3609. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  3610. IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
  3611. IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
  3612. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, this->m_func);
  3613. insertBeforeInstr->InsertBefore(instr);
  3614. if (isInlineSlot)
  3615. {
  3616. // dst = MOV [s1 + s2*4]
  3617. opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, this->lowererMDArch.GetDefaultIndirScale(), TyMachReg, this->m_func);
  3618. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, this->m_func);
  3619. insertBeforeInstr->InsertBefore(instr);
  3620. }
  3621. else
  3622. {
  3623. // dst = MOV [s1 + s2*4]
  3624. opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, this->lowererMDArch.GetDefaultIndirScale(), TyMachReg, this->m_func);
  3625. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, this->m_func);
  3626. insertBeforeInstr->InsertBefore(instr);
  3627. }
  3628. // JMP $fallthru
  3629. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
  3630. insertBeforeInstr->InsertBefore(instr);
  3631. }
  3632. void
  3633. LowererMD::GenerateLdFldFromProtoInlineCache(
  3634. IR::Instr * instrLdFld,
  3635. IR::RegOpnd * opndBase,
  3636. IR::Opnd * opndDst,
  3637. IR::RegOpnd * inlineCache,
  3638. IR::LabelInstr * labelFallThru,
  3639. bool isInlineSlot)
  3640. {
  3641. // Generate:
  3642. //
  3643. // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
  3644. // s1 = MOV [&s1->slots] -- load the slot array
  3645. // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
  3646. // dst = MOV [s1 + s2*4]
  3647. // JMP $fallthru
  3648. IR::Instr * instr;
  3649. IR::Opnd* inlineCacheProtoOpnd;
  3650. IR::IndirOpnd * opndIndir;
  3651. IR::RegOpnd * opndProtoSlots = nullptr;
  3652. inlineCacheProtoOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.prototypeObject), TyMachReg, instrLdFld->m_func);
  3653. // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
  3654. IR::RegOpnd *opndProto = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  3655. instr = IR::Instr::New(Js::OpCode::MOV, opndProto, inlineCacheProtoOpnd, instrLdFld->m_func);
  3656. instrLdFld->InsertBefore(instr);
  3657. if (!isInlineSlot)
  3658. {
  3659. // s1 = MOV [&s1->slots] -- load the slot array
  3660. opndProtoSlots = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  3661. opndIndir = IR::IndirOpnd::New(opndProto, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
  3662. instr = IR::Instr::New(Js::OpCode::MOV, opndProtoSlots, opndIndir, instrLdFld->m_func);
  3663. instrLdFld->InsertBefore(instr);
  3664. }
  3665. // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
  3666. IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  3667. IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.slotIndex), TyUint16, instrLdFld->m_func);
  3668. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrLdFld->m_func);
  3669. instrLdFld->InsertBefore(instr);
  3670. if (isInlineSlot)
  3671. {
  3672. // dst = MOV [s1 + s2*4]
  3673. opndIndir = IR::IndirOpnd::New(opndProto, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  3674. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
  3675. instrLdFld->InsertBefore(instr);
  3676. }
  3677. else
  3678. {
  3679. // dst = MOV [s1 + s2*4]
  3680. opndIndir = IR::IndirOpnd::New(opndProtoSlots, opndSlotIndex, LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  3681. instr = IR::Instr::New(Js::OpCode::MOV, opndDst, opndIndir, instrLdFld->m_func);
  3682. instrLdFld->InsertBefore(instr);
  3683. }
  3684. // JMP $fallthru
  3685. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrLdFld->m_func);
  3686. instrLdFld->InsertBefore(instr);
  3687. }
  3688. void
  3689. LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndTaggedType)
  3690. {
  3691. // Generate
  3692. //
  3693. // MOV taggedType, type
  3694. // OR taggedType, InlineCacheAuxSlotTypeTag
  3695. // MOV taggedType, type
  3696. {
  3697. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV, opndTaggedType, opndType, instrLdSt->m_func);
  3698. instrLdSt->InsertBefore(instrMov);
  3699. }
  3700. // OR taggedType, InlineCacheAuxSlotTypeTag
  3701. {
  3702. IR::IntConstOpnd * opndAuxSlotTag = IR::IntConstOpnd::New(InlineCacheAuxSlotTypeTag, TyMachPtr, instrLdSt->m_func);
  3703. IR::Instr * instrAnd = IR::Instr::New(Js::OpCode::OR, opndTaggedType, opndTaggedType, opndAuxSlotTag, instrLdSt->m_func);
  3704. instrLdSt->InsertBefore(instrAnd);
  3705. }
  3706. }
  3707. ///----------------------------------------------------------------------------
  3708. ///
  3709. /// LowererMD::GenerateFastLdMethodFromFlags
  3710. ///
  3711. /// Make use of the helper to cache the type and slot index used to do a LdFld
  3712. /// and do an inline load from the appropriate slot if the type hasn't changed
  3713. /// since the last time this LdFld was executed.
  3714. ///
  3715. ///----------------------------------------------------------------------------
  3716. bool
  3717. LowererMD::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
  3718. {
  3719. IR::LabelInstr * labelFallThru;
  3720. IR::LabelInstr * bailOutLabel;
  3721. IR::Opnd * opndSrc;
  3722. IR::Opnd * opndDst;
  3723. IR::RegOpnd * opndBase;
  3724. IR::RegOpnd * opndType;
  3725. IR::RegOpnd * opndInlineCache;
  3726. opndSrc = instrLdFld->GetSrc1();
  3727. AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
  3728. "Expected property sym operand as src of LdFldFlags");
  3729. IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
  3730. Assert(!instrLdFld->DoStackArgsOpt(this->m_func));
  3731. if (propertySymOpnd->IsTypeCheckSeqCandidate())
  3732. {
  3733. AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
  3734. StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
  3735. opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
  3736. }
  3737. else
  3738. {
  3739. opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3740. }
  3741. opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  3742. opndDst = instrLdFld->GetDst();
  3743. opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  3744. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3745. // Label to jump to (or fall through to) when bailing out
  3746. bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);
  3747. instrLdFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), this->m_func));
  3748. IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  3749. // Check the flag cache with the untagged type
  3750. this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
  3751. // Blindly do the check for getter flag first and then do the type check
  3752. // We avoid repeated check for getter flag when the function object may be in either
  3753. // inline slots or auxiliary slots
  3754. GenerateFlagInlineCacheCheckForGetterSetter(instrLdFld, opndInlineCache, bailOutLabel);
  3755. GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
  3756. GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
  3757. // Check the flag cache with the tagged type
  3758. instrLdFld->InsertBefore(labelFlagAux);
  3759. IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
  3760. GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
  3761. GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
  3762. GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
  3763. instrLdFld->InsertBefore(bailOutLabel);
  3764. instrLdFld->InsertAfter(labelFallThru);
  3765. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  3766. // ordering instructions anymore.
  3767. instrLdFld->UnlinkSrc1();
  3768. this->m_lowerer->GenerateBailOut(instrLdFld);
  3769. return true;
  3770. }
void
LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
{
// Hash the type pointer into an index of the polymorphic inline cache array
// and advance opndInlineCache (in place) to point at that cache entry.
// polymorphicInlineCacheSize must be a power of two for the AND masking.
// Generate
//
// MOV r1, type
// SHR r1, PolymorphicInlineCacheShift
// AND r1, (size - 1)
// SHL r1, log2(sizeof(Js::InlineCache))
// LEA inlineCache, [inlineCache + r1]
// MOV r1, type
IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, instrLdSt->m_func);
IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndOffset, opndType, instrLdSt->m_func);
instrLdSt->InsertBefore(instr);
IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
// instead of generating
// SHR r1, PolymorphicInlineCacheShift
// AND r1, (size - 1)
// SHL r1, log2(sizeof(Js::InlineCache))
//
// we can generate:
// SHR r1, (PolymorphicInlineCacheShift - log2(sizeof(Js::InlineCache))
// AND r1, (size - 1) << log2(sizeof(Js::InlineCache))
// The folded form requires the right shift to dominate the left shift,
// otherwise the combined shift amount would go negative.
Assert(rightShiftAmount > leftShiftAmount);
instr = IR::Instr::New(Js::OpCode::SHR, opndOffset, opndOffset, IR::IntConstOpnd::New(rightShiftAmount - leftShiftAmount, TyUint8, instrLdSt->m_func, true), instrLdSt->m_func);
instrLdSt->InsertBefore(instr);
// Mask to the cache size, pre-scaled by the entry size so the result is a
// ready-to-use byte offset.
instr = IR::Instr::New(Js::OpCode::AND, opndOffset, opndOffset, IR::IntConstOpnd::New(((__int64)(polymorphicInlineCacheSize - 1) << leftShiftAmount), TyMachReg, instrLdSt->m_func, true), instrLdSt->m_func);
instrLdSt->InsertBefore(instr);
// LEA inlineCache, [inlineCache + r1]
IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(opndInlineCache, opndOffset, TyMachPtr, instrLdSt->m_func);
instr = IR::Instr::New(Js::OpCode::LEA, opndInlineCache, indirOpnd, instrLdSt->m_func);
instrLdSt->InsertBefore(instr);
}
IR::Instr *
LowererMD::ChangeToWriteBarrierAssign(IR::Instr * assignInstr, const Func* func)
{
// Lower assignInstr to a machine assign and, when the software write
// barrier is JIT-enabled and the destination may be a recycler-heap
// pointer slot, append the card-table marking sequence.
// Returns the first instruction of the lowered sequence.
#ifdef RECYCLER_WRITE_BARRIER_JIT
IR::Opnd* dest = assignInstr->GetDst();
auto threadContextInfo = func->GetTopFunc()->GetThreadContextInfo();
void* destAddr = nullptr;
bool isPossibleBarrieredDest = false;
// Only pointer-sized stores can install a recycler reference.
if (TySize[dest->GetType()] == sizeof(void*))
{
if (dest->IsIndirOpnd())
{
Assert(!dest->AsIndirOpnd()->HasAddrKind());
isPossibleBarrieredDest = true;
}
else if (dest->IsMemRefOpnd())
{
// looks all thread context field access are from MemRefOpnd
// Exclude known thread-context bookkeeping addresses and any
// misaligned address, which cannot be a recycler pointer slot.
destAddr = (void*)dest->AsMemRefOpnd()->GetMemLoc();
isPossibleBarrieredDest = destAddr != nullptr
&& ((intptr_t)destAddr % sizeof(void*)) == 0
&& destAddr != (void*)threadContextInfo->GetImplicitCallFlagsAddr()
&& destAddr != (void*)threadContextInfo->GetDisableImplicitFlagsAddr()
&& destAddr != (void*)threadContextInfo->GetBailOutRegisterSaveSpaceAddr();
if (isPossibleBarrieredDest)
{
Assert(Recycler::WBCheckIsRecyclerAddress((char*)destAddr));
}
}
}
#endif
IR::Instr * instr = ChangeToAssignNoBarrierCheck(assignInstr);
// Now insert write barrier if necessary
#ifdef RECYCLER_WRITE_BARRIER_JIT
if (isPossibleBarrieredDest
&& assignInstr->m_opcode == Js::OpCode::MOV // ignore SSE instructions like MOVSD
&& assignInstr->GetSrc1()->IsWriteBarrierTriggerableValue())
{
instr = LowererMD::GenerateWriteBarrier(assignInstr);
}
#endif
return instr;
}
  3848. void
  3849. LowererMD::GenerateWriteBarrierAssign(IR::MemRefOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3850. {
  3851. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3852. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3853. if (opndSrc->IsWriteBarrierTriggerableValue())
  3854. {
  3855. void * address = (void *)opndDst->AsMemRefOpnd()->GetMemLoc();
  3856. #ifdef RECYCLER_WRITE_BARRIER_BYTE
  3857. // WriteBarrier-TODO: need to pass card table address through RPC
  3858. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3859. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyInt8, insertBeforeInstr->m_func);
  3860. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntry, IR::IntConstOpnd::New(1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr->m_func);
  3861. insertBeforeInstr->InsertBefore(movInstr);
  3862. #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
  3863. if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(RecyclerVerifyMark))
  3864. {
  3865. this->LoadHelperArgument(insertBeforeInstr, opndDst);
  3866. IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
  3867. insertBeforeInstr->InsertBefore(instrCall);
  3868. this->ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
  3869. }
  3870. #endif
  3871. #else
  3872. IR::MemRefOpnd * cardTableEntry = IR::MemRefOpnd::New(
  3873. &RecyclerWriteBarrierManager::GetAddressOfCardTable()[RecyclerWriteBarrierManager::GetCardTableIndex(address)], TyMachPtr, assignInstr->m_func);
  3874. IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntry,
  3875. IR::IntConstOpnd::New(1 << ((uint)address >> 7), TyInt32, assignInstr->m_func), assignInstr->m_func);
  3876. assignInstr->InsertBefore(orInstr);
  3877. #endif
  3878. }
  3879. #endif
  3880. }
  3881. void
  3882. LowererMD::GenerateWriteBarrierAssign(IR::IndirOpnd * opndDst, IR::Opnd * opndSrc, IR::Instr * insertBeforeInstr)
  3883. {
  3884. #ifdef RECYCLER_WRITE_BARRIER_JIT
  3885. if (opndSrc->IsWriteBarrierTriggerableValue())
  3886. {
  3887. IR::RegOpnd * writeBarrierAddrRegOpnd = IR::RegOpnd::New(TyMachPtr, insertBeforeInstr->m_func);
  3888. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::LEA, writeBarrierAddrRegOpnd, opndDst, insertBeforeInstr->m_func));
  3889. IR::Instr* movInstr = IR::Instr::New(Js::OpCode::MOV,
  3890. IR::IndirOpnd::New(writeBarrierAddrRegOpnd, 0, TyMachReg, insertBeforeInstr->m_func), opndSrc, insertBeforeInstr->m_func);
  3891. insertBeforeInstr->InsertBefore(movInstr);
  3892. GenerateWriteBarrier(movInstr);
  3893. // The mov happens above, and it's slightly faster doing it that way since we've already calculated the address we're writing to
  3894. return;
  3895. }
  3896. #endif
  3897. Lowerer::InsertMove(opndDst, opndSrc, insertBeforeInstr);
  3898. return;
  3899. }
#ifdef RECYCLER_WRITE_BARRIER_JIT
// Emit the recycler's software write barrier for the store performed by 'assignInstr':
// after the store executes, the card covering the written address is marked so the
// recycler knows to rescan that region.
// Returns the first instruction of the emitted sequence (the LEA computing the written
// address), so the caller can resume lowering from there.
IR::Instr*
LowererMD::GenerateWriteBarrier(IR::Instr * assignInstr)
{
#if defined(RECYCLER_WRITE_BARRIER_BYTE)
PHASE_PRINT_TRACE(Js::JitWriteBarrierPhase, assignInstr->m_func, _u("Generating write barrier\n"));
// indexOpnd = LEA addressOfWrite (inserted BEFORE the store, reusing the already
// computed destination address), then SHR by 12 to get the 4KB card index.
IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachPtr, assignInstr->m_func);
IR::Instr * loadIndexInstr = IR::Instr::New(Js::OpCode::LEA, indexOpnd, assignInstr->GetDst(), assignInstr->m_func);
assignInstr->InsertBefore(loadIndexInstr);
// The SHR and everything that follows are inserted AFTER the store itself, so the
// card is marked once the write has happened.
IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
IR::IntConstOpnd::New(12 /* 1 << 12 = 4096 */, TyInt8, assignInstr->m_func), assignInstr->m_func);
assignInstr->InsertAfter(shiftBitInstr);
// The cardtable address is likely 64 bits already so we have to load it to a register
// That is, we have to do the following:
// LEA reg1, targetOfWrite
// SHR reg1, 12
// MOV reg2, cardTableAddress
// MOV [reg1 + reg2], 1
//
// Instead of doing this:
// LEA reg1, targetOfWrite
// SHR reg1, 12
// MOV [cardTableAddress + reg2], 1
//
//TODO: (leish)(swb) hoist RecyclerWriteBarrierManager::GetAddressOfCardTable()
IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindWriteBarrierCardTable, assignInstr->m_func),
assignInstr->m_func);
shiftBitInstr->InsertAfter(cardTableAddrInstr);
// cardTable[index] = 1 -- byte-granular card marking
IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd,
TyInt8, assignInstr->m_func);
IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, cardTableEntryOpnd, IR::IntConstOpnd::New(1, TyInt8, assignInstr->m_func), assignInstr->m_func);
cardTableAddrInstr->InsertAfter(movInstr);
return loadIndexInstr;
#else
// Bit-granular card table: set bit (1 << (addr >> 7)) in cardTable[(addr >> 7) >> 5].
// NOTE(review): 'writeBarrierAddrRegOpnd' is not declared anywhere in this function,
// and this path never returns a value even though the function returns IR::Instr*.
// This branch looks stale and would not compile if RECYCLER_WRITE_BARRIER_BYTE were
// undefined -- verify before enabling.
Assert(writeBarrierAddrRegOpnd->IsRegOpnd());
// Copy the written address into the architecture's shift-count register so it can
// feed the variable SHL below.
IR::RegOpnd * shiftBitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
shiftBitOpnd->SetReg(LowererMDArch::GetRegShiftCount());
IR::Instr * moveShiftBitOpnd = IR::Instr::New(Js::OpCode::MOV, shiftBitOpnd, writeBarrierAddrRegOpnd, assignInstr->m_func);
assignInstr->InsertBefore(moveShiftBitOpnd);
// shiftBit = addr >> 7 -- 128-byte granularity per bit
IR::Instr * shiftBitInstr = IR::Instr::New(Js::OpCode::SHR, shiftBitOpnd, shiftBitOpnd,
IR::IntConstOpnd::New(7 /* 1 << 7 = 128 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
assignInstr->InsertBefore(shiftBitInstr);
// bit = 1 << shiftBit
IR::RegOpnd * bitOpnd = IR::RegOpnd::New(TyInt32, assignInstr->m_func);
IR::Instr * mov1Instr = IR::Instr::New(Js::OpCode::MOV, bitOpnd,
IR::IntConstOpnd::New(1, TyInt32, assignInstr->m_func), assignInstr->m_func);
assignInstr->InsertBefore(mov1Instr);
IR::Instr * bitInstr = IR::Instr::New(Js::OpCode::SHL, bitOpnd, bitOpnd, shiftBitOpnd, assignInstr->m_func);
assignInstr->InsertBefore(bitInstr);
// index = shiftBit >> 5 -- 32 bits per card-table entry; reuses the same register
IR::RegOpnd * indexOpnd = shiftBitOpnd;
IR::Instr * indexInstr = IR::Instr::New(Js::OpCode::SHR, indexOpnd, indexOpnd,
IR::IntConstOpnd::New(5 /* 1 << 5 = 32 */, TyInt32, assignInstr->m_func), assignInstr->m_func);
assignInstr->InsertBefore(indexInstr);
IR::RegOpnd * cardTableRegOpnd = IR::RegOpnd::New(TyMachReg, assignInstr->m_func);
IR::Instr * cardTableAddrInstr = IR::Instr::New(Js::OpCode::MOV, cardTableRegOpnd,
IR::AddrOpnd::New(RecyclerWriteBarrierManager::GetAddressOfCardTable(), IR::AddrOpndKindDynamicMisc, assignInstr->m_func),
assignInstr->m_func);
assignInstr->InsertBefore(cardTableAddrInstr);
// cardTable[index] |= bit
IR::IndirOpnd * cardTableEntryOpnd = IR::IndirOpnd::New(cardTableRegOpnd, indexOpnd, LowererMDArch::GetDefaultIndirScale(),
TyInt32, assignInstr->m_func);
IR::Instr * orInstr = IR::Instr::New(Js::OpCode::OR, cardTableEntryOpnd, cardTableEntryOpnd,
bitOpnd, assignInstr->m_func);
assignInstr->InsertBefore(orInstr);
#endif
}
#endif
  3967. void
  3968. LowererMD::GenerateStFldFromLocalInlineCache(
  3969. IR::Instr * instrStFld,
  3970. IR::RegOpnd * opndBase,
  3971. IR::Opnd * opndSrc,
  3972. IR::RegOpnd * inlineCache,
  3973. IR::LabelInstr * labelFallThru,
  3974. bool isInlineSlot)
  3975. {
  3976. IR::Instr * instr;
  3977. IR::Opnd* slotIndexOpnd;
  3978. IR::RegOpnd * opndIndirBase = opndBase;
  3979. if (!isInlineSlot)
  3980. {
  3981. // slotArray = MOV base->slots -- load the slot array
  3982. IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3983. IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrStFld->m_func);
  3984. instr = IR::Instr::New(Js::OpCode::MOV, opndSlotArray, opndIndir, instrStFld->m_func);
  3985. instrStFld->InsertBefore(instr);
  3986. opndIndirBase = opndSlotArray;
  3987. }
  3988. // slotIndex = MOV [&inlineCache->u.local.inlineSlotOffsetOrAuxSlotIndex] -- load the cached slot offset or index
  3989. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrStFld->m_func);
  3990. slotIndexOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrStFld->m_func);
  3991. instr = IR::Instr::New(Js::OpCode::MOVZXW, opndSlotIndex, slotIndexOpnd, instrStFld->m_func);
  3992. instrStFld->InsertBefore(instr);
  3993. // [base + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  3994. // [slotArray + slotIndex * (1 << indirScale)] = MOV src -- store the value directly to the slot
  3995. IR::IndirOpnd * storeLocIndirOpnd = IR::IndirOpnd::New(opndIndirBase, opndSlotIndex,
  3996. LowererMDArch::GetDefaultIndirScale(), TyMachReg, instrStFld->m_func);
  3997. GenerateWriteBarrierAssign(storeLocIndirOpnd, opndSrc, instrStFld);
  3998. // JMP $fallthru
  3999. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, instrStFld->m_func);
  4000. instrStFld->InsertBefore(instr);
  4001. }
  4002. //----------------------------------------------------------------------------
  4003. //
  4004. // LowererMD::GenerateFastScopedLdFld
  4005. //
  4006. // Make use of the helper to cache the type and slot index used to do a ScopedLdFld
  4007. // when the scope is an array of length 1.
  4008. // Extract the only element from array and do an inline load from the appropriate slot
  4009. // if the type hasn't changed since the last time this ScopedLdFld was executed.
  4010. //
  4011. //----------------------------------------------------------------------------
// Emits the inline-cache fast path for a scoped load; the caller lowers the slow
// path (PatchGetPropertyScoped helper call) between the $helper and $fallthru
// labels inserted here. Returns the instruction preceding instrLdScopedFld (the
// freshly inserted $helper label), from which the caller continues lowering.
IR::Instr *
LowererMD::GenerateFastScopedLdFld(IR::Instr * instrLdScopedFld)
{
// CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
// JNE $helper
// MOV r1, [base + offset(scopes)] -- load the first scope
// MOV r2, r1->type
// CMP r2, [&(inlineCache->u.local.type)] -- check type
// JNE $helper
// MOV r1, r1->slots -- load the slots array
// MOV r2 , [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
// MOV dst, [r1+r2] -- load the value from the slot
// JMP $fallthru
// $helper:
// dst = CALL PatchGetPropertyScoped(inlineCache, base, field, defaultInstance, scriptContext)
// $fallthru:
IR::RegOpnd * opndBase;
IR::Instr * instr;
IR::IndirOpnd * indirOpnd;
IR::LabelInstr * labelHelper;
IR::Opnd * opndDst;
IR::RegOpnd * inlineCache;
IR::RegOpnd *r1;
IR::LabelInstr * labelFallThru;
// src1 must be the property sym operand describing the scoped field being loaded.
IR::Opnd *propertySrc = instrLdScopedFld->GetSrc1();
AssertMsg(propertySrc->IsSymOpnd() && propertySrc->AsSymOpnd()->IsPropertySymOpnd() && propertySrc->AsSymOpnd()->m_sym->IsPropertySym(),
"Expected property sym operand as src of LdScoped");
IR::PropertySymOpnd * propertySymOpnd = propertySrc->AsPropertySymOpnd();
opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
// src2 (the default instance) is only asserted here; the helper path consumes it.
IR::Opnd *srcBase = instrLdScopedFld->GetSrc2();
AssertMsg(srcBase->IsRegOpnd(), "Expected reg opnd as src2");
//opndBase = srcBase;
//IR::IndirOpnd * indirOpnd = src->AsIndirOpnd();
labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");
// CMP [base + offset(length)], 1 -- fast path only applies to a frame display of length 1.
indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
instr->SetSrc1(indirOpnd);
instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
instrLdScopedFld->InsertBefore(instr);
// JNE $helper
instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
instrLdScopedFld->InsertBefore(instr);
// MOV r1, [base + offset(scopes)] -- load the first (and only) scope
indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
instrLdScopedFld->InsertBefore(instr);
//first load the inlineCache type
inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
Assert(inlineCache != nullptr);
IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
opndDst = instrLdScopedFld->GetDst();
labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
// The scope object is known not to be a tagged int.
r1->m_sym->m_isNotInt = true;
// Load the type (branches to $helper if r1 is not an object)
this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdScopedFld, r1, opndType, labelHelper);
// Check the local cache with the tagged type
IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
GenerateLoadTaggedType(instrLdScopedFld, opndType, opndTaggedType);
instrLdScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdScopedFld, propertySymOpnd), this->m_func));
GenerateLocalInlineCacheCheck(instrLdScopedFld, opndTaggedType, inlineCache, labelHelper);
// On a cache hit, load the slot value into dst and jump to $fallthru.
GenerateLdFldFromLocalInlineCache(instrLdScopedFld, r1, opndDst, inlineCache, labelFallThru, false);
// $helper:
// dst = CALL PatchGetPropertyScoped(inlineCache, opndBase, propertyId, srcBase, scriptContext)
// $fallthru:
instrLdScopedFld->InsertBefore(labelHelper);
instrLdScopedFld->InsertAfter(labelFallThru);
return instrLdScopedFld->m_prev;
}
  4083. //----------------------------------------------------------------------------
  4084. //
  4085. // LowererMD::GenerateFastScopedStFld
  4086. //
  4087. // Make use of the helper to cache the type and slot index used to do a ScopedStFld
  4088. // when the scope is an array of length 1.
// Extract the only element from the array and do an inline store to the appropriate slot
  4090. // if the type hasn't changed since the last time this ScopedStFld was executed.
  4091. //
  4092. //----------------------------------------------------------------------------
// Emits the inline-cache fast path for a scoped store; the caller lowers the slow
// path (PatchSetPropertyScoped helper call) between the $helper and $fallthru
// labels inserted here. Returns the instruction preceding instrStScopedFld (the
// freshly inserted $helper label), from which the caller continues lowering.
IR::Instr *
LowererMD::GenerateFastScopedStFld(IR::Instr * instrStScopedFld)
{
// CMP [base + offset(length)], 1 -- get the length on array and test if it is 1.
// JNE $helper
// MOV r1, [base + offset(scopes)] -- load the first scope
// MOV r2, r1->type
// CMP r2, [&(inlineCache->u.local.type)] -- check type
// JNE $helper
// MOV r1, r1->slots -- load the slots array
// MOV r2, [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
// [r1 + r2*4] = MOV value -- store the value directly to the slot
// JMP $fallthru
// $helper:
// CALL PatchSetPropertyScoped(inlineCache, base, field, value, defaultInstance, scriptContext)
// $fallthru:
IR::RegOpnd * opndBase;
IR::Instr * instr;
IR::IndirOpnd * indirOpnd;
IR::LabelInstr * labelHelper;
IR::Opnd * opndDst;
IR::RegOpnd * inlineCache;
IR::RegOpnd *r1;
IR::LabelInstr * labelFallThru;
// src1 is the value being stored; dst is the property sym operand for the scoped field.
IR::Opnd *newValue = instrStScopedFld->GetSrc1();
// IR::Opnd *defaultInstance = instrStScopedFld->UnlinkSrc2();
opndDst = instrStScopedFld->GetDst();
AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(),
"Expected property sym operand as dst of StScoped");
IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
AssertMsg(opndBase->m_sym->m_isSingleDef, "We assume this isn't redefined");
// CMP [base + offset(length)], 1 -- fast path only applies to a frame display of length 1.
indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfLength(), TyInt16, this->m_func);
instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
instr->SetSrc1(indirOpnd);
instr->SetSrc2(IR::IntConstOpnd::New(0x1, TyInt8, this->m_func));
instrStScopedFld->InsertBefore(instr);
// JNE $helper
instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
instrStScopedFld->InsertBefore(instr);
// MOV r1, [base + offset(scopes)] -- load the first (and only) scope
indirOpnd = IR::IndirOpnd::New(opndBase, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, this->m_func);
r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
instrStScopedFld->InsertBefore(instr);
//first load the inlineCache type
inlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
Assert(inlineCache != nullptr);
IR::RegOpnd * opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
// The scope object is known not to be a tagged int.
r1->m_sym->m_isNotInt = true;
// Load the type (branches to $helper if r1 is not an object)
this->m_lowerer->GenerateObjectTestAndTypeLoad(instrStScopedFld, r1, opndType, labelHelper);
// Check the local cache with the tagged type
IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
GenerateLoadTaggedType(instrStScopedFld, opndType, opndTaggedType);
instrStScopedFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, inlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrStScopedFld, propertySymOpnd), this->m_func));
GenerateLocalInlineCacheCheck(instrStScopedFld, opndTaggedType, inlineCache, labelHelper);
// On a cache hit, store newValue into the cached slot and jump to $fallthru.
GenerateStFldFromLocalInlineCache(instrStScopedFld, r1, newValue, inlineCache, labelFallThru, false);
// $helper:
// CALL PatchSetPropertyScoped(inlineCache, opndBase, propertyId, newValue, defaultInstance, scriptContext)
// $fallthru:
instrStScopedFld->InsertBefore(labelHelper);
instrStScopedFld->InsertAfter(labelFallThru);
return instrStScopedFld->m_prev;
}
  4161. IR::Opnd *
  4162. LowererMD::CreateStackArgumentsSlotOpnd()
  4163. {
  4164. StackSym *sym = StackSym::New(TyMachReg, this->m_func);
  4165. sym->m_offset = -MachArgsSlotOffset;
  4166. sym->m_allocated = true;
  4167. return IR::SymOpnd::New(sym, TyMachReg, this->m_func);
  4168. }
// Convert a Var-typed register known/expected to hold a tagged int into a raw
// 32-bit integer register. When generateTagCheck is true, emits a tag test that
// branches to labelFail if 'src' is not a tagged int. Instructions are inserted
// before 'assignInstr'; returns the new TyInt32 register holding the value.
IR::RegOpnd *
LowererMD::GenerateUntagVar(IR::RegOpnd * src, IR::LabelInstr * labelFail, IR::Instr * assignInstr, bool generateTagCheck)
{
Assert(src->IsVar());
// MOV valueOpnd, index
IR::RegOpnd *valueOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
//
// Convert Index to 32 bits.
//
IR::Opnd * opnd = src->UseWithNewType(TyMachReg, this->m_func);
#if INT32VAR
// INT32VAR (x64): the int payload sits in the low 32 bits, tag in the upper bits.
if (generateTagCheck)
{
Assert(!opnd->IsTaggedInt());
this->GenerateSmIntTest(opnd, assignInstr, labelFail);
}
// Moving into r2 clears the tag bits on AMD64.
IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, valueOpnd, opnd, this->m_func);
assignInstr->InsertBefore(instr);
#else
// x86: the value is shifted left with the tag in the low bit, so untag via SAR.
IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, valueOpnd, opnd, this->m_func);
assignInstr->InsertBefore(instr);
// SAR valueOpnd, Js::VarTag_Shift
instr = IR::Instr::New(Js::OpCode::SAR, valueOpnd, valueOpnd,
IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
assignInstr->InsertBefore(instr);
if (generateTagCheck)
{
Assert(!opnd->IsTaggedInt());
// SAR set the carry flag (CF) to 1 if the lower bit is 1
// JAE will jmp if CF = 0
instr = IR::BranchInstr::New(Js::OpCode::JAE, labelFail, this->m_func);
assignInstr->InsertBefore(instr);
}
#endif
return valueOpnd;
}
  4206. IR::RegOpnd *LowererMD::LoadNonnegativeIndex(
  4207. IR::RegOpnd *indexOpnd,
  4208. const bool skipNegativeCheck,
  4209. IR::LabelInstr *const notTaggedIntLabel,
  4210. IR::LabelInstr *const negativeLabel,
  4211. IR::Instr *const insertBeforeInstr)
  4212. {
  4213. Assert(indexOpnd);
  4214. Assert(indexOpnd->IsVar() || indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
  4215. Assert(indexOpnd->GetType() != TyUint32 || skipNegativeCheck);
  4216. Assert(!indexOpnd->IsVar() || notTaggedIntLabel);
  4217. Assert(skipNegativeCheck || negativeLabel);
  4218. Assert(insertBeforeInstr);
  4219. if(indexOpnd->IsVar())
  4220. {
  4221. if (indexOpnd->GetValueType().IsLikelyFloat()
  4222. #ifdef _M_IX86
  4223. && AutoSystemInfo::Data.SSE2Available()
  4224. #endif
  4225. )
  4226. {
  4227. return m_lowerer->LoadIndexFromLikelyFloat(indexOpnd, skipNegativeCheck, notTaggedIntLabel, negativeLabel, insertBeforeInstr);
  4228. }
  4229. // mov intIndex, index
  4230. // sar intIndex, 1
  4231. // jae $notTaggedIntOrNegative
  4232. indexOpnd = GenerateUntagVar(indexOpnd, notTaggedIntLabel, insertBeforeInstr, !indexOpnd->IsTaggedInt());
  4233. }
  4234. if(!skipNegativeCheck)
  4235. {
  4236. // test index, index
  4237. // js $notTaggedIntOrNegative
  4238. Lowerer::InsertTestBranch(indexOpnd, indexOpnd, Js::OpCode::JSB, negativeLabel, insertBeforeInstr);
  4239. }
  4240. return indexOpnd;
  4241. }
// Inlines fast-path for int Mul/Add or int Mul/Sub. If not int, call MulAdd/MulSub helper.
// Pattern-matches 'instrAdd' (Add_A or Sub_A) whose one source is the single-def,
// temp-last-use result of the immediately preceding Mul_A. On a match, fuses the
// pair: emits the int31 fast paths for Mul and Add/Sub and a shared helper call
// (Op_MulAdd*/Op_MulSub*), removes the Mul, and returns true. Returns false
// (leaving both instructions untouched) when the pattern doesn't apply.
// *pInstrPrev is set so the caller's main lowering loop resumes before the Mul.
bool LowererMD::TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev)
{
IR::Instr *instrMul = instrAdd->GetPrevRealInstrOrLabel();
IR::Opnd *addSrc;
IR::RegOpnd *addCommonSrcOpnd;
Assert(instrAdd->m_opcode == Js::OpCode::Add_A || instrAdd->m_opcode == Js::OpCode::Sub_A);
bool isSub = (instrAdd->m_opcode == Js::OpCode::Sub_A) ? true : false;
// Mul needs to be a single def reg
if (instrMul->m_opcode != Js::OpCode::Mul_A || instrMul->GetDst()->IsRegOpnd() == false)
{
// Cannot generate MulAdd
return false;
}
if (instrMul->HasBailOutInfo())
{
// Bailout will be generated for the Add, but not the Mul.
// We could handle this, but this path isn't used that much anymore.
return false;
}
IR::RegOpnd *regMulDst = instrMul->GetDst()->AsRegOpnd();
if (regMulDst->m_sym->m_isSingleDef == false)
{
// Cannot generate MulAdd
return false;
}
// Only handle a * b + c, so dst of Mul needs to match left source of Add
if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc1()))
{
addCommonSrcOpnd = instrAdd->GetSrc1()->AsRegOpnd();
addSrc = instrAdd->GetSrc2();
}
else if (instrMul->GetDst()->IsEqual(instrAdd->GetSrc2()))
{
addSrc = instrAdd->GetSrc1();
addCommonSrcOpnd = instrAdd->GetSrc2()->AsRegOpnd();
}
else
{
return false;
}
// Only handle a * b + c where c != a * b
if (instrAdd->GetSrc1()->IsEqual(instrAdd->GetSrc2()))
{
return false;
}
// The Mul result must die at this Add; otherwise it's still needed elsewhere.
if (addCommonSrcOpnd->m_isTempLastUse == false)
{
return false;
}
// If both Mul sources are known tagged ints, the generic paths handle it better.
IR::Opnd *mulSrc1 = instrMul->GetSrc1();
IR::Opnd *mulSrc2 = instrMul->GetSrc2();
if (mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsTaggedInt()
&& mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsTaggedInt())
{
return false;
}
// Save prevInstr for the main lower loop
*pInstrPrev = instrMul->m_prev;
// Generate int31 fast-path for Mul, go to MulAdd helper if it fails, or one of the source is marked notInt
if (!(addSrc->IsRegOpnd() && addSrc->AsRegOpnd()->IsNotInt())
&& !(mulSrc1->IsRegOpnd() && mulSrc1->AsRegOpnd()->IsNotInt())
&& !(mulSrc2->IsRegOpnd() && mulSrc2->AsRegOpnd()->IsNotInt()))
{
this->GenerateFastMul(instrMul);
IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
IR::Instr *instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
instrMul->InsertBefore(instr);
// Generate int31 fast-path for Add
bool success;
if (isSub)
{
success = this->GenerateFastSub(instrAdd);
}
else
{
success = this->GenerateFastAdd(instrAdd);
}
if (!success)
{
// No fast path for the Add/Sub: the helper label is the main path, not a helper block.
labelHelper->isOpHelper = false;
}
// Generate MulAdd helper call
instrAdd->InsertBefore(labelHelper);
}
// Helper argument: either a temp-number slot for the result, or 0 when none is needed.
if (instrAdd->dstIsTempNumber)
{
m_lowerer->LoadHelperTemp(instrAdd, instrAdd);
}
else
{
IR::Opnd *tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
this->LoadHelperArgument(instrAdd, tempOpnd);
}
this->m_lowerer->LoadScriptContext(instrAdd);
// Pick the Left/Right helper variant depending on which Add source the Mul result was.
IR::JnHelperMethod helper;
if (addSrc == instrAdd->GetSrc2())
{
instrAdd->FreeSrc1();
IR::Opnd *addOpnd = instrAdd->UnlinkSrc2();
this->LoadHelperArgument(instrAdd, addOpnd);
helper = isSub ? IR::HelperOp_MulSubRight : IR::HelperOp_MulAddRight;
}
else
{
instrAdd->FreeSrc2();
IR::Opnd *addOpnd = instrAdd->UnlinkSrc1();
this->LoadHelperArgument(instrAdd, addOpnd);
helper = isSub ? IR::HelperOp_MulSubLeft : IR::HelperOp_MulAddLeft;
}
// Pass the Mul's sources to the fused helper, turn the Add into the call, drop the Mul.
IR::Opnd *src2 = instrMul->UnlinkSrc2();
this->LoadHelperArgument(instrAdd, src2);
IR::Opnd *src1 = instrMul->UnlinkSrc1();
this->LoadHelperArgument(instrAdd, src1);
this->ChangeToHelperCall(instrAdd, helper);
instrMul->Remove();
return true;
}
// Emit the inline fast path for Math.abs. Handles three source cases:
// a compile-time tagged-int constant (folded here when it doesn't overflow),
// a tagged int (integer abs via the CDQ/XOR/SUB idiom, overflow-checked), and
// a JavascriptNumber / tagged float (sign-bit clear). Anything else branches
// to labelHelper; integer/constant paths jump to doneLabel via labelFallThru
// structure managed by the caller.
void
LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
// TEST src1, AtomTag
// JEQ $float
// MOV EAX, src
// SAR EAX, AtomTag_Int32
// CDQ
// XOR EAX, EDX
// SUB EAX, EDX
// SHL EAX, AtomTag_Int32
// JO $labelHelper
// INC EAX
// MOV dst, EAX
// JMP $done
// $float
// CMP [src], JavascriptNumber.vtable
// JNE $helper
// MOVSD r1, [src + offsetof(value)]
// ANDPD r1, absDoubleCst
// dst = DoubleToVar(r1)
IR::Instr *instr = nullptr;
IR::LabelInstr *labelFloat = nullptr;
bool isInt = false;
bool isNotInt = false;
if (src->IsRegOpnd())
{
if (src->AsRegOpnd()->IsTaggedInt())
{
isInt = true;
}
else if (src->AsRegOpnd()->IsNotInt())
{
isNotInt = true;
}
}
else if (src->IsAddrOpnd())
{
// Constant tagged-int source: fold abs() at JIT time when the result fits.
IR::AddrOpnd *varOpnd = src->AsAddrOpnd();
Assert(varOpnd->IsVar() && Js::TaggedInt::Is(varOpnd->m_address));
#ifdef _M_X64
__int64 absValue = ::_abs64(Js::TaggedInt::ToInt32(varOpnd->m_address));
#else
__int32 absValue = ::abs(Js::TaggedInt::ToInt32(varOpnd->m_address));
#endif
if (!Js::TaggedInt::IsOverflow(absValue))
{
varOpnd->SetAddress(Js::TaggedInt::ToVarUnchecked((__int32)absValue), IR::AddrOpndKindConstantVar);
instr = IR::Instr::New(Js::OpCode::MOV, dst, varOpnd, this->m_func);
insertInstr->InsertBefore(instr);
return;
}
}
// Non-register source (e.g. overflowing constant): materialize it in a register.
if (src->IsRegOpnd() == false)
{
IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, src, this->m_func);
insertInstr->InsertBefore(instr);
src = regOpnd;
}
// The float fast path needs SSE2 on x86.
#ifdef _M_IX86
bool emitFloatAbs = !isInt && AutoSystemInfo::Data.SSE2Available();
#else
bool emitFloatAbs = !isInt;
#endif
if (!isNotInt)
{
if (!isInt)
{
// Unknown type: tag-test; non-ints go to $float (if emitted) or straight to $helper.
IR::LabelInstr *label = labelHelper;
if (emitFloatAbs)
{
label = labelFloat = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
}
GenerateSmIntTest(src, insertInstr, label);
}
// MOV EAX, src
IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
instr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
insertInstr->InsertBefore(instr);
#ifdef _M_IX86
// SAR EAX, AtomTag_Int32 -- untag the x86 shifted-int representation
instr = IR::Instr::New(Js::OpCode::SAR, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
insertInstr->InsertBefore(instr);
#endif
IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());
// CDQ -- EDX = sign mask of EAX (all 0s or all 1s)
// Note: put EDX on dst to give of def to the EDX lifetime
instr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
insertInstr->InsertBefore(instr);
// XOR EAX, EDX \ branchless abs: flips the bits when negative...
instr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
insertInstr->InsertBefore(instr);
// SUB EAX, EDX / ...then adds 1 back (subtracting -1)
instr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
insertInstr->InsertBefore(instr);
#ifdef _M_X64
// abs(INT_MIN) overflows a 32 bit integer.
// JO $labelHelper
instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
insertInstr->InsertBefore(instr);
#endif
#ifdef _M_IX86
// SHL EAX, AtomTag_Int32 -- retag; overflow here also catches abs(INT_MIN)
instr = IR::Instr::New(Js::OpCode::SHL, regEAX, regEAX, IR::IntConstOpnd::New(Js::AtomTag_Int32, TyInt32, this->m_func), this->m_func);
insertInstr->InsertBefore(instr);
// JO $labelHelper
instr = IR::BranchInstr::New(Js::OpCode::JO, labelHelper, this->m_func);
insertInstr->InsertBefore(instr);
// INC EAX -- set the int tag bit
instr = IR::Instr::New(Js::OpCode::INC, regEAX, regEAX, this->m_func);
insertInstr->InsertBefore(instr);
#endif
// MOV dst, EAX
instr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
insertInstr->InsertBefore(instr);
#ifdef _M_X64
GenerateInt32ToVarConversion(dst, insertInstr);
#endif
}
if (labelFloat)
{
// JMP $done -- skip the float path after the int path succeeds
instr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
insertInstr->InsertBefore(instr);
// $float
insertInstr->InsertBefore(labelFloat);
}
if (emitFloatAbs)
{
#if defined(_M_IX86)
// x86: boxed JavascriptNumber -- vtable check, load the double, mask the sign bit.
// CMP [src], JavascriptNumber.vtable
IR::Opnd *opnd = IR::IndirOpnd::New(src->AsRegOpnd(), (int32)0, TyMachPtr, this->m_func);
instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
instr->SetSrc1(opnd);
instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
insertInstr->InsertBefore(instr);
// JNE $helper
instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
insertInstr->InsertBefore(instr);
// MOVSD r1, [src + offsetof(value)]
opnd = IR::IndirOpnd::New(src->AsRegOpnd(), Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyMachDouble, this->m_func);
instr = IR::Instr::New(Js::OpCode::MOVSD, regOpnd, opnd, this->m_func);
insertInstr->InsertBefore(instr);
this->GenerateFloatAbs(regOpnd, insertInstr);
// dst = DoubleToVar(r1)
SaveDoubleToVar(callInstr->GetDst()->AsRegOpnd(), regOpnd, callInstr, insertInstr);
#elif defined(_M_X64)
// x64: tagged-float representation -- abs by forcing the sign bit on, which the
// tag-removal XOR then clears.
// if (typeof(src) == double)
IR::RegOpnd *src64 = src->AsRegOpnd();
GenerateFloatTest(src64, insertInstr, labelHelper);
// dst64 = MOV src64
insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, dst, src64, this->m_func));
// Unconditionally set the sign bit. This will get XORd away when we remove the tag.
// dst64 = OR 0x8000000000000000
insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::OR, dst, dst, IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func), this->m_func));
#endif
}
else if(!isInt)
{
// The source is not known to be a tagged int, so either it's definitely not an int (isNotInt), or the int version of
// abs failed the tag check and jumped here. We can't emit the float version of abs (!emitFloatAbs) due to SSE2 not
// being available, so jump straight to the helper.
// JMP $helper
instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
insertInstr->InsertBefore(instr);
}
}
  4531. IR::Instr * LowererMD::GenerateFloatAbs(IR::RegOpnd * regOpnd, IR::Instr * insertInstr)
  4532. {
  4533. // ANDPS reg, absDoubleCst
  4534. IR::Opnd * opnd;
  4535. if (regOpnd->IsFloat64())
  4536. {
  4537. opnd = m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueAbsDoubleCst);
  4538. }
  4539. else
  4540. {
  4541. Assert(regOpnd->IsFloat32());
  4542. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsFloatCstAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  4543. }
  4544. // ANDPS has smaller encoding then ANDPD
  4545. IR::Instr * instr = IR::Instr::New(Js::OpCode::ANDPS, regOpnd, regOpnd, opnd, this->m_func);
  4546. insertInstr->InsertBefore(instr);
  4547. Legalize(instr);
  4548. return instr;
  4549. }
  4550. bool LowererMD::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
  4551. IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
  4552. {
  4553. // if regSrcStr is not object, JMP $helper
  4554. // CMP [regSrcStr + offset(type)] , static string type -- check base string type
  4555. // JNE $helper
  4556. // MOV r1, [regSrcStr + offset(m_pszValue)]
  4557. // TEST r1, r1
  4558. // JEQ $helper
  4559. // MOV r2, srcIndex
  4560. // If r2 is not int, JMP $helper
  4561. // Convert r2 to int
  4562. // CMP [regSrcStr + offsetof(length)], r2
  4563. // JBE $helper
  4564. // MOVZX r2, [r1 + r2 * 2]
  4565. // if (charAt)
  4566. // PUSH r1
  4567. // PUSH scriptContext
  4568. // CALL GetStringFromChar
  4569. // MOV dst, EAX
  4570. // else (charCodeAt)
  4571. // if (codePointAt)
  4572. // Lowerer.GenerateFastCodePointAt -- Common inline functions
  4573. // Convert r2 to Var
  4574. // MOV dst, r2
  4575. bool isInt = false;
  4576. bool isNotTaggedValue = false;
  4577. IR::Instr *instr;
  4578. IR::RegOpnd *regSrcStr;
  4579. if (srcStr->IsRegOpnd())
  4580. {
  4581. if (srcStr->AsRegOpnd()->IsTaggedInt())
  4582. {
  4583. isInt = true;
  4584. }
  4585. else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
  4586. {
  4587. isNotTaggedValue = true;
  4588. }
  4589. }
  4590. if (srcStr->IsRegOpnd() == false)
  4591. {
  4592. IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  4593. instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, srcStr, this->m_func);
  4594. insertInstr->InsertBefore(instr);
  4595. regSrcStr = regOpnd;
  4596. }
  4597. else
  4598. {
  4599. regSrcStr = srcStr->AsRegOpnd();
  4600. }
  4601. if (!isNotTaggedValue)
  4602. {
  4603. if (!isInt)
  4604. {
  4605. GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
  4606. }
  4607. else
  4608. {
  4609. // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
  4610. IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
  4611. insertInstr->InsertBefore(fakeBr);
  4612. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
  4613. insertInstr->InsertBefore(instr);
  4614. }
  4615. }
  4616. // Bail out if index a constant and is less than zero.
  4617. if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
  4618. {
  4619. labelHelper->isOpHelper = false;
  4620. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
  4621. insertInstr->InsertBefore(instr);
  4622. return false;
  4623. }
  4624. this->m_lowerer->GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);
  4625. // r1 contains the value of the char16* pointer inside JavascriptString.
  4626. // MOV r1, [regSrcStr + offset(m_pszValue)]
  4627. IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  4628. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
  4629. instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
  4630. insertInstr->InsertBefore(instr);
  4631. // TEST r1, r1 -- Null pointer test
  4632. instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  4633. instr->SetSrc1(r1);
  4634. instr->SetSrc2(r1);
  4635. insertInstr->InsertBefore(instr);
  4636. // JEQ $helper
  4637. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  4638. insertInstr->InsertBefore(instr);
  4639. IR::IndirOpnd *strLength = IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
  4640. if (srcIndex->IsAddrOpnd())
  4641. {
  4642. // CMP [regSrcStr + offsetof(length)], index
  4643. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  4644. instr->SetSrc1(strLength);
  4645. instr->SetSrc2(IR::IntConstOpnd::New(Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address), TyUint32, this->m_func));
  4646. insertInstr->InsertBefore(instr);
  4647. // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
  4648. // JBE $helper
  4649. instr = IR::BranchInstr::New(Js::OpCode::JBE, labelHelper, this->m_func);
  4650. insertInstr->InsertBefore(instr);
  4651. indirOpnd = IR::IndirOpnd::New(r1, Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) * sizeof(char16), TyInt16, this->m_func);
  4652. }
  4653. else
  4654. {
  4655. IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
  4656. // MOV r2, srcIndex
  4657. instr = IR::Instr::New(Js::OpCode::MOV, r2, srcIndex, this->m_func);
  4658. insertInstr->InsertBefore(instr);
  4659. if (!srcIndex->IsRegOpnd() || !srcIndex->AsRegOpnd()->IsTaggedInt())
  4660. {
  4661. GenerateSmIntTest(r2, insertInstr, labelHelper);
  4662. }
  4663. #if INT32VAR
  4664. // Remove the tag
  4665. // MOV r2, [32-bit] r2
  4666. IR::Opnd * r2_32 = r2->UseWithNewType(TyInt32, this->m_func);
  4667. instr = IR::Instr::New(Js::OpCode::MOVSXD, r2, r2_32, this->m_func);
  4668. insertInstr->InsertBefore(instr);
  4669. r2 = r2_32->AsRegOpnd();
  4670. #else
  4671. // r2 = SAR r2, VarTag_Shift
  4672. instr = IR::Instr::New(Js::OpCode::SAR, r2, r2, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  4673. insertInstr->InsertBefore(instr);
  4674. #endif
  4675. // CMP [regSrcStr + offsetof(length)], r2
  4676. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  4677. instr->SetSrc1(strLength);
  4678. instr->SetSrc2(r2);
  4679. insertInstr->InsertBefore(instr);
  4680. if (r2->GetSize() != MachPtr)
  4681. {
  4682. r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  4683. }
  4684. // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
  4685. // JBE $helper
  4686. instr = IR::BranchInstr::New(Js::OpCode::JBE, labelHelper, this->m_func);
  4687. insertInstr->InsertBefore(instr);
  4688. indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyInt16, this->m_func);
  4689. }
  4690. // MOVZX charReg, [r1 + r2 * 2] -- this is the value of the char
  4691. IR::RegOpnd *charReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  4692. instr = IR::Instr::New(Js::OpCode::MOVZXW, charReg, indirOpnd, this->m_func);
  4693. insertInstr->InsertBefore(instr);
  4694. if (index == Js::BuiltinFunction::JavascriptString_CharAt)
  4695. {
  4696. IR::Opnd *resultOpnd;
  4697. if (dst->IsEqual(srcStr))
  4698. {
  4699. resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  4700. }
  4701. else
  4702. {
  4703. resultOpnd = dst;
  4704. }
  4705. this->m_lowerer->GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
  4706. }
  4707. else
  4708. {
  4709. Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
  4710. if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
  4711. {
  4712. this->m_lowerer->GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
  4713. }
  4714. GenerateInt32ToVarConversion(charReg, insertInstr);
  4715. // MOV dst, charReg
  4716. instr = IR::Instr::New(Js::OpCode::MOV, dst, charReg, this->m_func);
  4717. insertInstr->InsertBefore(instr);
  4718. }
  4719. return true;
  4720. }
  4721. IR::RegOpnd* LowererMD::MaterializeDoubleConstFromInt(intptr_t constAddr, IR::Instr* instr)
  4722. {
  4723. IR::Opnd* constVal = IR::MemRefOpnd::New(constAddr, IRType::TyFloat64, this->m_func);
  4724. IR::RegOpnd * xmmReg = IR::RegOpnd::New(TyFloat64, m_func);
  4725. this->m_lowerer->InsertMove(xmmReg, constVal, instr);
  4726. return xmmReg;
  4727. }
  4728. IR::RegOpnd* LowererMD::MaterializeConstFromBits(int bits, IRType type, IR::Instr* instr)
  4729. {
  4730. IR::Opnd * regBits = IR::RegOpnd::New(TyInt32, m_func);
  4731. this->m_lowerer->InsertMove(regBits, IR::IntConstOpnd::New(bits, TyInt32, m_func), instr);
  4732. IR::RegOpnd * regConst = IR::RegOpnd::New(type, m_func);
  4733. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, regConst, regBits, m_func));
  4734. return regConst;
  4735. }
  4736. IR::Opnd* LowererMD::Subtract2To31(IR::Opnd* src1, IR::Opnd* intMinFP, IRType type, IR::Instr* instr)
  4737. {
  4738. Js::OpCode op = (type == TyFloat32) ? Js::OpCode::SUBSS : Js::OpCode::SUBSD;
  4739. IR::Opnd* adjSrc = IR::RegOpnd::New(type, m_func);
  4740. IR::Instr* sub = IR::Instr::New(op, adjSrc, src1, intMinFP, m_func);
  4741. instr->InsertBefore(sub);
  4742. Legalize(sub);
  4743. return adjSrc;
  4744. }
// GenerateTruncChecks: emit the range guard for a checked float->int
// truncation. A float32 source is first widened to float64; the code then
// throws VBSERR_Overflow unless lowerBound < src < upperBound, where the
// bounds depend on the destination's signedness:
//   uint32 dst: -1          < src < 2^32
//   int32  dst: INT_MIN - 1 < src < 2^31
// Returns the float64 operand that the caller should feed to the actual
// conversion instruction.
IR::Opnd* LowererMD::GenerateTruncChecks(IR::Instr* instr)
{
    IR::LabelInstr * conversion = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * throwLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd * src64 = nullptr;
    if (src1->IsFloat32())
    {
        // Widen so the bound comparisons are performed in double precision.
        src64 = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(src64, src1, instr);
    }
    else
    {
        src64 = src1;
    }
    // src <= lowerBound  =>  overflow (throw).
    IR::RegOpnd* limitReg = MaterializeDoubleConstFromInt(instr->GetDst()->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleNegOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMinMinusOneAddr(), instr);
    m_lowerer->InsertCompareBranch(src64, limitReg, Js::OpCode::BrLe_A, throwLabel, instr);
    // upperBound > src  =>  in range, jump to conversion. The compare is
    // emitted without a NaN check, so a NaN source falls through to the throw.
    limitReg = MaterializeDoubleConstFromInt(instr->GetDst()->IsUInt32() ?
        m_func->GetThreadContextInfo()->GetDoubleUintMaxPlusOneAddr() :
        m_func->GetThreadContextInfo()->GetDoubleIntMaxPlusOneAddr(), instr);
    m_lowerer->InsertCompareBranch(limitReg, src64, Js::OpCode::BrGt_A, conversion, instr, true /*no NaN check*/);
    instr->InsertBefore(throwLabel);
    this->m_lowerer->GenerateThrow(IR::IntConstOpnd::New(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), instr);
    //no jump here we aren't coming back
    instr->InsertBefore(conversion);
    return src64;
}
// GenerateTruncWithCheck: lower a checked float->int truncation.
// First emits the MIN/MAX range guard (which throws on overflow), then:
//   - signed dst:   a single CVTTSD2SI suffices.
//   - unsigned dst: CVTTSD2SI only produces signed results, so values in
//     [2^31, 2^32) are handled by subtracting 2^31 before converting and
//     adding 0x80000000 back into the destination afterwards.
// The original instruction is fully expanded and removed.
void
LowererMD::GenerateTruncWithCheck(IR::Instr * instr)
{
    Assert(AutoSystemInfo::Data.SSE2Available());
    IR::Opnd* src64 = GenerateTruncChecks(instr); //converts src to double and checks if MIN <= src <= MAX
    IR::Opnd* dst = instr->GetDst();
    if (dst->IsUnsigned())
    {
        // dst = 0 (or 2^31 on the adjusted path, set below).
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, m_func), instr);
        IR::LabelInstr * skipUnsignedPart = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::Opnd* twoTo31 = MaterializeDoubleConstFromInt(m_func->GetThreadContextInfo()->GetDoubleTwoTo31Addr(), instr);
        // If src < 2^31 it fits in the signed convert directly.
        m_lowerer->InsertCompareBranch(src64, twoTo31, Js::OpCode::BrLt_A, skipUnsignedPart, instr);
        // src -= 2^31; remember the bias in dst.
        instr->InsertBefore(IR::Instr::New(Js::OpCode::SUBPD, src64, src64, twoTo31, m_func));
        m_lowerer->InsertMove(dst, IR::IntConstOpnd::New(0x80000000 /*2^31*/, TyUint32, m_func), instr);
        instr->InsertBefore(skipUnsignedPart);
        // dst += (int)src  -- re-applies the 2^31 bias when it was subtracted.
        IR::Opnd* tmp = IR::RegOpnd::New(TyInt32, m_func);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, tmp, src64, m_func));
        instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, tmp, m_func));
    }
    else
    {
        instr->InsertBefore(IR::Instr::New(Js::OpCode::CVTTSD2SI, dst, src64, m_func));
    }
    // The lowering is complete; drop the original instruction.
    instr->UnlinkSrc1();
    instr->UnlinkDst();
    instr->Remove();
}
// GenerateCtz: lower a count-trailing-zeros instruction.
// Uses the hardware TZCNT instruction when available; otherwise falls back
// to BSF plus a CMOVE that loads the operand bit width for a zero input
// (BSF sets ZF and leaves the destination undefined when the source is 0).
// On x86, int64 sources are expanded by the arch-specific lowerer.
void
LowererMD::GenerateCtz(IR::Instr * instr)
{
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
#ifdef _M_IX86
    // 64-bit operands on x86 are register pairs; defer to the arch lowerer.
    if (instr->GetSrc1()->IsInt64())
    {
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.TZCntAvailable())
    {
        instr->m_opcode = Js::OpCode::TZCNT;
        Legalize(instr);
    }
    else
    {
        // dst = BSF src
        // dst = CMOVE dst, 32 // dst is src1 to help reg alloc
        int instrSize = instr->GetSrc1()->GetSize();
        IRType type = instrSize == 8 ? TyInt64 : TyInt32;
        instr->m_opcode = Js::OpCode::BSF;
        Legalize(instr);
        // 32 or 64, chosen from the operand size, for the src == 0 case.
        IR::IntConstOpnd * const32 = IR::IntConstOpnd::New(instrSize * 8, type, m_func);
        IR::Instr* cmove = IR::Instr::New(Js::OpCode::CMOVE, instr->GetDst(), instr->GetDst(), const32, this->m_func);
        instr->InsertAfter(cmove);
        Legalize(cmove);
    }
}
// GeneratePopCnt: lower a population-count (count of set bits) instruction.
// Uses the hardware POPCNT instruction when available; otherwise lowers to
// a call to the HelperPopCnt32/HelperPopCnt64 runtime helper. On x86,
// int64 sources are expanded by the arch-specific lowerer.
void
LowererMD::GeneratePopCnt(IR::Instr * instr)
{
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
#ifdef _M_IX86
    // 64-bit operands on x86 are register pairs; defer to the arch lowerer.
    if (instr->GetSrc1()->IsInt64())
    {
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.PopCntAvailable())
    {
        instr->m_opcode = Js::OpCode::POPCNT;
        Legalize(instr);
    }
    else
    {
        // Helper fallback: pick the 32- vs 64-bit helper from the source size.
        int instrSize = instr->GetSrc1()->GetSize();
        LoadHelperArgument(instr, instr->GetSrc1());
        instr->UnlinkSrc1();
        this->ChangeToHelperCall(instr, instrSize == 8 ? IR::HelperPopCnt64 : IR::HelperPopCnt32);
    }
}
// GenerateClz: lower a count-leading-zeros instruction.
// Uses the hardware LZCNT instruction when available. Otherwise BSR yields
// the index of the highest set bit, so dst = (width - 1) - index; a zero
// input (BSR sets ZF) takes a separate path producing the full bit width.
// On x86, int64 sources are expanded by the arch-specific lowerer.
void
LowererMD::GenerateClz(IR::Instr * instr)
{
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
    Assert(IRType_IsNativeInt(instr->GetDst()->GetType()));
#ifdef _M_IX86
    // 64-bit operands on x86 are register pairs; defer to the arch lowerer.
    if (instr->GetSrc1()->IsInt64())
    {
        lowererMDArch.EmitInt64Instr(instr);
        return;
    }
#endif
    if (AutoSystemInfo::Data.LZCntAvailable())
    {
        instr->m_opcode = Js::OpCode::LZCNT;
        Legalize(instr);
    }
    else
    {
        // tmp = BSR src
        // JE $label32
        // dst = SUB 31, tmp
        // dst = SUB 63, tmp; for int64
        // JMP $done
        // label32:
        // dst = mov 32;
        // dst = mov 64; for int64
        // $done
        int instrSize = instr->GetSrc1()->GetSize();
        IRType type = instrSize == 8 ? TyInt64 : TyInt32;
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr->m_next);
        IR::Opnd * dst = instr->UnlinkDst();
        IR::Opnd * tmpOpnd = IR::RegOpnd::New(type, m_func);
        instr->SetDst(tmpOpnd);
        instr->m_opcode = Js::OpCode::BSR;
        Legalize(instr);
        IR::LabelInstr * label32 = Lowerer::InsertLabel(false, doneLabel);
        // BSR set ZF for a zero source; route to the constant-width path.
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, label32, m_func);
        label32->InsertBefore(instr);
        Lowerer::InsertSub(false, dst, IR::IntConstOpnd::New(instrSize == 8 ? 63 : 31, type, m_func), tmpOpnd, label32);
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, label32);
        Lowerer::InsertMove(dst, IR::IntConstOpnd::New(instrSize == 8 ? 64 : 32, type, m_func), doneLabel);
    }
}
  4901. #if !FLOATVAR
// GenerateNumberAllocation: inline the fast path of the recycler's
// JavascriptNumber allocator (only compiled when doubles are heap-boxed,
// i.e. !FLOATVAR). Fast path: take the allocator's freeObjectList pointer,
// bump it by the aligned allocation size, and commit if the new pointer
// does not pass endAddress. Slow path: call HelperAllocUninitializedNumber.
// opndDst receives the (uninitialized) number object either way.
void
LowererMD::GenerateNumberAllocation(IR::RegOpnd * opndDst, IR::Instr * instrInsert, bool isHelper)
{
    size_t alignedAllocSize = Js::RecyclerJavascriptNumberAllocator::GetAlignedAllocSize(
        m_func->GetScriptContextInfo()->IsRecyclerVerifyEnabled(),
        m_func->GetScriptContextInfo()->GetRecyclerVerifyPad());
    IR::Opnd * endAddressOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorEndAddress);
    IR::Opnd * freeObjectListOpnd = m_lowerer->LoadNumberAllocatorValueOpnd(instrInsert, NumberAllocatorValue::NumberAllocatorFreeObjectList);
    // MOV dst, allocator->freeObjectList
    IR::Instr * loadMemBlockInstr = IR::Instr::New(Js::OpCode::MOV, opndDst, freeObjectListOpnd, this->m_func);
    instrInsert->InsertBefore(loadMemBlockInstr);
    // LEA nextMemBlock, [dst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd,
        IR::IndirOpnd::New(opndDst, alignedAllocSize, TyMachPtr, this->m_func), this->m_func);
    instrInsert->InsertBefore(loadNextMemBlockInstr);
    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    instrInsert->InsertBefore(checkInstr);
    // JA $helper -- bump allocation would overrun the block; take the helper.
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::BranchInstr * branchInstr = IR::BranchInstr::New(Js::OpCode::JA, helperLabel, this->m_func);
    instrInsert->InsertBefore(branchInstr);
    // MOV allocator->freeObjectList, nextMemBlock -- commit the bump.
    IR::Instr * setFreeObjectListInstr = IR::Instr::New(Js::OpCode::MOV, freeObjectListOpnd, nextMemBlockOpnd, this->m_func);
    instrInsert->InsertBefore(setFreeObjectListInstr);
    // JMP $done
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    IR::BranchInstr * branchToDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, this->m_func);
    instrInsert->InsertBefore(branchToDoneInstr);
    // $helper:
    instrInsert->InsertBefore(helperLabel);
    // PUSH allocator
    this->LoadHelperArgument(instrInsert, m_lowerer->LoadScriptContextValueOpnd(instrInsert, ScriptContextValue::ScriptContextNumberAllocator));
    // dst = Call AllocUninitializedNumber
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::CALL, opndDst,
        IR::HelperCallOpnd::New(IR::HelperAllocUninitializedNumber, this->m_func), this->m_func);
    instrInsert->InsertBefore(instrCall);
    this->lowererMDArch.LowerCall(instrCall, 0);
    // $done:
    instrInsert->InsertBefore(doneLabel);
}
  4946. #endif
  4947. #ifdef _CONTROL_FLOW_GUARD
// GenerateCFGCheck: emit a Control Flow Guard validation of the indirect
// call target in entryPointOpnd, inserted before insertBeforeInstr.
// When JIT trampolines are enabled, a fast path is emitted first: if the
// target lies inside the JIT thunk segment, the address is simply masked
// down to thunk alignment and the __guard_check_icall call is skipped.
// Otherwise (or when the range check fails) the target is moved into
// ECX/RCX and the CFG check helper is called; it does not return on an
// invalid target.
void
LowererMD::GenerateCFGCheck(IR::Opnd * entryPointOpnd, IR::Instr * insertBeforeInstr)
{
    bool useJITTrampoline = CONFIG_FLAG(UseJITTrampoline);
    IR::LabelInstr * callLabelInstr = nullptr;
    uintptr_t jitThunkStartAddress = NULL;
    if (useJITTrampoline)
    {
#if ENABLE_OOP_NATIVE_CODEGEN
        if (m_func->IsOOPJIT())
        {
            OOPJITThunkEmitter * jitThunkEmitter = m_func->GetOOPThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        else
#endif
        {
            InProcJITThunkEmitter * jitThunkEmitter = m_func->GetInProcThreadContext()->GetJITThunkEmitter();
            jitThunkStartAddress = jitThunkEmitter->EnsureInitialized();
        }
        if (jitThunkStartAddress)
        {
            uintptr_t endAddressOfSegment = jitThunkStartAddress + InProcJITThunkEmitter::TotalThunkSize;
            Assert(endAddressOfSegment > jitThunkStartAddress);
            // Generate instructions for local Pre-Reserved Segment Range check
            IR::AddrOpnd * endAddressOfSegmentConstOpnd = IR::AddrOpnd::New(endAddressOfSegment, IR::AddrOpndKindDynamicMisc, m_func);
            IR::RegOpnd *resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            callLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            IR::LabelInstr * cfgLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            // resultOpnd = SUB endAddressOfSegmentConstOpnd, entryPointOpnd
            // CMP resultOpnd, TotalThunkSize
            // JAE $cfgLabel
            // AND entryPointOpnd, ~(ThunkSize-1)
            // JMP $callLabel
            m_lowerer->InsertSub(false, resultOpnd, endAddressOfSegmentConstOpnd, entryPointOpnd, insertBeforeInstr);
            m_lowerer->InsertCompareBranch(resultOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::TotalThunkSize, TyMachReg, m_func, true), Js::OpCode::BrGe_A, true, cfgLabelInstr, insertBeforeInstr);
            m_lowerer->InsertAnd(entryPointOpnd, entryPointOpnd, IR::IntConstOpnd::New(InProcJITThunkEmitter::ThunkAlignmentMask, TyMachReg, m_func, true), insertBeforeInstr);
            m_lowerer->InsertBranch(Js::OpCode::Br, callLabelInstr, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(cfgLabelInstr);
        }
    }
    //MOV ecx, entryPoint -- the CFG check helper takes its target in ECX/RCX.
    IR::RegOpnd * entryPointRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
#if _M_IX86
    entryPointRegOpnd->SetReg(RegECX);
#elif _M_X64
    entryPointRegOpnd->SetReg(RegRCX);
#endif
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr* movInstrEntryPointToRegister = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstrEntryPointToRegister);
    //Generate CheckCFG CALL here
    IR::HelperCallOpnd *cfgCallOpnd = IR::HelperCallOpnd::New(IR::HelperGuardCheckCall, this->m_func);
    IR::Instr* cfgCallInstr = IR::Instr::New(Js::OpCode::CALL, this->m_func);
#if _M_IX86
    //call[__guard_check_icall_fptr]
    cfgCallInstr->SetSrc1(cfgCallOpnd);
#elif _M_X64
    //mov rax, __guard_check_icall_fptr
    IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, m_func), RegRAX, TyMachPtr, this->m_func);
    IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, cfgCallOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(movInstr);
    //call rax
    cfgCallInstr->SetSrc1(targetOpnd);
#endif
    //CALL cfg(rax)
    insertBeforeInstr->InsertBefore(cfgCallInstr);
    if (jitThunkStartAddress)
    {
        Assert(callLabelInstr);
        if (CONFIG_FLAG(ForceJITCFGCheck))
        {
            // Always generate CFG check to make sure that the address is still valid
            movInstrEntryPointToRegister->InsertBefore(callLabelInstr);
        }
        else
        {
            // Fast path joins here, after (and skipping) the CFG call.
            insertBeforeInstr->InsertBefore(callLabelInstr);
        }
    }
}
  5029. #endif
// GenerateFastRecyclerAlloc: inline the bump-allocation fast path of the
// recycler for an object of 'allocSize' bytes. On success newObjDst holds
// the allocated block and control jumps to allocDoneLabel; if the bump
// would pass the heap-block end address, control jumps to allocHelperLabel
// (the caller emits the slow-path helper call there).
void
LowererMD::GenerateFastRecyclerAlloc(size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, IR::LabelInstr* allocHelperLabel, IR::LabelInstr* allocDoneLabel)
{
    IR::Opnd * endAddressOpnd;
    IR::Opnd * freeListOpnd;
    ScriptContextInfo* scriptContext = this->m_func->GetScriptContextInfo();
    void* allocatorAddress;
    uint32 endAddressOffset;
    uint32 freeListOffset;
    size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
    bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
    // Resolve the allocator bucket for this size and the offsets of its
    // endAddress/freeObjectList fields.
    Recycler::GetNormalHeapBlockAllocatorInfoForNativeAllocation((void*)scriptContext->GetRecyclerAddr(), alignedSize,
        allocatorAddress, endAddressOffset, freeListOffset,
        allowNativeCodeBumpAllocation, this->m_func->IsOOPJIT());
    endAddressOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + endAddressOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorEndAddressRef);
    freeListOpnd = IR::MemRefOpnd::New((char*)allocatorAddress + freeListOffset, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicRecyclerAllocatorFreeListRef);
    const IR::AutoReuseOpnd autoReuseTempOpnd(freeListOpnd, m_func);
    // MOV newObjDst, allocator->freeObjectList
    Lowerer::InsertMove(newObjDst, freeListOpnd, insertionPointInstr);
    // LEA nextMemBlock, [newObjDst + allocSize]
    IR::RegOpnd * nextMemBlockOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd* nextMemBlockSrc = IR::IndirOpnd::New(newObjDst, (int32)alignedSize, TyMachPtr, this->m_func);
    IR::Instr * loadNextMemBlockInstr = IR::Instr::New(Js::OpCode::LEA, nextMemBlockOpnd, nextMemBlockSrc, this->m_func);
    insertionPointInstr->InsertBefore(loadNextMemBlockInstr);
    // CMP nextMemBlock, allocator->endAddress
    IR::Instr * checkInstr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    checkInstr->SetSrc1(nextMemBlockOpnd);
    checkInstr->SetSrc2(endAddressOpnd);
    insertionPointInstr->InsertBefore(checkInstr);
    Legalize(checkInstr);
    // JA $allocHelper -- bump would overrun the block; take the slow path.
    IR::BranchInstr * branchToAllocHelperInstr = IR::BranchInstr::New(Js::OpCode::JA, allocHelperLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocHelperInstr);
    // MOV allocator->freeObjectList, nextMemBlock -- commit the bump.
    Lowerer::InsertMove(freeListOpnd, nextMemBlockOpnd, insertionPointInstr, false);
    // JMP $allocDone
    IR::BranchInstr * branchToAllocDoneInstr = IR::BranchInstr::New(Js::OpCode::JMP, allocDoneLabel, this->m_func);
    insertionPointInstr->InsertBefore(branchToAllocDoneInstr);
}
  5069. #ifdef ENABLE_WASM
  5070. void
  5071. LowererMD::GenerateCopysign(IR::Instr * instr)
  5072. {
  5073. #if defined(_M_IX86)
  5074. // We should only generate this if sse2 is available
  5075. Assert(AutoSystemInfo::Data.SSE2Available());
  5076. #endif
  5077. // ANDPS reg0, absDoubleCst
  5078. // ANDPS reg1, sgnBitDoubleCst
  5079. // ORPS reg0, reg1
  5080. // Copy sign from src2 to src1
  5081. IR::Opnd* src1 = instr->GetSrc1();
  5082. IR::Opnd* src2 = instr->GetSrc2();
  5083. Assert(src1->IsFloat32() || src1->IsFloat64());
  5084. GenerateFloatAbs(src1->AsRegOpnd(), instr);
  5085. IR::MemRefOpnd *memRef = IR::MemRefOpnd::New(src2->IsFloat32() ? this->m_func->GetThreadContextInfo()->GetSgnFloatBitCst() : this->m_func->GetThreadContextInfo()->GetSgnDoubleBitCst(),
  5086. src2->GetType(), this->m_func, src2->IsFloat32() ? IR::AddrOpndKindDynamicFloatRef : IR::AddrOpndKindDynamicDoubleRef);
  5087. IR::Instr* t2 = IR::Instr::New(Js::OpCode::ANDPS, instr->GetSrc2(), instr->GetSrc2(), memRef, m_func);
  5088. instr->InsertBefore(t2);
  5089. Legalize(t2);
  5090. instr->m_opcode = Js::OpCode::ORPS;
  5091. Legalize(instr);
  5092. };
  5093. #endif //ENABLE_WASM
// SaveDoubleToVar: box the float64 in opndFloat into a Var stored in dstOpnd.
// !FLOATVAR builds: obtain a JavascriptNumber (either a reusable stack temp
//   when the original instruction's dst is marked as a temp number, or a
//   fresh recycler allocation), initialize its vtable/type, and MOVSD the
//   double into its value slot.
// FLOATVAR builds: produce a NaN-tagged float Var by XORing the raw bits
//   with FloatTag_Value; in asm.js mode non-canonical NaN bit patterns are
//   first normalized to JavascriptNumber::k_Nan.
// isHelper indicates whether the emission point is already on a helper path
// (controls label helper-ness for the dbChecks).
void
LowererMD::SaveDoubleToVar(IR::RegOpnd * dstOpnd, IR::RegOpnd *opndFloat, IR::Instr *instrOrig, IR::Instr *instrInsert, bool isHelper)
{
    Assert(opndFloat->GetType() == TyFloat64);
    // Call JSNumber::ToVar to save the float operand to the result of the original (var) instruction
#if !FLOATVAR
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
    IR::Opnd * symVTableDst;
    IR::Opnd * symDblDst;
    IR::Opnd * symTypeDst;
    IR::Instr * newInstr;
    IR::Instr * numberInitInsertInstr = nullptr;
    if (instrOrig->dstIsTempNumber)
    {
        // Use the original dst to get the temp number sym
        StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrOrig->GetDst(), instrOrig->dstIsTempNumberTransferred);
        // LEA dst, &tempSym
        IR::SymOpnd * symTempSrc = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        IR::Instr * loadTempNumberInstr = IR::Instr::New(Js::OpCode::LEA, dstOpnd, symTempSrc, this->m_func);
        instrInsert->InsertBefore(loadTempNumberInstr);
        // Field views of the stack temp: vtable, double value, and type.
        symVTableDst = IR::SymOpnd::New(tempNumberSym, TyMachPtr, this->m_func);
        symDblDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::SymOpnd::New(tempNumberSym, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        if (this->m_lowerer->outerMostLoopLabel == nullptr)
        {
            // If we are not in loop, just insert in place
            numberInitInsertInstr = instrInsert;
        }
        else
        {
            // Otherwise, initialize in the outer most loop top if we haven't initialized it yet.
            numberInitInsertInstr = this->m_lowerer->initializedTempSym->TestAndSet(tempNumberSym->m_id) ?
                nullptr : this->m_lowerer->outerMostLoopLabel;
        }
    }
    else
    {
        // Not a temp: allocate a fresh JavascriptNumber from the recycler.
        this->GenerateNumberAllocation(dstOpnd, instrInsert, isHelper);
        symVTableDst = IR::IndirOpnd::New(dstOpnd, 0, TyMachPtr, this->m_func);
        symDblDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func);
        symTypeDst = IR::IndirOpnd::New(dstOpnd, (uint32)Js::JavascriptNumber::GetOffsetOfType(), TyMachPtr, this->m_func);
        numberInitInsertInstr = instrInsert;
    }
    if (numberInitInsertInstr)
    {
        // Inline the case where the dst is marked as temp.
        IR::Opnd *jsNumberVTable = m_lowerer->LoadVTableValueOpnd(numberInitInsertInstr, VTableValue::VtableJavascriptNumber);
        // MOV dst->vtable, JavascriptNumber::vtable
        newInstr = IR::Instr::New(Js::OpCode::MOV, symVTableDst, jsNumberVTable, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
        // MOV dst->type, JavascriptNumber_type
        IR::Opnd *typeOpnd = m_lowerer->LoadLibraryValueOpnd(numberInitInsertInstr, LibraryValue::ValueNumberTypeStatic);
        newInstr = IR::Instr::New(Js::OpCode::MOV, symTypeDst, typeOpnd, this->m_func);
        numberInitInsertInstr->InsertBefore(newInstr);
    }
    // MOVSD dst->value, opndFloat ; copy the float result to the temp JavascriptNumber
    newInstr = IR::Instr::New(Js::OpCode::MOVSD, symDblDst, opndFloat, this->m_func);
    instrInsert->InsertBefore(newInstr);
#else
    // s1 = MOVD opndFloat -- raw double bits into a GPR.
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyMachReg, m_func);
    IR::Instr *movd = IR::Instr::New(Js::OpCode::MOVD, s1, opndFloat, m_func);
    instrInsert->InsertBefore(movd);
    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        // Canonicalize NaN: any value whose exponent bits are all set and
        // whose mantissa is nonzero is replaced with k_Nan.
        // s1 = MOVD src
        // tmp = NOT s1
        // tmp = AND tmp, 0x7FF0000000000000ull
        // test tmp, tmp
        // je helper
        // jmp done
        // helper:
        // tmp2 = AND s1, 0x000FFFFFFFFFFFFFull
        // test tmp2, tmp2
        // je done
        // s1 = JavascriptNumber::k_Nan
        // done:
        IR::RegOpnd *tmp = IR::RegOpnd::New(TyMachReg, m_func);
        IR::Instr * newInstr = IR::Instr::New(Js::OpCode::NOT, tmp, s1, m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::MakeDstEquSrc1(newInstr);
        newInstr = IR::Instr::New(Js::OpCode::AND, tmp, tmp, IR::AddrOpnd::New((Js::Var)0x7FF0000000000000, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        instrInsert->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        IR::LabelInstr* helper = Lowerer::InsertLabel(true, instrInsert);
        Lowerer::InsertTestBranch(tmp, tmp, Js::OpCode::BrEq_A, helper, helper);
        IR::LabelInstr* done = Lowerer::InsertLabel(isHelper, instrInsert);
        Lowerer::InsertBranch(Js::OpCode::Br, done, helper);
        IR::RegOpnd *tmp2 = IR::RegOpnd::New(TyMachReg, m_func);
        newInstr = IR::Instr::New(Js::OpCode::AND, tmp2, s1, IR::AddrOpnd::New((Js::Var)0x000FFFFFFFFFFFFFull, IR::AddrOpndKindConstantVar, m_func, true), m_func);
        done->InsertBefore(newInstr);
        LowererMD::Legalize(newInstr);
        Lowerer::InsertTestBranch(tmp2, tmp2, Js::OpCode::BrEq_A, done, done);
        IR::Opnd * opndNaN = IR::AddrOpnd::New((Js::Var)Js::JavascriptNumber::k_Nan, IR::AddrOpndKindConstantVar, m_func, true);
        Lowerer::InsertMove(s1, opndNaN, done);
    }
    // s1 = XOR s1, FloatTag_Value -- apply the float tag.
    // dst = s1
    IR::Instr *setTag = IR::Instr::New(Js::OpCode::XOR,
        s1,
        s1,
        IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value,
            IR::AddrOpndKindConstantVar,
            this->m_func,
            /* dontEncode = */ true),
        this->m_func);
    IR::Instr *movDst = IR::Instr::New(Js::OpCode::MOV, dstOpnd, s1, this->m_func);
    instrInsert->InsertBefore(setTag);
    instrInsert->InsertBefore(movDst);
    LowererMD::Legalize(setTag);
#endif
}
// EmitLoadFloatFromNumber: lower a FromVar that loads a float from a Var.
// Emits the common tagged-int/number fast paths via EmitLoadFloatCommon;
// if those fully cover the cases (no bailout), the original instruction is
// simply removed. With bailout info, BailOutPrimitiveButString additionally
// tries HelperOp_ConvNumber_FromPrimitive into a stack temp before bailing
// out; on success the converted double is MOVSD'd into dst.
void
LowererMD::EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, insertInstr->HasBailOutInfo());
    if (labelDone == nullptr)
    {
        // We're done
        insertInstr->Remove();
        return;
    }
    // $Done note: insertAfter
    insertInstr->InsertAfter(labelDone);
    if (!insertInstr->HasBailOutInfo())
    {
        // $Done
        insertInstr->Remove();
        return;
    }
    Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
    IR::LabelInstr *labelNoBailOut = nullptr;
    IR::SymOpnd *tempSymOpnd = nullptr;
    if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
    {
        // Lazily create the shared stack slot the helper writes the double to.
        if (!this->m_func->tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
        }
        // LEA r3, tempSymDouble
        IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        tempSymOpnd = IR::SymOpnd::New(this->m_func->tempSymDouble, TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
        // regBoolResult = to_number_fromPrimitive(value, &dst, allowUndef, scriptContext);
        this->m_lowerer->LoadScriptContext(insertInstr);
        IR::IntConstOpnd *allowUndefOpnd;
        if (insertInstr->GetBailOutKind() == IR::BailOutPrimitiveButString)
        {
            allowUndefOpnd = IR::IntConstOpnd::New(true, TyInt32, this->m_func);
        }
        else
        {
            Assert(insertInstr->GetBailOutKind() == IR::BailOutNumberOnly);
            allowUndefOpnd = IR::IntConstOpnd::New(false, TyInt32, this->m_func);
        }
        this->LoadHelperArgument(insertInstr, allowUndefOpnd);
        this->LoadHelperArgument(insertInstr, reg3Opnd);
        this->LoadHelperArgument(insertInstr, src);
        IR::RegOpnd *regBoolResult = IR::RegOpnd::New(TyInt32, this->m_func);
        instr = IR::Instr::New(Js::OpCode::CALL, regBoolResult, IR::HelperCallOpnd::New(IR::HelperOp_ConvNumber_FromPrimitive, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);
        this->lowererMDArch.LowerCall(instr, 0);
        // TEST regBoolResult, regBoolResult -- did the conversion succeed?
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(regBoolResult);
        instr->SetSrc2(regBoolResult);
        insertInstr->InsertBefore(instr);
        // JNE $noBailOut
        labelNoBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelNoBailOut, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    // Bailout code
    Assert(insertInstr->m_opcode == Js::OpCode::FromVar);
    insertInstr->UnlinkDst();
    insertInstr->FreeSrc1();
    IR::Instr *bailoutInstr = insertInstr;
    insertInstr = bailoutInstr->m_next;
    this->m_lowerer->GenerateBailOut(bailoutInstr);
    // $noBailOut
    if (labelNoBailOut)
    {
        insertInstr->InsertBefore(labelNoBailOut);
        Assert(dst->IsRegOpnd());
        // MOVSD dst, [pResult].f64 -- load the double the helper produced.
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempSymOpnd, this->m_func);
        insertInstr->InsertBefore(instr);
    }
}
// EmitLoadFloatCommon
//
// Emits code (before insertInstr) to load the var operand 'src' into the float
// operand 'dst' (TyFloat32 or TyFloat64).
//
// Returns:
//   nullptr     - src was a known float constant and the load was emitted
//                 inline; nothing further is needed.
//   $Done label - src must be checked at runtime. On return the instruction
//                 stream is positioned at the helper path (after $Helper when
//                 needHelperLabel is true); the caller emits the slow path and
//                 then inserts the returned $Done label after it.
IR::LabelInstr*
LowererMD::EmitLoadFloatCommon(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, bool needHelperLabel)
{
    IR::Instr *instr;
    Assert(src->GetType() == TyVar);
    Assert(dst->IsFloat());
    bool isFloatConst = false;
    IR::RegOpnd *regFloatOpnd = nullptr;
    // Case 1: src is a register holding a known float constant - materialize
    // the double from a native-code-data slot with a single MOVSD.
    if (src->IsRegOpnd() && src->AsRegOpnd()->m_sym->m_isFltConst)
    {
        IR::RegOpnd *regOpnd = src->AsRegOpnd();
        Assert(regOpnd->m_sym->m_isSingleDef);
        Js::Var value = regOpnd->m_sym->GetFloatConstValueAsVar_PostGlobOpt();
#if FLOATVAR
        // NaN-boxed build: copy the raw double into allocator-owned storage and
        // reference that storage directly (MemRef in-proc, offset-based Indir for OOP JIT).
        void *pDouble = (double*)NativeCodeDataNewNoFixup(this->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_EmitLoadFloatCommon_Double>, Js::JavascriptNumber::GetValue(value));
        IR::Opnd * doubleRef;
        if (!m_func->IsOOPJIT())
        {
            doubleRef = IR::MemRefOpnd::New(pDouble, TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
        }
        else
        {
            // OOP JIT: address is not known yet; use [nativeCodeDataSym + offset]
            // and keep the base sym live across back edges.
            int offset = NativeCodeData::GetDataTotalOffset(pDouble);
            doubleRef = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), offset, TyMachDouble,
#if DBG
                NativeCodeData::GetDataDescription(pDouble, m_func->m_alloc),
#endif
                m_func, true);
            GetLowerer()->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
#else
        // Boxed-number build: read the double payload out of the heap-allocated
        // JavascriptNumber at its value offset.
        IR::MemRefOpnd *doubleRef = IR::MemRefOpnd::New((BYTE*)value + Js::JavascriptNumber::GetValueOffset(), TyFloat64, this->m_func,
            IR::AddrOpndKindDynamicDoubleRef);
#endif
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        instr = IR::Instr::New(Js::OpCode::MOVSD, regFloatOpnd, doubleRef, this->m_func);
        insertInstr->InsertBefore(instr);
        Legalize(instr);
        isFloatConst = true;
    }
    // Case 2: src is an immediate/float-const operand - load it via the lowerer.
    // Src is constant?
    if (src->IsImmediateOpnd() || src->IsFloatConstOpnd())
    {
        regFloatOpnd = IR::RegOpnd::New(TyFloat64, this->m_func);
        m_lowerer->LoadFloatFromNonReg(src, regFloatOpnd, insertInstr);
        isFloatConst = true;
    }
    // Constant paths finish inline; narrow to f32 if the destination requires it.
    if (isFloatConst)
    {
        if (dst->GetType() == TyFloat32)
        {
            // CVTSD2SS regOpnd32.f32, regOpnd.f64 -- Convert regOpnd from f64 to f32
            IR::RegOpnd *regOpnd32 = regFloatOpnd->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
            instr = IR::Instr::New(Js::OpCode::CVTSD2SS, regOpnd32, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
            // MOVSS dst, regOpnd32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, regOpnd32, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, regOpnd
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, regFloatOpnd, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        return nullptr;
    }
    // Case 3: runtime check needed. Branch layout:
    //   LoadCheckedFloat ... -> $Store on success, $Helper (or $Done) otherwise
    Assert(src->IsRegOpnd());
    IR::LabelInstr *labelStore = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper;
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    if (needHelperLabel)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    else
    {
        // No helper path requested: failures fall straight to $Done.
        labelHelper = labelDone;
    }
    bool const isFloat32 = dst->GetType() == TyFloat32;
    // Need a temp f64 register when the dst is f32 or not a register.
    IR::RegOpnd *reg2 = ((isFloat32 || !dst->IsRegOpnd()) ? IR::RegOpnd::New(TyMachDouble, this->m_func) : dst->AsRegOpnd());
    // Load the float value in reg2
    this->lowererMDArch.LoadCheckedFloat(src->AsRegOpnd(), reg2, labelStore, labelHelper, insertInstr, needHelperLabel);
    // $Store
    insertInstr->InsertBefore(labelStore);
    if (isFloat32)
    {
        IR::RegOpnd *reg2_32 = reg2->UseWithNewType(TyFloat32, this->m_func)->AsRegOpnd();
        // CVTSD2SS r2_32.f32, r2.f64 -- Convert regOpnd from f64 to f32
        instr = IR::Instr::New(Js::OpCode::CVTSD2SS, reg2_32, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
        // MOVSS dst, r2_32
        instr = IR::Instr::New(Js::OpCode::MOVSS, dst, reg2_32, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    else if (reg2 != dst)
    {
        // MOVSD dst, r2
        instr = IR::Instr::New(Js::OpCode::MOVSD, dst, reg2, this->m_func);
        insertInstr->InsertBefore(instr);
    }
    // JMP $Done
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelDone, this->m_func);
    insertInstr->InsertBefore(instr);
    if (needHelperLabel)
    {
        // $Helper
        insertInstr->InsertBefore(labelHelper);
    }
    return labelDone;
}
// EmitLoadFloat
//
// Emits code (before insertInstr) to convert the var 'src' to a float 'dst',
// with a helper call (Op_ConvFloat_Helper / Op_ConvNumber_Helper) on the slow
// path. When instrBailOut carries BailOutOnArrayAccessHelperCall, the helper
// call is replaced by a branch to labelBailOut. For implicit-call bailout
// kinds the helper instruction itself gets (shared) bailout info.
void
LowererMD::EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut, IR::LabelInstr * labelBailOut)
{
    IR::LabelInstr *labelDone;
    IR::Instr *instr;
    labelDone = EmitLoadFloatCommon(dst, src, insertInstr, true);
    if (labelDone == nullptr)
    {
        // We're done (src was a float constant; the load was emitted inline)
        return;
    }
    IR::BailOutKind bailOutKind = instrBailOut && instrBailOut->HasBailOutInfo() ? instrBailOut->GetBailOutKind() : IR::BailOutInvalid;
    if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
    {
        // Bail out instead of making the helper call.
        Assert(labelBailOut);
        m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, insertInstr);
        insertInstr->InsertBefore(labelDone);
        return;
    }
    // The helper writes the converted double through a pointer; point it at dst
    // itself if dst is memory, otherwise at a fresh stack slot.
    IR::Opnd *memAddress = dst;
    if (dst->IsRegOpnd())
    {
        // Create an f64 stack location to store the result of the helper.
        IR::SymOpnd *symOpnd = IR::SymOpnd::New(StackSym::New(dst->GetType(), this->m_func), dst->GetType(), this->m_func);
        this->m_func->StackAllocate(symOpnd->m_sym->AsStackSym(), sizeof(double));
        memAddress = symOpnd;
    }
    // LEA r3, dst
    IR::RegOpnd *reg3Opnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    instr = IR::Instr::New(Js::OpCode::LEA, reg3Opnd, memAddress, this->m_func);
    insertInstr->InsertBefore(instr);
    // to_number_full(value, &dst, scriptContext);
    // Create dummy binary op to convert into helper
    instr = IR::Instr::New(Js::OpCode::Add_A, this->m_func);
    instr->SetSrc1(src);
    instr->SetSrc2(reg3Opnd);
    insertInstr->InsertBefore(instr);
    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        _Analysis_assume_(instrBailOut != nullptr);
        // The helper can trigger implicit calls; attach the bailout info to it.
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            // instrBailOut owned the bailout target; share it so both instrs can use it.
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }
    IR::JnHelperMethod helper;
    if (dst->GetType() == TyFloat32)
    {
        helper = IR::HelperOp_ConvFloat_Helper;
    }
    else
    {
        helper = IR::HelperOp_ConvNumber_Helper;
    }
    this->m_lowerer->LowerBinaryHelperMem(instr, helper);
    // If dst is a register, copy the helper's result back out of the stack slot.
    if (dst->IsRegOpnd())
    {
        if (dst->GetType() == TyFloat32)
        {
            // MOVSS dst, r32
            instr = IR::Instr::New(Js::OpCode::MOVSS, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
        else
        {
            // MOVSD dst, [pResult].f64
            instr = IR::Instr::New(Js::OpCode::MOVSD, dst, memAddress, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    // $Done
    insertInstr->InsertBefore(labelDone);
}
// LowerInt4NegWithBailOut
//
// Lowers Neg_I4 to NEG and emits the conditional branches into the bailout
// block that immediately follows the instruction:
//   NEG dst
//   JO  $bailOut      (if overflow bailout requested; NEG overflows for INT32_MIN)
//   JEQ $bailOut      (if negative-zero bailout requested; zero result => src was 0)
//   JMP $skipBailOut
// The caller has already laid out bailOutLabel directly after 'instr' and
// provides skipBailOutLabel past the bailout block.
void
LowererMD::LowerInt4NegWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Neg_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    // Lower the instruction
    instr->m_opcode = Js::OpCode::NEG;
    Legalize(instr);
    if(bailOutKind & IR::BailOutOnOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
    {
        // JO $bailOut -- negating INT32_MIN overflows
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
    }
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // JEQ $bailOut -- a zero result means the source was zero, i.e. -0
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, bailOutLabel, instr->m_func));
    }
    // Skip bailout
    bailOutLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
// LowerInt4AddWithBailOut
//
// Lowers Add_I4 to ADD (with flags) and emits a JNO over the bailout block
// that immediately follows. Because the ADD may have clobbered one of its own
// sources (when dst aliases src1/src2), restoration code is emitted at the top
// of the bailout path so the bailout sees the pre-operation values.
void
LowererMD::LowerInt4AddWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Add_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 & dstEquSrc2)
    {
        // We have:
        //     s1 += s1
        // Which is equivalent to:
        //     s1 <<= 1
        //
        // These overflow a signed 32-bit integer when for the initial s1:
        //     s1 > 0 && (s1 & 0x40000000) - result is negative after overflow
        //     s1 < 0 && !(s1 & 0x40000000) - result is nonnegative after overflow
        //
        // To restore s1 to its value before the operation, we first do an arithmetic right-shift by one bit to undo the
        // left-shift and preserve the sign of the result after overflow. Since the result after overflow always has the
        // opposite sign from the operands (hence the overflow), we just need to invert the sign of the result. The following
        // restores s1 to its value before the instruction:
        //     s1 = (s1 >> 1) ^ 0x80000000
        //
        // Generate:
        //     sar s1, 1
        //     xor s1, 0x80000000
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::SAR,
                dst,
                dst,
                IR::IntConstOpnd::New(1, TyInt8, instr->m_func),
                instr->m_func)
            );
        startBailOutInstr->InsertBefore(
            IR::Instr::New(
                Js::OpCode::XOR,
                dst,
                dst,
                IR::IntConstOpnd::New(INT32_MIN, TyInt32, instr->m_func, true /* dontEncode */),
                instr->m_func)
            );
    }
    else if(dstEquSrc1 | dstEquSrc2)
    {
        // We have:
        //     s1 += s2
        // Or:
        //     s1 = s2 + s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 -= s2
        //
        // Generate:
        //     sub s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        bailOutLabel->InsertAfter(IR::Instr::New(Js::OpCode::SUB, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }
    // Lower the instruction
    ChangeToAdd(instr, true /* needFlags */);
    Legalize(instr);
    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));
    // Fall through to bailOutLabel
}
// LowerInt4SubWithBailOut
//
// Lowers Sub_I4 to SUB (with flags) and emits a JNO over the bailout block
// that immediately follows. As with Add, if dst aliases exactly one source,
// restoration code at the top of the bailout path recovers the pre-operation
// value. (dst == src1 == src2 needs no restore: s1 - s1 leaves nothing to undo.)
void
LowererMD::LowerInt4SubWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Sub_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(
        (bailOutKind & IR::BailOutOnResultConditions) == IR::BailOutOnOverflow ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    // Restore sources overwritten by the instruction in the bailout path
    const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
    Assert(dst->IsRegOpnd());
    const bool dstEquSrc1 = dst->IsEqual(src1), dstEquSrc2 = dst->IsEqual(src2);
    if(dstEquSrc1 ^ dstEquSrc2)
    {
        // We have:
        //     s1 -= s2
        // Or:
        //     s1 = s2 - s1
        //
        // The following restores s1 to its value before the instruction:
        //     s1 += s2
        // Or:
        //     s1 = s2 - s1
        //
        // Generate:
        //     neg s1 - only for second case
        //     add s1, s2
        if(dstEquSrc1)
        {
            Assert(src2->IsRegOpnd() || src2->IsIntConstOpnd());
        }
        else
        {
            Assert(src1->IsRegOpnd() || src1->IsIntConstOpnd());
        }
        const auto startBailOutInstr = bailOutLabel->m_next;
        Assert(startBailOutInstr);
        if(dstEquSrc2)
        {
            startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func));
        }
        startBailOutInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, dst, dst, dstEquSrc1 ? src2 : src1, instr->m_func));
    }
    // Lower the instruction
    ChangeToSub(instr, true /* needFlags */);
    Legalize(instr);
    // Skip bailout on no overflow
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNO, skipBailOutLabel, instr->m_func));
    // Fall through to bailOutLabel
}
  5657. bool
  5658. LowererMD::GenerateSimplifiedInt4Mul(
  5659. IR::Instr *const mulInstr,
  5660. const IR::BailOutKind bailOutKind,
  5661. IR::LabelInstr *const bailOutLabel)
  5662. {
  5663. if (AutoSystemInfo::Data.IsAtomPlatform())
  5664. {
  5665. // On Atom, always optimize unless phase is off
  5666. if (PHASE_OFF(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) ||
  5667. PHASE_OFF(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5668. return false;
  5669. }
  5670. else
  5671. {
  5672. // On other platforms, don't optimize unless phase is forced
  5673. if (!PHASE_FORCE(Js::AtomPhase, mulInstr->m_func->GetTopFunc()) &&
  5674. !PHASE_FORCE(Js::MulStrengthReductionPhase, mulInstr->m_func->GetTopFunc()))
  5675. return false;
  5676. }
  5677. Assert(mulInstr);
  5678. Assert(mulInstr->m_opcode == Js::OpCode::Mul_I4);
  5679. IR::Instr *instr = mulInstr, *nextInstr;
  5680. const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
  5681. if (!src1->IsIntConstOpnd() && !src2->IsIntConstOpnd())
  5682. return false;
  5683. // if two const operands, GlobOpt would have folded the computation
  5684. Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd()));
  5685. Assert(dst->IsRegOpnd());
  5686. const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
  5687. const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
  5688. const auto constSrcValue = constSrc->AsIntConstOpnd()->AsInt32();
  5689. auto nonConstSrcCopy = nonConstSrc;
  5690. Assert(nonConstSrc->IsRegOpnd());
  5691. bool doOVF = bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
  5692. // don't simplify mul by large numbers with OF check
  5693. if (doOVF && (constSrcValue > 3 || constSrcValue < -3))
  5694. return false;
  5695. switch(constSrcValue)
  5696. {
  5697. case -3:
  5698. case 3:
  5699. // if dst = src, we need to have a copy of the src for the ADD/SUB
  5700. if (dst->IsEqual(nonConstSrc))
  5701. {
  5702. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5703. // MOV
  5704. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5705. }
  5706. instr->UnlinkSrc1();
  5707. instr->UnlinkSrc2();
  5708. // SHL
  5709. instr->m_opcode = Js::OpCode::SHL;
  5710. instr->SetSrc1(nonConstSrc);
  5711. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5712. constSrc->Free(instr->m_func);
  5713. Legalize(instr);
  5714. // JO
  5715. if (doOVF)
  5716. {
  5717. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5718. instr->InsertAfter(nextInstr);
  5719. instr = nextInstr;
  5720. }
  5721. // ADD
  5722. nextInstr = IR::Instr::New(Js::OpCode::ADD, dst, dst, nonConstSrcCopy, instr->m_func);
  5723. instr->InsertAfter(nextInstr);
  5724. instr = nextInstr;
  5725. Legalize(instr);
  5726. if (constSrcValue == -3)
  5727. {
  5728. // JO
  5729. if (doOVF)
  5730. {
  5731. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5732. instr->InsertAfter(nextInstr);
  5733. instr = nextInstr;
  5734. }
  5735. // NEG
  5736. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5737. instr->InsertAfter(nextInstr);
  5738. instr = nextInstr;
  5739. Legalize(instr);
  5740. }
  5741. // last JO inserted by caller
  5742. return true;
  5743. case -2:
  5744. case 2:
  5745. instr->UnlinkSrc1();
  5746. instr->UnlinkSrc2();
  5747. // SHL
  5748. instr->m_opcode = Js::OpCode::SHL;
  5749. instr->SetSrc1(nonConstSrc);
  5750. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) 1, TyInt32, instr->m_func));
  5751. constSrc->Free(instr->m_func);
  5752. Legalize(instr);
  5753. if (constSrcValue == -2)
  5754. {
  5755. // JO
  5756. if (doOVF)
  5757. {
  5758. nextInstr = IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func);
  5759. instr->InsertAfter(nextInstr);
  5760. instr = nextInstr;
  5761. }
  5762. // NEG
  5763. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5764. instr->InsertAfter(nextInstr);
  5765. instr = nextInstr;
  5766. Legalize(instr);
  5767. }
  5768. // last JO inserted by caller
  5769. return true;
  5770. case -1:
  5771. instr->UnlinkSrc1();
  5772. instr->UnlinkSrc2();
  5773. // NEG
  5774. instr->m_opcode = Js::OpCode::NEG;
  5775. instr->SetSrc1(nonConstSrc);
  5776. constSrc->Free(instr->m_func);
  5777. Legalize(instr);
  5778. // JO inserted by caller
  5779. return true;
  5780. case 0:
  5781. instr->FreeSrc1();
  5782. instr->FreeSrc2();
  5783. // MOV
  5784. instr->m_opcode = Js::OpCode::MOV;
  5785. instr->SetSrc1(IR::IntConstOpnd::New((IntConstType) 0, TyInt32, instr->m_func));
  5786. Legalize(instr);
  5787. // JO inserted by caller are removed in later phases
  5788. return true;
  5789. case 1:
  5790. instr->UnlinkSrc1();
  5791. instr->UnlinkSrc2();
  5792. // MOV
  5793. instr->m_opcode = Js::OpCode::MOV;
  5794. instr->SetSrc1(nonConstSrc);
  5795. constSrc->Free(instr->m_func);
  5796. Legalize(instr);
  5797. // JO inserted by caller are removed in later phases
  5798. return true;
  5799. default:
  5800. // large numbers with no OF check
  5801. Assert(!doOVF);
  5802. // 2^i
  5803. // -2^i
  5804. if (Math::IsPow2(constSrcValue) || Math::IsPow2(-constSrcValue))
  5805. {
  5806. uint32 shamt = constSrcValue > 0 ? Math::Log2(constSrcValue) : Math::Log2(-constSrcValue);
  5807. instr->UnlinkSrc1();
  5808. instr->UnlinkSrc2();
  5809. // SHL
  5810. instr->m_opcode = Js::OpCode::SHL;
  5811. instr->SetSrc1(nonConstSrc);
  5812. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5813. constSrc->Free(instr->m_func);
  5814. Legalize(instr);
  5815. if (constSrcValue < 0)
  5816. {
  5817. // NEG
  5818. nextInstr = IR::Instr::New(Js::OpCode::NEG, dst, dst, instr->m_func);
  5819. instr->InsertAfter(nextInstr);
  5820. Legalize(instr);
  5821. }
  5822. return true;
  5823. }
  5824. // 2^i + 1
  5825. // 2^i - 1
  5826. if (Math::IsPow2(constSrcValue - 1) || Math::IsPow2(constSrcValue + 1))
  5827. {
  5828. bool plusOne = Math::IsPow2(constSrcValue - 1);
  5829. uint32 shamt = plusOne ? Math::Log2(constSrcValue - 1) : Math::Log2(constSrcValue + 1);
  5830. if (dst->IsEqual(nonConstSrc))
  5831. {
  5832. nonConstSrcCopy = IR::RegOpnd::New(nonConstSrc->GetType(), instr->m_func);
  5833. // MOV
  5834. Lowerer::InsertMove(nonConstSrcCopy, nonConstSrc, instr);
  5835. }
  5836. instr->UnlinkSrc1();
  5837. instr->UnlinkSrc2();
  5838. // SHL
  5839. instr->m_opcode = Js::OpCode::SHL;
  5840. instr->SetSrc1(nonConstSrc);
  5841. instr->SetSrc2(IR::IntConstOpnd::New((IntConstType) shamt, TyInt32, instr->m_func));
  5842. constSrc->Free(instr->m_func);
  5843. Legalize(instr);
  5844. // ADD/SUB
  5845. nextInstr = IR::Instr::New(plusOne ? Js::OpCode::ADD : Js::OpCode::SUB, dst, dst, nonConstSrcCopy, instr->m_func);
  5846. instr->InsertAfter(nextInstr);
  5847. instr = nextInstr;
  5848. Legalize(instr);
  5849. return true;
  5850. }
  5851. return false;
  5852. }
  5853. }
// LowerInt4MulWithBailOut
//
// Lowers Mul_I4 to IMUL (or a strength-reduced sequence via
// GenerateSimplifiedInt4Mul) with overflow and/or negative-zero bailout
// checks branching into the bailout block that immediately follows.
// For negative zero, a separate $checkForNegativeZero label is emitted in
// front of bailOutLabel; the main path jumps there only when the product
// is zero, and that block inspects the operand signs to decide whether the
// zero is actually -0.
void
LowererMD::LowerInt4MulWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Mul_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    IR::LabelInstr *checkForNegativeZeroLabel = nullptr;
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // We have:
        //     s3 = s1 * s2
        //
        // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
        //     bailOut = (s1 < 0 || s2 < 0) (either s1 or s2 has to be zero for the result to be zero, so we don't emit zero checks)
        //
        // Note, however, that if in future we decide to ignore mul overflow in some cases, and overflow occurs with one of the operands as negative,
        // this can lead to bailout. Will handle that case if ever we decide to ignore mul overflow.
        //
        // Generate:
        //   $checkForNegativeZeroLabel:
        //     test s1, s1
        //     js $bailOutLabel
        //     test s2, s2
        //     jns $skipBailOutLabel
        //     (fall through to bail out)
        const auto dst = instr->GetDst(), src1 = instr->GetSrc1(), src2 = instr->GetSrc2();
        Assert(dst->IsRegOpnd());
        Assert(!src1->IsEqual(src2)); // cannot result in -0 if both operands are the same; GlobOpt should have figured that out
        checkForNegativeZeroLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        bailOutLabel->InsertBefore(checkForNegativeZeroLabel);
        if(src1->IsIntConstOpnd() || src2->IsIntConstOpnd())
        {
            // One operand is a constant: only the non-constant operand's sign matters.
            Assert(!(src1->IsIntConstOpnd() && src2->IsIntConstOpnd())); // if this results in -0, GlobOpt should have avoided type specialization
            const auto constSrc = src1->IsIntConstOpnd() ? src1 : src2;
            const auto nonConstSrc = src1->IsIntConstOpnd() ? src2 : src1;
            Assert(nonConstSrc->IsRegOpnd());
            const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(nonConstSrc);
            newInstr->SetSrc2(nonConstSrc);
            bailOutLabel->InsertBefore(newInstr);
            const auto constSrcValue = constSrc->AsIntConstOpnd()->GetValue();
            if(constSrcValue == 0)
            {
                // const == 0: bail out only when the other operand is negative
                bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
            }
            else
            {
                // const < 0: result is zero only when the other operand is zero,
                // which gives -0; skip the bailout only when it is nonzero
                Assert(constSrcValue < 0); // cannot result in -0 if one operand is positive; GlobOpt should have figured that out
                bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));
            }
        }
        else
        {
            // Both operands in registers: bail out when either sign bit is set.
            auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(src1);
            newInstr->SetSrc2(src1);
            bailOutLabel->InsertBefore(newInstr);
            bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JSB, bailOutLabel, instr->m_func));
            newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
            newInstr->SetSrc1(src2);
            newInstr->SetSrc2(src2);
            bailOutLabel->InsertBefore(newInstr);
            bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
        }
        // Fall through to bailOutLabel
    }
    const bool needsOverflowCheck =
        bailOutKind & IR::BailOutOnMulOverflow || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck;
    AssertMsg(!instr->ShouldCheckForNon32BitOverflow() || (needsOverflowCheck && instr->ShouldCheckForNon32BitOverflow()), "Non 32-bit overflow check required without bailout info");
    bool simplifiedMul = LowererMD::GenerateSimplifiedInt4Mul(instr, bailOutKind, bailOutLabel);
    // Lower the instruction
    if (!simplifiedMul)
    {
        LowererMD::ChangeToIMul(instr, needsOverflowCheck);
    }
    // Main-path checks go in front of the negative-zero block when present,
    // otherwise directly in front of the bailout label.
    const auto insertBeforeInstr = checkForNegativeZeroLabel ? checkForNegativeZeroLabel : bailOutLabel;
    if(needsOverflowCheck)
    {
        // do we care about int32 or non-int32 overflow ?
        if (!simplifiedMul && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
            LowererMD::EmitNon32BitOvfCheck(instr, insertBeforeInstr, bailOutLabel);
        else
            insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, bailOutLabel, instr->m_func));
    }
    if(bailOutKind & IR::BailOutOnNegativeZero)
    {
        // On zero, branch to determine whether the result would be -0
        Assert(checkForNegativeZeroLabel);
        const auto newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        const auto dst = instr->GetDst();
        newInstr->SetSrc1(dst);
        newInstr->SetSrc2(dst);
        insertBeforeInstr->InsertBefore(newInstr);
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, checkForNegativeZeroLabel, instr->m_func));
    }
    // Skip bailout
    insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, skipBailOutLabel, instr->m_func));
}
// LowerInt4RemWithBailOut
//
// Lowers Rem_I4 with a negative-zero bailout: a zero remainder from a
// negative dividend must produce -0, which an int can't represent. The
// remainder and (when the fast path hasn't already proven s1 >= 0) the
// dividend's sign are checked in the bailout block that follows the
// instruction.
void
LowererMD::LowerInt4RemWithBailOut(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4);
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnNegativeZero);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);
    instr->ReplaceDst(instr->GetDst()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc1(instr->GetSrc1()->UseWithNewType(TyInt32, instr->m_func));
    instr->ReplaceSrc2(instr->GetSrc2()->UseWithNewType(TyInt32, instr->m_func));
    // The power-of-two fast path, when generated, only reaches the bailout
    // block with a nonnegative-dividend check already done.
    bool fastPath = m_lowerer->GenerateSimplifiedInt4Rem(instr, skipBailOutLabel);
    // We have:
    //     s3 = s1 % s2
    //
    // If the result is zero, we need to check and only bail out if it would be -0. The following determines this:
    //     bailOut = (s3 == 0 && s1 < 0)
    //
    // Generate:
    //   $checkForNegativeZeroLabel:
    //     test s3, s3
    //     jne $skipBailOutLabel
    //     test s1, s1
    //     jns $skipBailOutLabel
    //     (fall through to bail out)
    IR::Opnd *dst = instr->GetDst(), *src1 = instr->GetSrc1();
    Assert(dst->IsRegOpnd());
    IR::Instr * newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
    newInstr->SetSrc1(dst);
    newInstr->SetSrc2(dst);
    bailOutLabel->InsertBefore(newInstr);
    bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, skipBailOutLabel, instr->m_func));
    // Fast path already checks if s1 >= 0
    if (!fastPath)
    {
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        bailOutLabel->InsertBefore(newInstr);
        bailOutLabel->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNSB, skipBailOutLabel, instr->m_func));
    }
    // Fall through to bailOutLabel
    // Lower the instruction
    LowererMDArch::EmitInt4Instr(instr);
}
  6014. IR::Instr *
  6015. LowererMD::LoadFloatZero(IR::Opnd * opndDst, IR::Instr * instrInsert)
  6016. {
  6017. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOVSD_ZERO, opndDst, instrInsert->m_func);
  6018. instrInsert->InsertBefore(instr);
  6019. return instr;
  6020. }
IR::Instr *
LowererMD::LoadFloatValue(IR::Opnd * opndDst, double value, IR::Instr * instrInsert)
{
    // Loads the constant 'value' into opndDst (a float64 or float32 operand),
    // inserting the load before instrInsert. Returns the emitted load instruction.
    if (value == 0.0 && !Js::JavascriptNumber::IsNegZero(value))
    {
        // zero can be loaded with "XORPS xmm, xmm" rather than needing memory load
        return LoadFloatZero(opndDst, instrInsert);
    }

    IR::Opnd * opnd;
    void* pValue = nullptr;
    bool isFloat64 = opndDst->IsFloat64();

    // Store the constant in the function's native-code data area, as a double or
    // a narrowed float depending on the destination type.
    if (isFloat64)
    {
        pValue = NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), DoubleType<DataDesc_LowererMD_LoadFloatValue_Double>, value);
    }
    else
    {
        Assert(opndDst->IsFloat32());
        pValue = (float*)NativeCodeDataNewNoFixup(instrInsert->m_func->GetNativeCodeDataAllocator(), FloatType<DataDesc_LowererMD_LoadFloatValue_Float>, (float)value);
    }

    if (!instrInsert->m_func->IsOOPJIT())
    {
        // In-process JIT: the data address is known now, so reference it directly.
        opnd = IR::MemRefOpnd::New((void*)pValue, isFloat64 ? TyMachDouble : TyFloat32,
            instrInsert->m_func, isFloat64 ? IR::AddrOpndKindDynamicDoubleRef : IR::AddrOpndKindDynamicFloatRef);
    }
    else // OOP JIT
    {
        // Out-of-process JIT: only the offset within the native-code data block is
        // known at JIT time. Load the block's base address at run time and address
        // the constant as [base + offset].
        int offset = NativeCodeData::GetDataTotalOffset(pValue);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);

        Lowerer::InsertMove(
            addressRegOpnd,
            IR::MemRefOpnd::New(instrInsert->m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, instrInsert->m_func, IR::AddrOpndKindDynamicNativeCodeDataRef),
            instrInsert);
        opnd = IR::IndirOpnd::New(addressRegOpnd, offset, isFloat64 ? TyMachDouble : TyFloat32,
#if DBG
            NativeCodeData::GetDataDescription(pValue, instrInsert->m_func->m_alloc),
#endif
            instrInsert->m_func, true);
    }

    // movsd xmm, [reg+offset]
    IR::Instr * instr = IR::Instr::New(LowererMDArch::GetAssignOp(opndDst->GetType()), opndDst, opnd, instrInsert->m_func);
    instrInsert->InsertBefore(instr);
    Legalize(instr);
    return instr;
}
  6066. IR::Instr *
  6067. LowererMD::EnsureAdjacentArgs(IR::Instr * instrArg)
  6068. {
  6069. // Ensure that the arg instructions for a given call site are adjacent.
  6070. // This isn't normally desirable for CQ, but it's required by, for instance, the cloner,
  6071. // which must clone a complete call sequence.
  6072. IR::Opnd * opnd = instrArg->GetSrc2();
  6073. IR::Instr * instrNextArg;
  6074. StackSym * sym;
  6075. AssertMsg(opnd, "opnd");
  6076. while (opnd->IsSymOpnd())
  6077. {
  6078. sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
  6079. instrNextArg = sym->m_instrDef;
  6080. Assert(instrNextArg);
  6081. instrNextArg->SinkInstrBefore(instrArg);
  6082. instrArg = instrNextArg;
  6083. opnd = instrArg->GetSrc2();
  6084. }
  6085. sym = opnd->AsRegOpnd()->m_sym;
  6086. instrNextArg = sym->m_instrDef;
  6087. Assert(instrNextArg && instrNextArg->m_opcode == Js::OpCode::StartCall);
  6088. // The StartCall can be trivially moved down.
  6089. if (instrNextArg->m_next != instrArg)
  6090. {
  6091. instrNextArg->UnlinkStartCallFromBailOutInfo(instrArg);
  6092. instrNextArg->Unlink();
  6093. instrArg->InsertBefore(instrNextArg);
  6094. }
  6095. return instrNextArg->m_prev;
  6096. }
  6097. #if INT32VAR
  6098. //
  6099. // Convert an int32 to Var representation.
  6100. //
  6101. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  6102. {
  6103. AssertMsg(TySize[opndSrc->GetType()] == MachPtr, "For this to work it should be a 64-bit register");
  6104. IR::Instr* instr = IR::Instr::New(Js::OpCode::BTS, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  6105. insertInstr->InsertBefore(instr);
  6106. }
  6107. //
  6108. // jump to $labelHelper, based on the result of CMP
  6109. //
void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
{
    // Emits a runtime test of opndSrc's tag bits and a conditional branch to
    // labelHelper: branch on "is a tagged int" when fContinueLabel is true,
    // otherwise on "is not a tagged int". If instrFirst is non-null it receives
    // the first instruction emitted (so callers can relocate the sequence).
    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");

    IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);

#ifdef SHIFTLOAD
    // s1 = SHLD src1, 16 - Shift top 16-bits of src1 to s1
    IR::Instr* instr = IR::Instr::New(Js::OpCode::SHLD, opndReg, opndSrc, IR::IntConstOpnd::New(16, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);

    if (instrFirst)
    {
        *instrFirst = instr;
    }

    // CMP s1.i16, AtomTag.i16 -- compare only the 16 tag bits
    IR::Opnd *opndReg16 = opndReg->Copy(m_func);
    opndReg16->SetType(TyInt16);
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndReg16);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt16, this->m_func, /* dontEncode = */ true));
    insertInstr->InsertBefore(instr);
#else
    // s1 = MOV src1 - Move to a temporary
    IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
    insertInstr->InsertBefore(instr);

    if (instrFirst)
    {
        *instrFirst = instr;
    }

    // s1 = SHR s1, VarTag_Shift -- bring the tag bits down to the low end
    instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
    insertInstr->InsertBefore(instr);

    // CMP s1, AtomTag
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(opndReg);
    instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func, /* dontEncode = */ true));
    insertInstr->InsertBefore(instr);
#endif
    if(fContinueLabel)
    {
        // JEQ $labelHelper -- tag matches: it is a tagged int
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
    }
    else
    {
        // JNE $labelHelper -- tag differs: not a tagged int
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
    }
    insertInstr->InsertBefore(instr);
}
  6158. //
  6159. // If lower 32-bits are zero (value is zero), jump to $helper.
  6160. //
  6161. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  6162. {
  6163. // Cast the var to 32 bit integer.
  6164. if(opndSrc->GetSize() != 4)
  6165. {
  6166. opndSrc = opndSrc->UseWithNewType(TyUint32, this->m_func);
  6167. }
  6168. AssertMsg(TySize[opndSrc->GetType()] == 4, "This technique works only on the 32-bit version");
  6169. // TEST src1, src1
  6170. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  6171. instr->SetSrc1(opndSrc);
  6172. instr->SetSrc2(opndSrc);
  6173. insertInstr->InsertBefore(instr);
  6174. if(labelHelper != nullptr)
  6175. {
  6176. // JZ $labelHelper
  6177. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  6178. insertInstr->InsertBefore(instr);
  6179. }
  6180. }
  6181. //
  6182. // If top 16 bits are not zero i.e. it is NOT object, jump to $helper.
  6183. //
  6184. bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  6185. {
  6186. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  6187. if (opndSrc->IsTaggedValue() && fContinueLabel)
  6188. {
  6189. // Insert delete branch opcode to tell the dbChecks not to assert on the helper label we may fall through into
  6190. IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
  6191. insertInstr->InsertBefore(fakeBr);
  6192. return false;
  6193. }
  6194. else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
  6195. {
  6196. return false;
  6197. }
  6198. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  6199. // s1 = MOV src1 - Move to a temporary
  6200. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  6201. insertInstr->InsertBefore(instr);
  6202. // s1 = SHR s1, VarTag_Shift
  6203. instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
  6204. insertInstr->InsertBefore(instr);
  6205. if (fContinueLabel)
  6206. {
  6207. // JEQ $labelHelper
  6208. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
  6209. insertInstr->InsertBefore(instr);
  6210. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  6211. insertInstr->InsertBefore(labelHelper);
  6212. }
  6213. else
  6214. {
  6215. // JNZ $labelHelper
  6216. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
  6217. insertInstr->InsertBefore(instr);
  6218. }
  6219. return true;
  6220. }
  6221. #else
  6222. //
  6223. // Convert an int32 value to a Var.
  6224. //
  6225. void LowererMD::GenerateInt32ToVarConversion( IR::Opnd * opndSrc, IR::Instr * insertInstr )
  6226. {
  6227. // SHL r1, AtomTag
  6228. IR::Instr * instr = IR::Instr::New(Js::OpCode::SHL, opndSrc, opndSrc, IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func), this->m_func);
  6229. insertInstr->InsertBefore(instr);
  6230. // INC r1
  6231. instr = IR::Instr::New(Js::OpCode::INC, opndSrc, opndSrc, this->m_func);
  6232. insertInstr->InsertBefore(instr);
  6233. }
  6234. //
  6235. // jump to $labelHelper, based on the result of TEST
  6236. //
  6237. void LowererMD::GenerateSmIntTest(IR::Opnd *opndSrc, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::Instr **instrFirst /* = nullptr */, bool fContinueLabel /*= false*/)
  6238. {
  6239. if (opndSrc->IsTaggedInt() && !fContinueLabel)
  6240. {
  6241. return;
  6242. }
  6243. else if (opndSrc->IsNotTaggedValue() && fContinueLabel)
  6244. {
  6245. return;
  6246. }
  6247. // TEST src1, AtomTag
  6248. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  6249. instr->SetSrc1(opndSrc);
  6250. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
  6251. insertInstr->InsertBefore(instr);
  6252. if (instrFirst)
  6253. {
  6254. *instrFirst = instr;
  6255. }
  6256. if(fContinueLabel)
  6257. {
  6258. // JNE $labelHelper
  6259. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  6260. }
  6261. else
  6262. {
  6263. // JEQ $labelHelper
  6264. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  6265. }
  6266. insertInstr->InsertBefore(instr);
  6267. }
  6268. //
  6269. // If value is zero in tagged int representation, jump to $labelHelper.
  6270. //
  6271. void LowererMD::GenerateTaggedZeroTest( IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelHelper )
  6272. {
  6273. if (opndSrc->IsNotTaggedValue())
  6274. {
  6275. return;
  6276. }
  6277. // CMP src1, AtomTag
  6278. IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  6279. instr->SetSrc1(opndSrc);
  6280. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt32, this->m_func));
  6281. insertInstr->InsertBefore(instr);
  6282. // JEQ $helper
  6283. if(labelHelper != nullptr)
  6284. {
  6285. // JEQ $labelHelper
  6286. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  6287. insertInstr->InsertBefore(instr);
  6288. }
  6289. }
  6290. //
  6291. // If not object, jump to $labelHelper.
  6292. //
  6293. bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  6294. {
  6295. if (opndSrc->IsTaggedInt() && fContinueLabel)
  6296. {
  6297. // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
  6298. IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
  6299. insertInstr->InsertBefore(fakeBr);
  6300. return false;
  6301. }
  6302. else if (opndSrc->IsNotTaggedValue() && !fContinueLabel)
  6303. {
  6304. return false;
  6305. }
  6306. // TEST src1, AtomTag
  6307. IR::Instr* instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  6308. instr->SetSrc1(opndSrc);
  6309. instr->SetSrc2(IR::IntConstOpnd::New(Js::AtomTag, TyInt8, this->m_func));
  6310. insertInstr->InsertBefore(instr);
  6311. if (fContinueLabel)
  6312. {
  6313. // JEQ $labelHelper
  6314. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
  6315. insertInstr->InsertBefore(instr);
  6316. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  6317. insertInstr->InsertBefore(labelHelper);
  6318. }
  6319. else
  6320. {
  6321. // JNE $labelHelper
  6322. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
  6323. insertInstr->InsertBefore(instr);
  6324. }
  6325. return true;
  6326. }
  6327. #endif
  6328. bool LowererMD::GenerateJSBooleanTest(IR::RegOpnd * regSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  6329. {
  6330. IR::Instr* instr;
  6331. if (regSrc->GetValueType().IsBoolean())
  6332. {
  6333. if (fContinueLabel)
  6334. {
  6335. // JMP $labelTarget
  6336. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelTarget, this->m_func);
  6337. insertInstr->InsertBefore(instr);
  6338. #if DBG
  6339. if (labelTarget->isOpHelper)
  6340. {
  6341. labelTarget->m_noHelperAssert = true;
  6342. }
  6343. #endif
  6344. }
  6345. return false;
  6346. }
  6347. // CMP src1, vtable<JavaScriptBoolean>
  6348. instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  6349. IR::IndirOpnd *vtablePtrOpnd = IR::IndirOpnd::New(regSrc, 0, TyMachPtr, this->m_func);
  6350. instr->SetSrc1(vtablePtrOpnd);
  6351. IR::Opnd *jsBooleanVTable = m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptBoolean);
  6352. instr->SetSrc2(jsBooleanVTable);
  6353. insertInstr->InsertBefore(instr);
  6354. Legalize(instr);
  6355. if (fContinueLabel)
  6356. {
  6357. // JEQ $labelTarget
  6358. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelTarget, this->m_func);
  6359. insertInstr->InsertBefore(instr);
  6360. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  6361. insertInstr->InsertBefore(labelHelper);
  6362. }
  6363. else
  6364. {
  6365. // JNE $labelTarget
  6366. instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
  6367. insertInstr->InsertBefore(instr);
  6368. }
  6369. return true;
  6370. }
  6371. #if FLOATVAR
  6372. //
  6373. // If any of the top 14 bits are not set, then the var is not a float value and hence, jump to $labelHelper.
  6374. //
  6375. void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
  6376. {
  6377. if (opndSrc->GetValueType().IsFloat())
  6378. {
  6379. return;
  6380. }
  6381. AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");
  6382. // s1 = MOV src1 - Move to a temporary
  6383. IR::Opnd * opndReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  6384. IR::Instr * instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc, this->m_func);
  6385. insertInstr->InsertBefore(instr);
  6386. // s1 = SHR s1, 50
  6387. instr = IR::Instr::New(Js::OpCode::SHR, opndReg, opndReg, IR::IntConstOpnd::New(50, TyInt8, this->m_func), this->m_func);
  6388. insertInstr->InsertBefore(instr);
  6389. // JZ $helper
  6390. instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
  6391. insertInstr->InsertBefore(instr);
  6392. }
  6393. IR::RegOpnd* LowererMD::CheckFloatAndUntag(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper)
  6394. {
  6395. IR::Opnd* floatTag = IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value, IR::AddrOpndKindConstantVar, this->m_func, /* dontEncode = */ true);
  6396. IR::RegOpnd* regOpndFloatTag = IR::RegOpnd::New(TyUint64, this->m_func);
  6397. // MOV floatTagReg, FloatTag_Value
  6398. IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, regOpndFloatTag, floatTag, this->m_func);
  6399. insertInstr->InsertBefore(instr);
  6400. if (!opndSrc->GetValueType().IsFloat())
  6401. {
  6402. // TEST s1, floatTagReg
  6403. instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  6404. instr->SetSrc1(opndSrc);
  6405. instr->SetSrc2(regOpndFloatTag);
  6406. insertInstr->InsertBefore(instr);
  6407. // JZ $helper
  6408. instr = IR::BranchInstr::New(Js::OpCode::JEQ /* JZ */, labelHelper, this->m_func);
  6409. insertInstr->InsertBefore(instr);
  6410. }
  6411. // untaggedFloat = XOR floatTagReg, s1 // where untaggedFloat == floatTagReg; use floatTagReg temporarily for the untagged float
  6412. IR::RegOpnd* untaggedFloat = regOpndFloatTag;
  6413. instr = IR::Instr::New(Js::OpCode::XOR, untaggedFloat, regOpndFloatTag, opndSrc, this->m_func);
  6414. insertInstr->InsertBefore(instr);
  6415. IR::RegOpnd *floatReg = IR::RegOpnd::New(TyMachDouble, this->m_func);
  6416. instr = IR::Instr::New(Js::OpCode::MOVD, floatReg, untaggedFloat, this->m_func);
  6417. insertInstr->InsertBefore(instr);
  6418. return floatReg;
  6419. }
  6420. #else
void LowererMD::GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody)
{
    // Jumps to labelHelper unless opndSrc is a boxed number, determined by
    // comparing the object's vtable against the JavascriptNumber vtable.
    if (opndSrc->GetValueType().IsFloat())
    {
        // Statically known float: no runtime check needed.
        return;
    }

    AssertMsg(opndSrc->GetSize() == MachPtr, "64-bit register required");

    if(checkForNullInLoopBody && m_func->IsLoopBody())
    {
        // It's possible that the value was determined dead by the jitted function and was not restored. The jitted loop
        // body may not realize that it's dead and may try to use it. Check for null in loop bodies.
        //     test src1, src1
        //     jz $helper (bail out)
        m_lowerer->InsertCompareBranch(
            opndSrc,
            IR::AddrOpnd::NewNull(m_func),
            Js::OpCode::BrEq_A,
            labelHelper,
            insertInstr);
    }

    // CMP [src1], vtable<JavascriptNumber>
    IR::Instr* instr = IR::Instr::New(Js::OpCode::CMP, insertInstr->m_func);
    instr->SetSrc1(IR::IndirOpnd::New(opndSrc, 0, TyMachPtr, insertInstr->m_func));
    instr->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptNumber));
    insertInstr->InsertBefore(instr);

    // JNZ $helper
    instr = IR::BranchInstr::New(Js::OpCode::JNE /* JNZ */, labelHelper, this->m_func);
    insertInstr->InsertBefore(instr);
}
  6449. #endif
  6450. #if DBG
  6451. //
  6452. // Helps in debugging of fast paths.
  6453. //
  6454. void LowererMD::GenerateDebugBreak( IR::Instr * insertInstr )
  6455. {
  6456. // int 3
  6457. IR::Instr *int3 = IR::Instr::New(Js::OpCode::INT, insertInstr->m_func);
  6458. int3->SetSrc1(IR::IntConstOpnd::New(3, TyInt32, insertInstr->m_func));
  6459. insertInstr->InsertBefore(int3);
  6460. }
  6461. #endif
  6462. IR::Instr *
  6463. LowererMD::LoadStackAddress(StackSym *sym, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
  6464. {
  6465. IR::RegOpnd * regDst = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachReg, this->m_func);
  6466. IR::SymOpnd * symSrc = IR::SymOpnd::New(sym, TyMachPtr, this->m_func);
  6467. IR::Instr * lea = IR::Instr::New(Js::OpCode::LEA, regDst, symSrc, this->m_func);
  6468. return lea;
  6469. }
template <bool verify>
void
LowererMD::MakeDstEquSrc1(IR::Instr *const instr)
{
    // Put a lowered instruction into x86's two-operand form, where the
    // destination must also be the first source ("a = a op b"). When 'verify'
    // is true, only asserts that the instruction is already in that form.
    Assert(instr);
    Assert(instr->IsLowered());
    Assert(instr->GetDst());
    Assert(instr->GetSrc1());

    if(instr->GetDst()->IsEqual(instr->GetSrc1()))
    {
        // Already in the required form.
        return;
    }

    if (verify)
    {
        AssertMsg(false, "Missing legalization");
        return;
    }

    if(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2()))
    {
        switch(instr->m_opcode)
        {
            // These opcodes are commutative, so the operands can simply be swapped.
#ifdef _M_IX86
            case Js::OpCode::ADC:
#endif
            case Js::OpCode::Add_I4:
            case Js::OpCode::Mul_I4:
            case Js::OpCode::Or_I4:
            case Js::OpCode::Xor_I4:
            case Js::OpCode::And_I4:
            case Js::OpCode::ADD:
            case Js::OpCode::IMUL2:
            case Js::OpCode::OR:
            case Js::OpCode::XOR:
            case Js::OpCode::AND:
            case Js::OpCode::ADDSD:
            case Js::OpCode::MULSD:
            case Js::OpCode::ADDSS:
            case Js::OpCode::MULSS:
            case Js::OpCode::ADDPS:
                // For (a = b & a), generate (a = a & b)
                instr->SwapOpnds();
                return;
        }

        // For (a = b - a), generate (c = a; a = b - c) and fall through
        ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
    }

    // For (a = b - c), generate (a = b; a = a - c)
    IR::Instr *const mov = IR::Instr::New(Js::OpCode::Ld_A, instr->GetDst(), instr->UnlinkSrc1(), instr->m_func);
    instr->InsertBefore(mov);
    ChangeToAssign(mov);
    instr->SetSrc1(instr->GetDst());
}
void
LowererMD::EmitInt64Instr(IR::Instr * instr)
{
    // Lower an int64 instruction. Only 32-bit x86 takes this path, where int64
    // operations must be split into 32-bit instruction pairs by the arch lowerer;
    // other targets should never reach here.
#ifdef _M_IX86
    lowererMDArch.EmitInt64Instr(instr);
#else
    Assert(UNREACHED);
#endif
}
void
LowererMD::EmitInt4Instr(IR::Instr *instr)
{
    // Delegate int32 instruction lowering to the arch-specific lowerer.
    LowererMDArch::EmitInt4Instr(instr);
}
void
LowererMD::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Delegate boxing of an int32/uint32 value into a Var to the arch-specific lowerer.
    lowererMDArch.EmitLoadVar(instrLoad, isFromUint32, isHelper);
}
bool
LowererMD::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    // Delegate Var-to-int32 conversion to the arch-specific lowerer and propagate
    // its result (see LowererMDArch::EmitLoadInt32 for the exact contract).
    return lowererMDArch.EmitLoadInt32(instrLoad, conversionFromObjectAllowed, bailOutOnHelper, labelBailOut);
}
void
LowererMD::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate signed int -> float conversion to the arch-specific lowerer.
    this->lowererMDArch.EmitIntToFloat(dst, src, instrInsert);
}
void
LowererMD::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate unsigned int -> float conversion to the arch-specific lowerer.
    this->lowererMDArch.EmitUIntToFloat(dst, src, instrInsert);
}
void
LowererMD::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate signed int32 -> int64 widening to the arch-specific lowerer.
    this->lowererMDArch.EmitIntToLong(dst, src, instrInsert);
}
void
LowererMD::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate unsigned int32 -> int64 widening to the arch-specific lowerer.
    this->lowererMDArch.EmitUIntToLong(dst, src, instrInsert);
}
void
LowererMD::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
{
    // Delegate int64 -> int32 narrowing to the arch-specific lowerer.
    this->lowererMDArch.EmitLongToInt(dst, src, instrInsert);
}
  6571. void
  6572. LowererMD::EmitFloat32ToFloat64(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  6573. {
  6574. // We should only generate this if sse2 is available
  6575. Assert(AutoSystemInfo::Data.SSE2Available());
  6576. Assert(dst->IsRegOpnd() && dst->IsFloat64());
  6577. Assert(src->IsRegOpnd() && src->GetType() == TyFloat32);
  6578. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSS2SD, dst, src, this->m_func));
  6579. }
void
LowererMD::EmitInt64toFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instr)
{
#ifdef _M_IX86
    // 32-bit x86: no 64-bit integer registers, so call a runtime helper chosen
    // by the (dst type, src type) pair.
    IR::Opnd *srcOpnd = instr->UnlinkSrc1();
    LoadInt64HelperArgument(instr, srcOpnd);

    IR::Instr* callinstr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instr->InsertBefore(callinstr);
    CompileAssert(sizeof(IRType) == 1);
    // Pack both IRTypes into one value so a single switch selects the helper.
    const uint16 fromToType = dst->GetType() | (srcOpnd->GetType() << 8);
    IR::JnHelperMethod method = IR::HelperOp_Throw;
    switch (fromToType)
    {
    case TyFloat32 | (TyInt64 << 8) : method = IR::HelperI64TOF32; break;
    case TyFloat32 | (TyUint64 << 8) : method = IR::HelperUI64TOF32; break;
    case TyFloat64 | (TyInt64 << 8) : method = IR::HelperI64TOF64; break;
    case TyFloat64 | (TyUint64 << 8) : method = IR::HelperUI64TOF64; break;
    default:
        Assert(UNREACHED);
    }
    this->ChangeToHelperCall(callinstr, method);
#else
    // x64: CVTSI2SD handles signed int64 -> float64 only. Convert through a
    // float64 temporary and narrow at the end if the destination is float32.
    IR::Opnd* origDst = nullptr;
    if (dst->IsFloat32())
    {
        origDst = dst;
        dst = IR::RegOpnd::New(TyFloat64, this->m_func);
    }

    const auto insertLegalize = [instr](IR::Instr* newInstr)
    {
        instr->InsertBefore(newInstr);
        Legalize(newInstr);
    };

    if (src->IsUnsigned())
    {
        // Unsigned source: CVTSI2SD would interpret a set MSB as negative, so
        // test the sign bit and take a fix-up path when it is set.
        insertLegalize(IR::Instr::New(Js::OpCode::TEST, nullptr, src, src, m_func));
        IR::LabelInstr* msbSetLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JSB, msbSetLabel, m_func));
        // MSB not set, simple case
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
        insertLegalize(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, m_func));
        insertLegalize(msbSetLabel);
        // MSB set: halve the value while OR-ing the dropped low bit back in (so
        // rounding still accounts for it), convert, then double the result.
        IR::RegOpnd* halfOpnd = IR::RegOpnd::New(TyInt64, m_func);
        IR::RegOpnd* lsbOpnd = IR::RegOpnd::New(TyInt64, m_func);
        m_lowerer->InsertMove(halfOpnd, src, instr);
        m_lowerer->InsertMove(lsbOpnd, src, instr);
        insertLegalize(IR::Instr::New(Js::OpCode::SHR, halfOpnd, halfOpnd, IR::IntConstOpnd::New(1, TyInt8, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::AND, lsbOpnd, lsbOpnd, IR::Int64ConstOpnd::New(1, TyInt64, m_func), m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::OR, halfOpnd, halfOpnd, lsbOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, halfOpnd, m_func));
        insertLegalize(IR::Instr::New(Js::OpCode::ADDSD, dst, dst, dst, m_func));
        insertLegalize(doneLabel);
    }
    else
    {
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSI2SD, dst, src, m_func));
    }
    if (origDst)
    {
        // Narrow the intermediate float64 result into the float32 destination.
        insertLegalize(IR::Instr::New(Js::OpCode::CVTSD2SS, origDst, dst, m_func));
    }
#endif
}
  6644. void
  6645. LowererMD::EmitNon32BitOvfCheck(IR::Instr *instr, IR::Instr *insertInstr, IR::LabelInstr* bailOutLabel)
  6646. {
  6647. AssertMsg(instr->m_opcode == Js::OpCode::IMUL, "IMUL should be used to check for non-32 bit overflow check on x86.");
  6648. IR::RegOpnd *edxSym = IR::RegOpnd::New(TyInt32, instr->m_func);
  6649. #ifdef _M_IX86
  6650. edxSym->SetReg(RegEDX);
  6651. #else
  6652. edxSym->SetReg(RegRDX);
  6653. #endif
  6654. // dummy def for edx to force RegAlloc to generate a lifetime. This is removed later by the Peeps phase.
  6655. IR::Instr *newInstr = IR::Instr::New(Js::OpCode::NOP, edxSym, instr->m_func);
  6656. insertInstr->InsertBefore(newInstr);
  6657. IR::RegOpnd *temp = IR::RegOpnd::New(TyInt32, instr->m_func);
  6658. Assert(instr->ignoreOverflowBitCount > 32);
  6659. uint8 shamt = 64 - instr->ignoreOverflowBitCount;
  6660. // MOV temp, edx
  6661. newInstr = IR::Instr::New(Js::OpCode::MOV, temp, edxSym, instr->m_func);
  6662. insertInstr->InsertBefore(newInstr);
  6663. // SHL temp, shamt
  6664. newInstr = IR::Instr::New(Js::OpCode::SHL, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
  6665. insertInstr->InsertBefore(newInstr);
  6666. // SAR temp, shamt
  6667. newInstr = IR::Instr::New(Js::OpCode::SAR, temp, temp, IR::IntConstOpnd::New(shamt, TyInt8, instr->m_func, true), instr->m_func);
  6668. insertInstr->InsertBefore(newInstr);
  6669. // CMP temp, edx
  6670. newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
  6671. newInstr->SetSrc1(temp);
  6672. newInstr->SetSrc2(edxSym);
  6673. insertInstr->InsertBefore(newInstr);
  6674. // JNE
  6675. Lowerer::InsertBranch(Js::OpCode::JNE, false, bailOutLabel, insertInstr);
  6676. }
void LowererMD::ConvertFloatToInt32(IR::Opnd* intOpnd, IR::Opnd* floatOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone, IR::Instr * instInsert)
{
    // Truncating float -> int32 conversion. Emits CVTTSD2SI/CVTTSS2SI plus an
    // overflow check; on x86 with SSE3 it additionally retries a 64-bit FISTTP
    // conversion before falling through to the (caller-provided) helper path.
    UNREFERENCED_PARAMETER(labelHelper); // used on ARM
#if defined(_M_IX86)
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Assert((floatOpnd->IsRegOpnd() && floatOpnd->IsFloat()) || (floatOpnd->IsIndirOpnd() && floatOpnd->GetType() == TyMachDouble));
    Assert(intOpnd->GetType() == TyInt32);

    IR::Instr* instr;
    {
#ifdef _M_X64
        IR::Opnd* dstOpnd = IR::RegOpnd::New(TyInt64, m_func);
#else
        IR::Opnd* dstOpnd = intOpnd;
#endif
        // CVTTSD2SI dst, floatOpnd -- truncating conversion; overflow yields the
        // "integer indefinite" value (the sign-bit pattern) which is checked below.
        instr = IR::Instr::New(floatOpnd->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, dstOpnd, floatOpnd, this->m_func);
        instInsert->InsertBefore(instr);

        // CMP dst, 0x80000000 {0x8000000000000000 on x64} -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(dstOpnd);
        instr->SetSrc2(IR::IntConstOpnd::New(MachSignBit, TyMachReg, this->m_func, true));
        instInsert->InsertBefore(instr);
        Legalize(instr);

#ifdef _M_X64
        // Truncate to int32 for x64. We still need to go to helper though if we have int64 overflow.
        // MOV_TRUNC intOpnd, tmpOpnd
        instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, intOpnd, dstOpnd, this->m_func);
        instInsert->InsertBefore(instr);
#endif
    }

    // JNE $done -- no overflow, the converted value is good.
    instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
    instInsert->InsertBefore(instr);

    // It does overflow - Let's try using FISTTP which uses 64 bits and is relevant only for x86
    // but requires going to memory and should only be used in overflow scenarios
#ifdef _M_IX86
    if (AutoSystemInfo::Data.SSE3Available())
    {
        IR::Opnd* floatStackOpnd;

        // Reuse (or lazily create) the function's float64 stack temp as the
        // memory slot the x87 instructions operate on.
        StackSym* tempSymDouble = this->m_func->tempSymDouble;
        if (!tempSymDouble)
        {
            this->m_func->tempSymDouble = StackSym::New(TyFloat64, this->m_func);
            this->m_func->StackAllocate(this->m_func->tempSymDouble, MachDouble);
            tempSymDouble = this->m_func->tempSymDouble;
        }

        // Widen a float32 source to float64 first.
        IR::Opnd * float64Opnd;
        if (floatOpnd->IsFloat32())
        {
            float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
            instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, floatOpnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            float64Opnd = floatOpnd;
        }

        // Spill the value to the stack temp when it lives in a register.
        if (float64Opnd->IsRegOpnd())
        {
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
            instr = IR::Instr::New(Js::OpCode::MOVSD, floatStackOpnd, float64Opnd, m_func);
            instInsert->InsertBefore(instr);
        }
        else
        {
            floatStackOpnd = float64Opnd;
        }

        // FLD [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::FLD, floatStackOpnd, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);

        if (!float64Opnd->IsRegOpnd())
        {
            floatStackOpnd = IR::SymOpnd::New(tempSymDouble, TyMachDouble, m_func);
        }

        // FISTTP qword ptr [tmpDouble] -- 64-bit truncating store from the x87 stack
        instr = IR::Instr::New(Js::OpCode::FISTTP, floatStackOpnd, m_func);
        instInsert->InsertBefore(instr);

        // Alias an int32 stack sym over the low 32 bits of the stored 64-bit result.
        StackSym *intSym = StackSym::New(TyInt32, m_func);
        intSym->m_offset = tempSymDouble->m_offset;
        intSym->m_allocated = true;
        IR::Opnd* lowerBitsOpnd = IR::SymOpnd::New(intSym, TyInt32, m_func);

        // MOV dst, dword ptr [tmpDouble]
        instr = IR::Instr::New(Js::OpCode::MOV, intOpnd, lowerBitsOpnd, m_func);
        instInsert->InsertBefore(instr);

        // TEST dst, dst -- Check for overflow
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(intOpnd);
        instr->SetSrc2(intOpnd);
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);

        // Low half is zero: inspect the high 32 bits of the 64-bit result.
        // CMP [tmpDouble - 4], 0x80000000
        StackSym* higherBitsSym = StackSym::New(TyInt32, m_func);
        higherBitsSym->m_offset = tempSymDouble->m_offset + 4;
        higherBitsSym->m_allocated = true;
        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        instr->SetSrc1(IR::SymOpnd::New(higherBitsSym, TyInt32, m_func));
        instr->SetSrc2(IR::IntConstOpnd::New(0x80000000, TyInt32, this->m_func, true));
        instInsert->InsertBefore(instr);
        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelDone, this->m_func);
        instInsert->InsertBefore(instr);
    }
#endif
}
  6783. IR::Instr *
  6784. LowererMD::InsertConvertFloat64ToInt32(const RoundMode roundMode, IR::Opnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
  6785. {
  6786. Assert(dst);
  6787. Assert(dst->IsInt32());
  6788. Assert(src);
  6789. Assert(src->IsFloat64());
  6790. Assert(insertBeforeInstr);
  6791. // The caller is expected to check for overflow. To have that work be done automatically, use LowererMD::EmitFloatToInt.
  6792. Func *const func = insertBeforeInstr->m_func;
  6793. IR::AutoReuseOpnd autoReuseSrcPlusHalf;
  6794. IR::Instr *instr = nullptr;
  6795. switch (roundMode)
  6796. {
  6797. case RoundModeTowardInteger:
  6798. {
  6799. // Conversion with rounding towards nearest integer is not supported by the architecture. Add 0.5 and do a
  6800. // round-toward-zero conversion instead.
  6801. IR::RegOpnd *const srcPlusHalf = IR::RegOpnd::New(TyFloat64, func);
  6802. autoReuseSrcPlusHalf.Initialize(srcPlusHalf, func);
  6803. Lowerer::InsertAdd(
  6804. false /* needFlags */,
  6805. srcPlusHalf,
  6806. src,
  6807. IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, func,
  6808. IR::AddrOpndKindDynamicDoubleRef),
  6809. insertBeforeInstr);
  6810. instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeTowardZero), dst, srcPlusHalf, func);
  6811. insertBeforeInstr->InsertBefore(instr);
  6812. LowererMD::Legalize(instr);
  6813. return instr;
  6814. }
  6815. case RoundModeHalfToEven:
  6816. {
  6817. instr = IR::Instr::New(LowererMD::MDConvertFloat64ToInt32Opcode(RoundModeHalfToEven), dst, src, func);
  6818. insertBeforeInstr->InsertBefore(instr);
  6819. LowererMD::Legalize(instr);
  6820. return instr;
  6821. }
  6822. default:
  6823. AssertMsg(0, "RoundMode not supported.");
  6824. return nullptr;
  6825. }
  6826. }
void
LowererMD::EmitFloatToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert, IR::Instr *instrBailOut, IR::LabelInstr * labelBailOut)
{
    // Convert a float src (32- or 64-bit) to an int32 dst: emit the inline fast
    // path (ConvertFloatToInt32), falling back to a HelperConv_ToInt32Core call
    // when the inline path jumps to $Helper. instrBailOut/labelBailOut carry
    // optional bailout information from the instruction being lowered.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    if (instrBailOut && instrBailOut->HasBailOutInfo())
    {
        bailOutKind = instrBailOut->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
        {
            // Bail out instead of calling helper. If this is happening unconditionally, the caller should instead throw a rejit exception.
            Assert(labelBailOut);
            m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrInsert);
            return;
        }
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *instr;

    // Fast path: jumps to labelHelper when the inline conversion cannot
    // produce the result, and reaches labelDone on success.
    ConvertFloatToInt32(dst, src, labelHelper, labelDone, instrInsert);

    // $Helper
    instrInsert->InsertBefore(labelHelper);

    // The helper takes a double argument; widen a float32 source first.
    IR::Opnd * arg = src;
    if (src->IsFloat32())
    {
        arg = IR::RegOpnd::New(TyFloat64, m_func);
        EmitFloat32ToFloat64(arg, src, instrInsert);
    }
    instr = IR::Instr::New(Js::OpCode::CALL, dst, this->m_func);
    instrInsert->InsertBefore(instr);

    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
    {
        _Analysis_assume_(instrBailOut != nullptr);
        // Attach the bailout info to the helper call; if this instruction owns
        // the bailout record, share it and lower the shared bail target.
        instr = instr->ConvertToBailOutInstr(instrBailOut->GetBailOutInfo(), bailOutKind);
        if (instrBailOut->GetBailOutInfo()->bailOutInstr == instrBailOut)
        {
            IR::Instr * instrShare = instrBailOut->ShareBailOut();
            m_lowerer->LowerBailTarget(instrShare);
        }
    }

    // dst = ToInt32Core(src);
    LoadDoubleHelperArgument(instr, arg);
    this->ChangeToHelperCall(instr, IR::HelperConv_ToInt32Core);

    // $Done
    instrInsert->InsertBefore(labelDone);
}
void
LowererMD::EmitLoadVarNoCheck(IR::RegOpnd * dst, IR::RegOpnd * src, IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Box an int32/uint32 src into a Var dst without a tagged-int fast path:
    // convert to float64 and save it as a number Var.
#ifdef _M_IX86
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        // No SSE2: call a runtime helper to do the boxing instead.
        IR::JnHelperMethod helperMethod;

        // PUSH &floatTemp
        IR::Opnd *tempOpnd;
        if (instrLoad->dstIsTempNumber)
        {
            // The in-place helper variants write into a stack temp number,
            // whose address is passed as the extra first argument.
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtomInPlace : IR::HelperOp_Int32ToAtomInPlace;

            // Use the original dst to get the temp number sym
            StackSym * tempNumberSym = this->m_lowerer->GetTempNumberSym(instrLoad->GetDst(), instrLoad->dstIsTempNumberTransferred);
            IR::Instr *load = this->LoadStackAddress(tempNumberSym);
            instrLoad->InsertBefore(load);
            tempOpnd = load->GetDst();
            this->LoadHelperArgument(instrLoad, tempOpnd);
        }
        else
        {
            helperMethod = isFromUint32 ? IR::HelperOp_UInt32ToAtom : IR::HelperOp_Int32ToAtom;
        }

        // PUSH memContext
        this->m_lowerer->LoadScriptContext(instrLoad);

        // PUSH s1
        this->LoadHelperArgument(instrLoad, src);

        // dst = ToVar()
        IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, dst,
            IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
        instrLoad->InsertBefore(instr);
        this->LowerCall(instr, 0);
        return;
    }
#endif

    // SSE2 available (or non-x86): convert inline and box the double.
    IR::RegOpnd * floatReg = IR::RegOpnd::New(TyFloat64, this->m_func);
    if (isFromUint32)
    {
        this->EmitUIntToFloat(floatReg, src, instrLoad);
    }
    else
    {
        this->EmitIntToFloat(floatReg, src, instrLoad);
    }
    this->SaveDoubleToVar(dst, floatReg, instrLoad, instrLoad, isHelper);
}
  6922. IR::Instr *
  6923. LowererMD::LowerGetCachedFunc(IR::Instr *instr)
  6924. {
  6925. // src1 is an ActivationObjectEx, and we want to get the function object identified by the index (src2)
  6926. // dst = MOV (src1)->GetFuncCacheEntry(src2)->func
  6927. //
  6928. // => [src1 + (offsetof(src1, cache) + (src2 * sizeof(FuncCacheEntry)) + offsetof(FuncCacheEntry, func))]
  6929. IR::IntConstOpnd *src2Opnd = instr->UnlinkSrc2()->AsIntConstOpnd();
  6930. IR::RegOpnd *src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
  6931. instr->m_opcode = Js::OpCode::MOV;
  6932. IntConstType offset = (src2Opnd->GetValue() * sizeof(Js::FuncCacheEntry)) + Js::ActivationObjectEx::GetOffsetOfCache() + offsetof(Js::FuncCacheEntry, func);
  6933. Assert(Math::FitsInDWord(offset));
  6934. instr->SetSrc1(IR::IndirOpnd::New(src1Opnd, (int32)offset, TyVar, this->m_func));
  6935. src2Opnd->Free(this->m_func);
  6936. return instr->m_prev;
  6937. }
IR::Instr *
LowererMD::LowerCommitScope(IR::Instr *instrCommit)
{
    // Lower CommitScope: set the activation object's commit flag to 1, then
    // store undefined into every local var slot past the formals.
    IR::Instr *instrPrev = instrCommit->m_prev;
    IR::RegOpnd *baseOpnd = instrCommit->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd *opnd;
    IR::Instr * insertInstr = instrCommit->m_next;

    // Write undef to all the local var slots.
    // instrCommit itself becomes the "commit flag = 1" byte store.
    opnd = IR::IndirOpnd::New(baseOpnd, Js::ActivationObjectEx::GetOffsetOfCommitFlag(), TyInt8, this->m_func);
    instrCommit->SetDst(opnd);
    instrCommit->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, this->m_func));
    LowererMD::ChangeToAssign(instrCommit);

    const Js::PropertyIdArray *propIds = instrCommit->m_func->GetJITFunctionBody()->GetFormalsPropIdArray();

    uint firstVarSlot = (uint)Js::ActivationObjectEx::GetFirstVarSlot(propIds);
    if (firstVarSlot < propIds->count)
    {
        // undefOpnd = library undefined value (loaded once, stored repeatedly).
        IR::RegOpnd *undefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(undefOpnd, m_lowerer->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueUndefined), insertInstr);

        IR::RegOpnd *slotBaseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);

        // Load a pointer to the aux slots. We assume that all ActivationObject's have only aux slots.
        opnd = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        this->CreateAssign(slotBaseOpnd, opnd, insertInstr);

        // Store undefined into each var slot; the slot index is scaled by the
        // default indir scale (machine-word size).
        for (uint i = firstVarSlot; i < propIds->count; i++)
        {
            opnd = IR::IndirOpnd::New(slotBaseOpnd, i << this->GetDefaultIndirScale(), TyMachReg, this->m_func);
            this->CreateAssign(opnd, undefOpnd, insertInstr);
        }
    }

    return instrPrev;
}
  6968. void
  6969. LowererMD::ImmedSrcToReg(IR::Instr * instr, IR::Opnd * newOpnd, int srcNum)
  6970. {
  6971. if (srcNum == 2)
  6972. {
  6973. instr->SetSrc2(newOpnd);
  6974. }
  6975. else
  6976. {
  6977. Assert(srcNum == 1);
  6978. instr->SetSrc1(newOpnd);
  6979. }
  6980. }
  6981. IR::LabelInstr *
  6982. LowererMD::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
  6983. {
  6984. return lowererMDArch.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  6985. }
  6986. StackSym *
  6987. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot)
  6988. {
  6989. return GetImplicitParamSlotSym(argSlot, this->m_func);
  6990. }
  6991. StackSym *
  6992. LowererMD::GetImplicitParamSlotSym(Js::ArgSlot argSlot, Func * func)
  6993. {
  6994. // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
  6995. // Pass in the EBP+8 to start at the function object, the start of the implicit param slots
  6996. StackSym * stackSym = StackSym::NewImplicitParamSym(argSlot, func);
  6997. func->SetArgOffset(stackSym, (2 + argSlot) * MachPtr);
  6998. func->SetHasImplicitParamLoad();
  6999. return stackSym;
  7000. }
  7001. bool LowererMD::GenerateFastAnd(IR::Instr * instrAnd)
  7002. {
  7003. return this->lowererMDArch.GenerateFastAnd(instrAnd);
  7004. }
  7005. bool LowererMD::GenerateFastXor(IR::Instr * instrXor)
  7006. {
  7007. return this->lowererMDArch.GenerateFastXor(instrXor);
  7008. }
  7009. bool LowererMD::GenerateFastOr(IR::Instr * instrOr)
  7010. {
  7011. return this->lowererMDArch.GenerateFastOr(instrOr);
  7012. }
  7013. bool LowererMD::GenerateFastNot(IR::Instr * instrNot)
  7014. {
  7015. return this->lowererMDArch.GenerateFastNot(instrNot);
  7016. }
  7017. bool LowererMD::GenerateFastShiftLeft(IR::Instr * instrShift)
  7018. {
  7019. return this->lowererMDArch.GenerateFastShiftLeft(instrShift);
  7020. }
  7021. bool LowererMD::GenerateFastShiftRight(IR::Instr * instrShift)
  7022. {
  7023. return this->lowererMDArch.GenerateFastShiftRight(instrShift);
  7024. }
  7025. void LowererMD::GenerateIsDynamicObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool fContinueLabel)
  7026. {
  7027. // CMP [srcReg], Js::DynamicObject::`vtable'
  7028. {
  7029. IR::Instr *cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  7030. cmp->SetSrc1(IR::IndirOpnd::New(regOpnd, 0, TyMachPtr, m_func));
  7031. cmp->SetSrc2(m_lowerer->LoadVTableValueOpnd(insertInstr, VTableValue::VtableDynamicObject));
  7032. insertInstr->InsertBefore(cmp);
  7033. Legalize(cmp);
  7034. }
  7035. if (fContinueLabel)
  7036. {
  7037. // JEQ $fallThough
  7038. IR::Instr * jne = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  7039. insertInstr->InsertBefore(jne);
  7040. }
  7041. else
  7042. {
  7043. // JNE $helper
  7044. IR::Instr * jne = IR::BranchInstr::New(Js::OpCode::JNE, labelHelper, this->m_func);
  7045. insertInstr->InsertBefore(jne);
  7046. }
  7047. }
void LowererMD::GenerateIsRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject)
{
    // Branch to labelHelper unless regOpnd holds a "true" JavaScript object,
    // i.e. its typeId lies in (TypeIds_LastJavascriptPrimitiveType,
    // TypeIds_LastTrueJavascriptObjectType]. A DynamicObject vtable match
    // takes the early fallthrough without the typeId range check.
    //
    // CMP [srcReg], Js::DynamicObject::`vtable'
    // JEQ $fallThough
    // MOV r1, [src1 + offset(type)]                      -- get the type id
    // MOV r1, [r1 + offset(typeId)]
    // ADD r1, ~TypeIds_LastJavascriptPrimitiveType       -- if (typeId > TypeIds_LastJavascriptPrimitiveType && typeId <= TypeIds_LastTrueJavascriptObjectType)
    // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
    // JA $helper
    // fallThrough:
    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    if (checkObjectAndDynamicObject)
    {
        if (!regOpnd->IsNotTaggedValue())
        {
            GenerateObjectTest(regOpnd, insertInstr, labelHelper);
        }

        this->GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
    }

    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);

    // MOV r1, [src1 + offset(type)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, typeRegOpnd, indirOpnd, this->m_func);
        insertInstr->InsertBefore(mov);
    }

    // MOV r1, [r1 + offset(typeId)]
    {
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
        IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, typeIdRegOpnd, indirOpnd, this->m_func);
        insertInstr->InsertBefore(mov);
    }

    // ADD r1, ~TypeIds_LastJavascriptPrimitiveType
    // Adding ~X is subtracting (X + 1): biases the typeId so the valid range
    // starts at 0, allowing a single unsigned compare below.
    {
        IR::Instr * add = IR::Instr::New(Js::OpCode::ADD, typeIdRegOpnd, typeIdRegOpnd, IR::IntConstOpnd::New(~Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func, true), this->m_func);
        insertInstr->InsertBefore(add);
    }

    // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
        cmp->SetSrc1(typeIdRegOpnd);
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeIds_LastTrueJavascriptObjectType - Js::TypeIds_LastJavascriptPrimitiveType - 1, TyInt32, this->m_func));
        insertInstr->InsertBefore(cmp);
    }

    // JA $helper -- unsigned compare, so typeIds below the range wrap around
    // and are also caught.
    {
        IR::Instr * jbe = IR::BranchInstr::New(Js::OpCode::JA, labelHelper, this->m_func);
        insertInstr->InsertBefore(jbe);
    }

    // $fallThrough
    insertInstr->InsertBefore(labelFallthrough);
}
  7101. bool
  7102. LowererMD::GenerateLdThisCheck(IR::Instr * instr)
  7103. {
  7104. //
  7105. // If not a recyclable object, jump to $helper
  7106. // MOV dst, src1 -- return the object itself
  7107. // JMP $fallthrough
  7108. // $helper:
  7109. // (caller generates helper call)
  7110. // $fallthrough:
  7111. //
  7112. IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
  7113. IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  7114. IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  7115. this->GenerateIsRecyclableObject(src1, instr, helper);
  7116. // MOV dst, src1
  7117. if (instr->GetDst() && !instr->GetDst()->IsEqual(src1))
  7118. {
  7119. IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, instr->GetDst(), src1, this->m_func);
  7120. instr->InsertBefore(mov);
  7121. }
  7122. // JMP $fallthrough
  7123. {
  7124. IR::Instr * jmp = IR::BranchInstr::New(Js::OpCode::JMP, fallthrough, this->m_func);
  7125. instr->InsertBefore(jmp);
  7126. }
  7127. // $helper:
  7128. // (caller generates helper call)
  7129. // $fallthrough:
  7130. instr->InsertBefore(helper);
  7131. instr->InsertAfter(fallthrough);
  7132. return true;
  7133. }
// Strict-mode LdThis: return the object itself unless it is an
// ActivationObject, in which case return undefined. Emitted sequence:
//
// TEST src, Js::AtomTag
// JNE $done
// MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
// CMP [typeReg + offsetof(Type::typeid)], TypeIds_ActivationObject
// JEQ $helper
// $done:
// MOV dst, src
// JMP $fallthru
// helper:
// MOV dst, undefined
// $fallthru:
bool
LowererMD::GenerateLdThisStrict(IR::Instr* instr)
{
    IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*helper*/true);

    // Only emit the dst = src copy when dst exists and differs from src1;
    // when no copy is needed, tagged values can jump straight to $fallthru.
    bool assign = instr->GetDst() && !instr->GetDst()->IsEqual(src1);

    // TEST src1, Js::AtomTag
    // JNE $done
    if(!src1->IsNotTaggedValue())
    {
        GenerateObjectTest(src1, instr, assign ? done : fallthru);
    }

    // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
        IR::IndirOpnd::New(src1, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_ActivationObject
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func));
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_ActivationObject, TyInt32, m_func));
        instr->InsertBefore(cmp);
    }

    // JEQ $helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, helper, m_func));

    if (assign)
    {
        // $done:
        // MOV dst, src
        instr->InsertBefore(done);
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, instr->GetDst(), src1, m_func));
    }

    // JMP $fallthru
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, fallthru, m_func));
    instr->InsertBefore(helper);

    if (instr->GetDst())
    {
        // MOV dst, undefined
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, instr->GetDst(),
            m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined), m_func));
    }

    // $fallthru:
    instr->InsertAfter(fallthru);
    return true;
}
// given object instanceof function, functionReg is a register with function,
// objectReg is a register with instance and inlineCache is an InstIsInlineCache.
// We want to generate:
//
// fallback on helper (will patch the inline cache) if function does not match the cache
// MOV dst, Js::false
// CMP functionReg, [&(inlineCache->function)]
// JNE helper
//
// fallback if object is a tagged int
// TEST objectReg, Js::AtomTag
// JNE done
//
// fallback if object's type is not the cached type
// MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
// CMP typeReg, [&(inlineCache->type]
// JNE checkPrimType
// use the cached result and fallthrough
// MOV dst, [&(inlineCache->result)]
// JMP done
// return false if object is a primitive
// $checkPrimType
// CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
// JLE done
//
//
// $helper
// $done
bool
LowererMD::GenerateFastIsInst(IR::Instr * instr)
{
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * checkPrimType = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd * objectSrc;
    IR::RegOpnd * objectReg;
    IR::Opnd * functionSrc;
    IR::RegOpnd * functionReg;
    intptr_t inlineCache;
    IR::Instr * instrArg;

    // We are going to use the extra ArgOut_A instructions to lower the helper call later,
    // so we leave them alone here and clean them up then.
    // src1 is the inline cache index; src2 heads the ArgOut_A chain:
    // first link carries the object, second link carries the function.
    inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(instr->GetSrc1()->AsIntConstOpnd()->AsUint32());
    Assert(instr->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;

    objectSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;

    functionSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2() == nullptr);

    // MOV dst, Js::false
    Lowerer::InsertMove(instr->GetDst(), m_lowerer->LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);

    // Ensure the function value is in a register for the CMP below.
    if (functionSrc->IsRegOpnd())
    {
        functionReg = functionSrc->AsRegOpnd();
    }
    else
    {
        functionReg = IR::RegOpnd::New(TyMachReg, this->m_func);

        // MOV functionReg, functionSrc
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, functionReg, functionSrc, m_func));
    }

    // CMP functionReg, [&(inlineCache->function)]
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(functionReg);
        cmp->SetSrc2(IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfFunction(), TyMachReg, m_func,
            IR::AddrOpndKindDynamicIsInstInlineCacheFunctionRef));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE helper
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, helper, m_func));

    // Ensure the object value is in a register as well.
    if (objectSrc->IsRegOpnd())
    {
        objectReg = objectSrc->AsRegOpnd();
    }
    else
    {
        objectReg = IR::RegOpnd::New(TyMachReg, this->m_func);

        // MOV objectReg, objectSrc
        instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, objectReg, objectSrc, m_func));
    }

    // TEST objectReg, Js::AtomTag
    // JNE done
    GenerateObjectTest(objectReg, instr, done);

    // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
    instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
        IR::IndirOpnd::New(objectReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        m_func));

    // CMP typeReg, [&(inlineCache->type]
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(typeReg);
        cmp->SetSrc2(IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfType(), TyMachReg, m_func,
            IR::AddrOpndKindDynamicIsInstInlineCacheTypeRef));
        instr->InsertBefore(cmp);
        Legalize(cmp);
    }

    // JNE checkPrimType
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JNE, checkPrimType, m_func));

    // MOV dst, [&(inlineCache->result)]
    Lowerer::InsertMove(instr->GetDst(), IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfResult(), TyMachReg, m_func,
        IR::AddrOpndKindDynamicIsInstInlineCacheResultRef), instr);

    // JMP done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));

    // LABEL checkPrimType
    instr->InsertBefore(checkPrimType);

    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    {
        IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, m_func);
        cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func));
        cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, m_func));
        instr->InsertBefore(cmp);
    }

    // JLE done
    instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JLE, done, m_func));

    // LABEL helper
    instr->InsertBefore(helper);
    instr->InsertAfter(done);

    return true;
}
  7317. void LowererMD::GenerateIsJsObjectTest(IR::RegOpnd* instanceReg, IR::Instr* insertInstr, IR::LabelInstr* labelHelper)
  7318. {
  7319. // TEST instanceReg, (Js::AtomTag_IntPtr | Js::FloatTag_Value )
  7320. GenerateObjectTest(instanceReg, insertInstr, labelHelper);
  7321. IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  7322. // MOV typeReg, instanceReg + offsetof(RecyclableObject::type)
  7323. insertInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, typeReg,
  7324. IR::IndirOpnd::New(instanceReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
  7325. m_func));
  7326. // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
  7327. IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, this->m_func);
  7328. cmp->SetSrc1(IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func));
  7329. cmp->SetSrc2(IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func));
  7330. insertInstr->InsertBefore(cmp);
  7331. // JLE labelHelper
  7332. insertInstr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JLE, labelHelper, this->m_func));
  7333. }
void
LowererMD::EmitReinterpretPrimitive(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
{
    // Reinterpret the raw bits of src as dst's type (no numeric conversion).
    // Sizes must match and types must differ; release builds fall back to a
    // plain move if those preconditions are violated.
    Assert(dst && src);
    Assert(dst->GetSize() == src->GetSize());
    Assert(dst->GetType() != src->GetType());
    if (
        // Additional runtime check to prevent unknown behavior
        (dst->GetSize() != src->GetSize()) ||
        // There is nothing to do in this case
        (dst->GetType() == src->GetType())
    )
    {
        Lowerer::InsertMove(dst, src, insertBeforeInstr);
        return;
    }

    // Legalize the instruction's operand forms, then insert it.
    auto LegalizeInsert = [insertBeforeInstr](IR::Instr* instr)
    {
        Legalize(instr);
        insertBeforeInstr->InsertBefore(instr);
    };

    if (dst->GetSize() == 8)
    {
        // 64-bit reinterprets.
#if _M_AMD64
        // Single MOVQ between a GP register and an XMM register.
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVQ, dst, src, m_func));
#elif LOWER_SPLIT_INT64
        if (dst->IsInt64())
        {
            // float64 -> int64 register pair:
            // movd xmm2, xmm1
            // movd low_bits, xmm2
            // shufps xmm2, xmm2, 1
            // movd high_bits, xmm2
            Assert(src->IsFloat64());
            Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(dst);

            // shufps modifies the register, we shouldn't change the source here
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            this->CreateAssign(tmpDouble, src, insertBeforeInstr);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.low, tmpDouble, m_func));
            // shuffle control 1 moves element 1 (the high 32 bits) into lane 0
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, tmpDouble, tmpDouble, IR::IntConstOpnd::New(1, TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dstPair.high, tmpDouble, m_func));
        }
        else
        {
            // int64 register pair -> float64:
            // movd xmm0, lowBits;
            // movd xmm1, highBits;
            // shufps xmm0, xmm1, (0 | 2 << 2 | 0 << 4 | 1 << 6);
            // shufps xmm0, xmm0, (0 | 2 << 2 | 3 << 4 | 3 << 6);
            Assert(src->IsInt64());
            Assert(dst->IsFloat64());
            Int64RegPair srcPair = m_func->FindOrCreateInt64Pair(src);
            IR::RegOpnd* tmpDouble = IR::RegOpnd::New(TyFloat64, m_func);
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, srcPair.low, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, tmpDouble, srcPair.high, m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, tmpDouble, IR::IntConstOpnd::New((0 | 2 << 2 | 0 << 4 | 1 << 6), TyInt8, m_func, true), m_func));
            LegalizeInsert(IR::Instr::New(Js::OpCode::SHUFPS, dst, dst, IR::IntConstOpnd::New((0 | 2 << 2 | 3 << 4 | 3 << 6), TyInt8, m_func, true), m_func));
        }
#endif
    }
    else if (dst->GetSize() == 4)
    {
        // 32bit reinterprets
        LegalizeInsert(IR::Instr::New(Js::OpCode::MOVD, dst, src, m_func));
    }
    else
    {
        Assert(UNREACHED);
    }
}
  7402. void LowererMD::EmitReinterpretFloatToInt(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  7403. {
  7404. Assert(dst->IsInt32() || dst->IsUInt32() || dst->IsInt64());
  7405. Assert(src->IsFloat());
  7406. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  7407. }
  7408. void LowererMD::EmitReinterpretIntToFloat(IR::Opnd* dst, IR::Opnd* src, IR::Instr* insertBeforeInstr)
  7409. {
  7410. Assert(dst->IsFloat());
  7411. Assert(src->IsInt32() || src->IsUInt32() || src->IsInt64());
  7412. EmitReinterpretPrimitive(dst, src, insertBeforeInstr);
  7413. }
  7414. IR::Instr *
  7415. LowererMD::LowerInt64Assign(IR::Instr * instr)
  7416. {
  7417. return this->lowererMDArch.LowerInt64Assign(instr);
  7418. }
  7419. IR::Instr *
  7420. LowererMD::LowerToFloat(IR::Instr *instr)
  7421. {
  7422. switch (instr->m_opcode)
  7423. {
  7424. case Js::OpCode::Add_A:
  7425. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7426. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7427. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS;
  7428. break;
  7429. case Js::OpCode::Sub_A:
  7430. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7431. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7432. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SUBSD : Js::OpCode::SUBSS;
  7433. break;
  7434. case Js::OpCode::Mul_A:
  7435. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7436. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7437. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::MULSD : Js::OpCode::MULSS;
  7438. break;
  7439. case Js::OpCode::Div_A:
  7440. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7441. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  7442. instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::DIVSD : Js::OpCode::DIVSS;
  7443. break;
  7444. case Js::OpCode::Neg_A:
  7445. {
  7446. IR::Opnd *opnd;
  7447. instr->m_opcode = Js::OpCode::XORPS;
  7448. if (instr->GetDst()->IsFloat32())
  7449. {
  7450. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegFloatAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
  7451. }
  7452. else
  7453. {
  7454. Assert(instr->GetDst()->IsFloat64());
  7455. opnd = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetMaskNegDoubleAddr(), TyMachDouble, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
  7456. }
  7457. instr->SetSrc2(opnd);
  7458. Legalize(instr);
  7459. break;
  7460. }
  7461. case Js::OpCode::BrEq_A:
  7462. case Js::OpCode::BrNeq_A:
  7463. case Js::OpCode::BrSrEq_A:
  7464. case Js::OpCode::BrSrNeq_A:
  7465. case Js::OpCode::BrGt_A:
  7466. case Js::OpCode::BrGe_A:
  7467. case Js::OpCode::BrLt_A:
  7468. case Js::OpCode::BrLe_A:
  7469. case Js::OpCode::BrNotEq_A:
  7470. case Js::OpCode::BrNotNeq_A:
  7471. case Js::OpCode::BrSrNotEq_A:
  7472. case Js::OpCode::BrSrNotNeq_A:
  7473. case Js::OpCode::BrNotGt_A:
  7474. case Js::OpCode::BrNotGe_A:
  7475. case Js::OpCode::BrNotLt_A:
  7476. case Js::OpCode::BrNotLe_A:
  7477. return this->LowerFloatCondBranch(instr->AsBranchInstr());
  7478. default:
  7479. Assume(UNREACHED);
  7480. }
  7481. this->MakeDstEquSrc1(instr);
  7482. return instr;
  7483. }
///----------------------------------------------------------------------------
/// LowerFloatCondBranch
///
/// Lower a float-typed conditional branch (BrEq_A, BrLt_A, etc.) to an SSE2
/// compare (COMISD/COMISS for relational, UCOMISD/UCOMISS for equality)
/// followed by the matching unsigned-condition jump, choosing opcodes and
/// operand order so that NaN operands behave correctly.
///
///     instrBranch - float branch to lower; src1/src2 are same-typed float opnds
///     ignoreNan   - when true, omit the extra JP used by the (in)equality
///                   cases to route NaN compares (callers use this when NaN
///                   is known impossible or irrelevant)
///
/// Returns the final Jcc branch instruction that replaces instrBranch.
///----------------------------------------------------------------------------
IR::BranchInstr *
LowererMD::LowerFloatCondBranch(IR::BranchInstr *instrBranch, bool ignoreNan)
{
    Js::OpCode brOpcode = Js::OpCode::InvalidOpCode;
    Js::OpCode cmpOpcode = Js::OpCode::InvalidOpCode;
    IR::Instr *instr;
    bool swapCmpOpnds = false;
    bool addJP = false;
    IR::LabelInstr *labelNaN = nullptr;

    // Generate float compare that behave correctly for NaN's.
    // A compare against NaN sets ZF/PF/CF (the "unordered" result), so:
    // These branch on unordered:
    //      JB
    //      JBE
    //      JE
    // These don't branch on unordered:
    //      JA
    //      JAE
    //      JNE
    // Unfortunately, only JA and JAE do what we'd like....
    // Hence the operand swaps below: "<" and "<=" are rewritten as ">" and ">="
    // with swapped operands so the NaN-safe JA/JAE can be used.
    Func * func = instrBranch->m_func;
    IR::Opnd *src1 = instrBranch->UnlinkSrc1();
    IR::Opnd *src2 = instrBranch->UnlinkSrc2();
    Assert(src1->GetType() == src2->GetType());

    switch (instrBranch->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JEQ;

        if (!ignoreNan)
        {
            // Don't jump on NaN's: JEQ would also fire on unordered (ZF=1),
            // so route NaN (PF=1) to the fall-through label first.
            labelNaN = instrBranch->GetOrCreateContinueLabel();
            addJP = true;
        }
        break;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::UCOMISD : Js::OpCode::UCOMISS;
        brOpcode = Js::OpCode::JNE;

        if (!ignoreNan)
        {
            // Jump on NaN's: x != NaN is true, so NaN goes straight to the target.
            labelNaN = instrBranch->GetTarget();
            addJP = true;
        }
        break;

    case Js::OpCode::BrLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JAE;
        break;

    case Js::OpCode::BrLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JA;
        break;

    case Js::OpCode::BrGe_A:
        brOpcode = Js::OpCode::JAE;
        break;

    case Js::OpCode::BrGt_A:
        brOpcode = Js::OpCode::JA;
        break;

    // The "Not" relational forms intentionally use JB/JBE, which DO branch on
    // unordered: !(a cmp b) is true when either operand is NaN.
    case Js::OpCode::BrNotLe_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JB;
        break;

    case Js::OpCode::BrNotLt_A:
        swapCmpOpnds = true;
        brOpcode = Js::OpCode::JBE;
        break;

    case Js::OpCode::BrNotGe_A:
        brOpcode = Js::OpCode::JB;
        break;

    case Js::OpCode::BrNotGt_A:
        brOpcode = Js::OpCode::JBE;
        break;

    default:
        Assume(UNREACHED);
    }

    // if we haven't set cmpOpcode, then we are using COMISD/COMISS
    if (cmpOpcode == Js::OpCode::InvalidOpCode)
    {
        cmpOpcode = src1->IsFloat64() ? Js::OpCode::COMISD : Js::OpCode::COMISS;
    }

    if (swapCmpOpnds)
    {
        IR::Opnd *tmp = src1;
        src1 = src2;
        src2 = tmp;
    }

    // VC generates UCOMISD for BrEq/BrNeq, and COMISD for all others, accordingly to IEEE 754.
    // We'll do the same.
    //
    // COMISD / UCOMISD src1, src2
    IR::Instr *instrCmp = IR::Instr::New(cmpOpcode, func);

    instrCmp->SetSrc1(src1);
    instrCmp->SetSrc2(src2);
    instrBranch->InsertBefore(instrCmp);
    Legalize(instrCmp);

    if (addJP)
    {
        // JP $LabelNaN  (PF=1 means the compare was unordered, i.e. a NaN operand)
        instr = IR::BranchInstr::New(Js::OpCode::JP, labelNaN, func);
        instrBranch->InsertBefore(instr);
    }

    // Jcc $L
    instr = IR::BranchInstr::New(brOpcode, instrBranch->GetTarget(), func);
    instrBranch->InsertBefore(instr);

    instrBranch->Remove();
    return instr->AsBranchInstr();
}
  7597. void LowererMD::HelperCallForAsmMathBuiltin(IR::Instr* instr, IR::JnHelperMethod helperMethodFloat, IR::JnHelperMethod helperMethodDouble)
  7598. {
  7599. Assert(instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::Trunc_A || instr->m_opcode == Js::OpCode::Nearest_A);
  7600. AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
  7601. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  7602. Assert(!instr->GetSrc2());
  7603. IR::Opnd * argOpnd = instr->UnlinkSrc1();
  7604. IR::JnHelperMethod helperMethod;
  7605. uint dwordCount;
  7606. if (argOpnd->IsFloat32())
  7607. {
  7608. helperMethod = helperMethodFloat;
  7609. LoadFloatHelperArgument(instr, argOpnd);
  7610. dwordCount = 1;
  7611. }
  7612. else
  7613. {
  7614. helperMethod = helperMethodDouble;
  7615. LoadDoubleHelperArgument(instr, argOpnd);
  7616. dwordCount = 2;
  7617. }
  7618. instr->m_opcode = Js::OpCode::CALL;
  7619. IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperMethod, this->lowererMDArch.GetHelperArgsCount(), m_func);
  7620. instr->SetSrc1(helperCallOpnd);
  7621. this->lowererMDArch.LowerCall(instr, dwordCount);
  7622. }
///----------------------------------------------------------------------------
/// GenerateFastInlineBuiltInCall
///
/// Lower an inlined Math builtin (sqrt/abs/pow/floor/ceil/round/min/max and
/// the transcendental CRT-backed ones) to machine-level IR. Depending on the
/// opcode this either rewrites the instruction in place as an SSE op, expands
/// it into an inline fast path with bailout support, or emits a direct CRT
/// helper call with XMM-register argument passing.
///
///     instr        - the inline builtin instruction to lower
///     helperMethod - CRT helper for the transcendental cases; must be 0 for
///                    the opcodes that lower without a helper (sqrt/abs)
///----------------------------------------------------------------------------
void LowererMD::GenerateFastInlineBuiltInCall(IR::Instr* instr, IR::JnHelperMethod helperMethod)
{
    switch (instr->m_opcode)
    {
    case Js::OpCode::InlineMathSqrt:
        // Sqrt maps directly to the SSE2 instruction.
        // src and dst should already be XMM registers, all we need is just change the opcode.
        Assert(helperMethod == (IR::JnHelperMethod)0);
        Assert(instr->GetSrc2() == nullptr);
        instr->m_opcode = instr->GetSrc1()->IsFloat64() ? Js::OpCode::SQRTSD : Js::OpCode::SQRTSS;
        break;

    case Js::OpCode::InlineMathAbs:
        Assert(helperMethod == (IR::JnHelperMethod)0);
        return GenerateFastInlineBuiltInMathAbs(instr);

    case Js::OpCode::InlineMathPow:
#ifdef _M_IX86
        // On x86, pow with a float exponent can't use the generic XMM-arg CRT
        // call below, so only the non-float-src2 shape takes the fast path here;
        // the float case falls through to the CRT-call lowering.
        if (!instr->GetSrc2()->IsFloat())
        {
#endif
            this->GenerateFastInlineBuiltInMathPow(instr);
            break;
#ifdef _M_IX86
        }
        // fallthrough
#endif

    case Js::OpCode::InlineMathAcos:
    case Js::OpCode::InlineMathAsin:
    case Js::OpCode::InlineMathAtan:
    case Js::OpCode::InlineMathAtan2:
    case Js::OpCode::InlineMathCos:
    case Js::OpCode::InlineMathExp:
    case Js::OpCode::InlineMathLog:
    case Js::OpCode::Expo_A: //** operator reuses InlineMathPow fastpath
    case Js::OpCode::InlineMathSin:
    case Js::OpCode::InlineMathTan:
        {
            AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
            AssertMsg(instr->GetSrc1()->IsFloat(), "src1 must be float.");
            AssertMsg(!instr->GetSrc2() || instr->GetSrc2()->IsFloat(), "src2 must be float.");

            // Before:
            //      dst = <Built-in call> src1, src2
            // After:
            // I386:
            //      XMM0 = MOVSD src1
            //             CALL helperMethod
            //      dst  = MOVSD call->dst
            // AMD64:
            //      XMM0 = MOVSD src1
            //      RAX =  MOV helperMethod
            //             CALL RAX
            //      dst =  MOVSD call->dst

            // Src1: move it into the first float argument register.
            IR::Instr* argOut = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
            IR::RegOpnd* dst1 = IR::RegOpnd::New(nullptr, (RegNum)FIRST_FLOAT_ARG_REG, TyMachDouble, this->m_func);
            dst1->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
            argOut->SetDst(dst1);
            argOut->SetSrc1(instr->UnlinkSrc1());
            instr->InsertBefore(argOut);

            // Src2: only the two-argument builtins (atan2, pow) have one.
            if (instr->GetSrc2() != nullptr)
            {
                IR::Instr* argOut2 = IR::Instr::New(Js::OpCode::MOVSD, this->m_func);
                IR::RegOpnd* dst2 = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_ARG_REG + 1), TyMachDouble, this->m_func);
                dst2->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
                argOut2->SetDst(dst2);
                argOut2->SetSrc1(instr->UnlinkSrc2());
                instr->InsertBefore(argOut2);
            }

            // Call CRT.
            IR::RegOpnd* floatCallDst = IR::RegOpnd::New(nullptr, (RegNum)(FIRST_FLOAT_REG), TyMachDouble, this->m_func);   // Dst in XMM0.
#ifdef _M_IX86
            IR::Instr* floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, this->m_func);
            floatCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
            instr->InsertBefore(floatCall);
#else
            // s1 = MOV helperAddr
            // (64-bit targets load the helper address into a register first so
            // the CALL has a reachable target regardless of code placement.)
            IR::RegOpnd* s1 = IR::RegOpnd::New(TyMachReg, this->m_func);
            IR::AddrOpnd* helperAddr = IR::AddrOpnd::New((Js::Var)IR::GetMethodOriginalAddress(m_func->GetThreadContextInfo(), helperMethod), IR::AddrOpndKind::AddrOpndKindDynamicMisc, this->m_func);
            IR::Instr* mov = IR::Instr::New(Js::OpCode::MOV, s1, helperAddr, this->m_func);
            instr->InsertBefore(mov);

            // dst(XMM0) = CALL s1
            IR::Instr *floatCall = IR::Instr::New(Js::OpCode::CALL, floatCallDst, s1, this->m_func);
            instr->InsertBefore(floatCall);
#endif
            instr->m_func->SetHasCalls();

            // Save the result: the original instr becomes the move of XMM0 into dst.
            instr->m_opcode = Js::OpCode::MOVSD;
            instr->SetSrc1(floatCall->GetDst());
            break;
        }

    case Js::OpCode::InlineMathFloor:
    case Js::OpCode::InlineMathCeil:
    case Js::OpCode::InlineMathRound:
#ifdef ENABLE_WASM
    case Js::OpCode::Trunc_A:
    case Js::OpCode::Nearest_A:
#endif //ENABLE_WASM
        {
            Assert(AutoSystemInfo::Data.SSE4_1Available());
            Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsFloat());

            // Emitted code shape (SSE4.1 ROUNDSD-based fast path):
            //
            // MOVSD roundedFloat, src
            //
            // if(round)
            // {
            //      /* N.B.: the following CMPs are lowered to COMISDs, whose results can only be >, <, or =.
            //         In fact, only ">" can be used if NaN has not been handled.
            //      */
            //      CMP 0.5, roundedFloat
            //      JA $ltHalf
            //      CMP TwoToFraction, roundedFloat
            //      JA $addHalfToRoundSrcLabel
            //      J $skipRoundSd (NaN is also handled here)
            // $ltHalf:
            //      CMP roundedFloat, -0.5
            //      JL $ltNegHalf
            //      if (shouldCheckNegZero) {
            //          CMP roundedFloat, 0
            //          JA $setZero
            //      $negZeroTest [Helper]:
            //          JB $bailoutLabel
            //          isNegZero(src)
            //          JE $bailoutLabel
            //          J $skipRoundSd
            //      } // else: setZero
            // $setZero:
            //      MOV roundedFloat, 0
            //      J $skipRoundSd
            // $ltNegHalf:
            //      CMP roundedFloat, NegTwoToFraction
            //      JA $addHalfToRoundSrc
            //      J $skipRoundSd
            // $addHalfToRoundSrc:
            //      ADDSD roundedFloat, 0.5
            // $skipAddHalf:
            // }
            //
            // if(isNotCeil)
            // {
            //      CMP roundedFloat, 0
            //      JGE $skipRoundSd
            // }
            // ROUNDSD roundedFloat, roundedFloat, round_mode
            //
            // $skipRoundSd:
            //      if(isNotCeil)
            //          MOVSD checkNegZeroOpnd, roundedFloat
            //      else if (ceil)
            //          MOVSD checkNegZeroOpnd, src
            //
            //      CMP checkNegZeroOpnd, 0
            //      JNE $convertToInt
            //
            // if(instr->ShouldCheckForNegativeZero())
            // {
            //      isNegZero CALL IsNegZero(checkNegZeroOpnd)
            //      CMP isNegZero, 0
            //      JNE $bailoutLabel
            // }
            //
            // $convertToInt:
            //      CVT(T)SD2SI dst, roundedFloat //CVTTSD2SI for floor/round and CVTSD2SI for ceil
            //      CMP dst 0x80000000
            //      JNE $fallthrough
            //
            // if(!sharedBailout)
            // {
            //      $bailoutLabel:
            // }
            //      GenerateBailout(instr)
            //
            // $fallthrough:

            bool isNotCeil = instr->m_opcode != Js::OpCode::InlineMathCeil;

            // MOVSD roundedFloat, src — work on a copy so src survives for the
            // negative-zero check later.
            IR::Opnd * src = instr->UnlinkSrc1();
            IR::RegOpnd* roundedFloat = IR::RegOpnd::New(src->GetType(), this->m_func);
            IR::Instr* argOut = IR::Instr::New(LowererMDArch::GetAssignOp(src->GetType()), roundedFloat, src, this->m_func);
            instr->InsertBefore(argOut);
            bool negZeroCheckDone = false;

            // An int32 dst means this instruction carries bailout info: overflow
            // and (optionally) negative zero will bail out instead of returning a float.
            IR::LabelInstr * bailoutLabel = nullptr;
            bool sharedBailout = false;
            if (instr->GetDst()->IsInt32())
            {
                sharedBailout = (instr->GetBailOutInfo()->bailOutInstr != instr) ? true : false;
                bailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/true);
            }

            IR::Opnd * zero;
            if (src->IsFloat64())
            {
                zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
            }
            else
            {
                Assert(src->IsFloat32());
                zero = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatZeroAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
            }
            IR::AutoReuseOpnd autoReuseZero(zero, this->m_func);

            IR::LabelInstr * skipRoundSd = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            if (instr->m_opcode == Js::OpCode::InlineMathRound)
            {
                // Math.round is lowered as floor(x + 0.5) plus special handling
                // for tiny magnitudes, negative zero, and values too large for
                // the +0.5 to be representable.
                IR::LabelInstr * addHalfToRoundSrcLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                IR::LabelInstr * ltHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                IR::LabelInstr * setZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                IR::LabelInstr * ltNegHalf = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

                IR::Opnd * pointFive;
                IR::Opnd * negPointFive;
                if (src->IsFloat64())
                {
                    pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                    negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegPointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                }
                else
                {
                    Assert(src->IsFloat32());
                    pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                    negPointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                }

                // CMP 0.5, roundedFloat
                // JA $ltHalf
                this->m_lowerer->InsertCompareBranch(pointFive, roundedFloat, Js::OpCode::BrGt_A, ltHalf, instr);

                if (instr->GetDst()->IsInt32())
                {
                    // if we are specializing dst to int, we will bailout on overflow so don't need upperbound check
                    // Also, we will bailout on NaN, so it doesn't need special handling either
                    // J $addHalfToRoundSrcLabel
                    this->m_lowerer->InsertBranch(Js::OpCode::Br, addHalfToRoundSrcLabel, instr);
                }
                else
                {
                    IR::Opnd * twoToFraction;
                    if (src->IsFloat64())
                    {
                        twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                    }
                    else
                    {
                        Assert(src->IsFloat32());
                        twoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                    }
                    // CMP 2^fraction, roundedFloat
                    // JA $addHalfToRoundSrcLabel
                    // (values >= 2^fraction are already integral, so +0.5 must be skipped)
                    this->m_lowerer->InsertCompareBranch(twoToFraction, roundedFloat, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
                    // J $skipRoundSd (NaN also handled here)
                    this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
                }
                // $ltHalf:
                instr->InsertBefore(ltHalf);
                // CMP roundedFloat, -0.5
                // JL $ltNegHalf
                this->m_lowerer->InsertCompareBranch(roundedFloat, negPointFive, Js::OpCode::BrLt_A, ltNegHalf, instr);
                if (instr->ShouldCheckForNegativeZero())
                {
                    // Input in (-0.5, 0.5): result is +0 or -0; -0 must bail out
                    // because the int-specialized result can't represent it.
                    // CMP roundedFloat, 0
                    // JA $setZero
                    this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGt_A, setZero, instr);
                    // $negZeroTest [helper]
                    m_lowerer->InsertLabel(true, instr);
                    // JB $bailoutLabel
                    this->m_lowerer->InsertBranch(Js::OpCode::JB, bailoutLabel, instr);
                    // if isNegZero(src) J $bailoutLabel else J $skipRoundSd
                    NegZeroBranching(src, instr, bailoutLabel, skipRoundSd);
                    negZeroCheckDone = true;
                }
                // $setZero:
                instr->InsertBefore(setZero);
                // MOVSD_ZERO roundedFloat
                LoadFloatZero(roundedFloat, instr);
                // J $skipRoundSd
                this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
                // $ltNegHalf:
                instr->InsertBefore(ltNegHalf);
                if (!instr->GetDst()->IsInt32())
                {
                    // if we are specializing dst to int, we will bailout on overflow so don't need lowerbound check
                    IR::Opnd * negTwoToFraction;
                    if (src->IsFloat64())
                    {
                        negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNegTwoToFractionAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                    }
                    else
                    {
                        Assert(src->IsFloat32());
                        negTwoToFraction = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNegTwoToFractionAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                    }
                    // CMP roundedFloat, negTwoToFraction
                    // JA $addHalfToRoundSrcLabel
                    this->m_lowerer->InsertCompareBranch(roundedFloat, negTwoToFraction, Js::OpCode::BrGt_A, addHalfToRoundSrcLabel, instr);
                    // J $skipRoundSd
                    this->m_lowerer->InsertBranch(Js::OpCode::Br, skipRoundSd, instr);
                }
                // Re-materialize the 0.5 operand: the earlier MemRefOpnd was
                // consumed by InsertCompareBranch above.
                if (src->IsFloat64())
                {
                    pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoublePointFiveAddr(), TyFloat64, this->m_func, IR::AddrOpndKindDynamicDoubleRef);
                }
                else
                {
                    Assert(src->IsFloat32());
                    pointFive = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatPointFiveAddr(), TyFloat32, this->m_func, IR::AddrOpndKindDynamicFloatRef);
                }
                // $addHalfToRoundSrcLabel
                instr->InsertBefore(addHalfToRoundSrcLabel);
                // ADDSD roundedFloat, 0.5
                IR::Instr * addInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ADDSD : Js::OpCode::ADDSS, roundedFloat, roundedFloat, pointFive, this->m_func);
                instr->InsertBefore(addInstr);
                Legalize(addInstr);
            }

            if (instr->m_opcode == Js::OpCode::InlineMathFloor && instr->GetDst()->IsInt32())
            {
                // Non-negative floor inputs truncate correctly in CVTTSD2SI below,
                // so ROUNDSD can be skipped for them.
                this->m_lowerer->InsertCompareBranch(roundedFloat, zero, Js::OpCode::BrGe_A, skipRoundSd, instr);
            }

            // ROUNDSD srcCopy, srcCopy, round_mode
            // Immediate encodings: 0x00 = nearest-even, 0x01 = toward -inf (floor),
            // 0x02 = toward +inf (ceil), 0x03 = toward zero (trunc).
            IR::Opnd * roundMode = nullptr;
            switch (instr->m_opcode)
            {
#ifdef ENABLE_WASM
            case Js::OpCode::Trunc_A:
                roundMode = IR::IntConstOpnd::New(0x03, TyInt32, this->m_func);
                break;
            case Js::OpCode::Nearest_A:
                roundMode = IR::IntConstOpnd::New(0x00, TyInt32, this->m_func);
                break;
#endif //ENABLE_WASM
            case Js::OpCode::InlineMathRound:
            case Js::OpCode::InlineMathFloor:
                roundMode = IR::IntConstOpnd::New(0x01, TyInt32, this->m_func);
                break;
            case Js::OpCode::InlineMathCeil:
                roundMode = IR::IntConstOpnd::New(0x02, TyInt32, this->m_func);
                break;
            }
            IR::Instr* roundInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::ROUNDSD : Js::OpCode::ROUNDSS, roundedFloat, roundedFloat, roundMode, this->m_func);
            instr->InsertBefore(roundInstr);

            // skipRoundSd is only branched to from the Round fast path and the
            // Floor-with-int32-dst shortcut above; insert it for exactly those cases.
            if (instr->m_opcode == Js::OpCode::InlineMathRound)
            {
                instr->InsertBefore(skipRoundSd);
            }

            if (instr->GetDst()->IsInt32())
            {
                if (instr->m_opcode == Js::OpCode::InlineMathFloor)
                {
                    instr->InsertBefore(skipRoundSd);
                }

                //negZero bailout
                if (instr->ShouldCheckForNegativeZero() && !negZeroCheckDone)
                {
                    IR::LabelInstr * convertToInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                    // For floor/trunc/nearest the rounded value hides -0's sign,
                    // so check the original src; for ceil the rounded value suffices.
                    IR::Opnd * checkNegZeroOpnd = isNotCeil ? src : roundedFloat;
                    this->m_lowerer->InsertCompareBranch(checkNegZeroOpnd, zero, Js::OpCode::BrNeq_A, convertToInt, instr);
                    m_lowerer->InsertLabel(true, instr);
                    NegZeroBranching(checkNegZeroOpnd, instr, bailoutLabel, convertToInt);
                    instr->InsertBefore(convertToInt);
                }

                IR::Opnd * originalDst = instr->UnlinkDst();

                // CVT(T)SD2SI dst, srcCopy
                IR::Instr* convertToIntInstr;
                if (isNotCeil)
                {
                    convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTTSD2SI : Js::OpCode::CVTTSS2SI, originalDst, roundedFloat, this->m_func);
                }
                else
                {
                    convertToIntInstr = IR::Instr::New(src->IsFloat64() ? Js::OpCode::CVTSD2SI : Js::OpCode::CVTSS2SI, originalDst, roundedFloat, this->m_func);
                }
                instr->InsertBefore(convertToIntInstr);

                // CVT(T)SD2SI produces INT32_MIN (0x80000000) for out-of-range/NaN
                // inputs; treat that sentinel as overflow and bail out.
                IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                IR::Opnd * intOverflowValue = IR::IntConstOpnd::New(INT32_MIN, IRType::TyInt32, this->m_func, true);
                this->m_lowerer->InsertCompareBranch(originalDst, intOverflowValue, Js::OpCode::BrNeq_A, fallthrough, instr);

                instr->InsertAfter(fallthrough);
                if (!sharedBailout)
                {
                    instr->InsertBefore(bailoutLabel);
                }

                // In case of a shared bailout, we should jump to the code that sets some data on the bailout record which is specific
                // to this bailout. Pass the bailoutLabel to GenerateFunction so that it may use the label as the collectRuntimeStatsLabel.
                this->m_lowerer->GenerateBailOut(instr, nullptr, nullptr, sharedBailout ? bailoutLabel : nullptr);
            }
            else
            {
                // Float dst: just move the rounded value into place and drop the
                // original instruction.
                IR::Opnd * originalDst = instr->UnlinkDst();
                Assert(originalDst->IsFloat());
                Assert(originalDst->GetType() == roundedFloat->GetType());
                IR::Instr * movInstr = IR::Instr::New(originalDst->IsFloat64() ? Js::OpCode::MOVSD : Js::OpCode::MOVSS, originalDst, roundedFloat, this->m_func);
                instr->InsertBefore(movInstr);
                instr->Remove();
            }
            break;
        }

    case Js::OpCode::InlineMathMin:
    case Js::OpCode::InlineMathMax:
        {
            IR::Opnd* src1 = instr->GetSrc1();
            IR::Opnd* src2 = instr->GetSrc2();
            IR::Opnd* dst = instr->GetDst();
            IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            IR::LabelInstr* labelNaNHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            IR::LabelInstr* labelNegZeroAndNaNCheckHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            IR::Instr* branchInstr;
            bool min = instr->m_opcode == Js::OpCode::InlineMathMin ? true : false;

            // CMP src1, src2
            if (dst->IsInt32())
            {
                // Integer min/max: start with src2 in dst and overwrite with src1
                // unless src1 loses the comparison.
                //MOV dst, src2;
                Assert(!dst->IsEqual(src2));
                this->m_lowerer->InsertMove(dst, src2, instr);
                if (min)
                {
                    // JLT $continueLabel
                    branchInstr = IR::BranchInstr::New(Js::OpCode::BrGt_I4, doneLabel, src1, src2, instr->m_func);
                    instr->InsertBefore(branchInstr);
                    LowererMDArch::EmitInt4Instr(branchInstr);
                }
                else
                {
                    // JGT $continueLabel
                    branchInstr = IR::BranchInstr::New(Js::OpCode::BrLt_I4, doneLabel, src1, src2, instr->m_func);
                    instr->InsertBefore(branchInstr);
                    LowererMDArch::EmitInt4Instr(branchInstr);
                }
                // MOV dst, src1
                this->m_lowerer->InsertMove(dst, src1, instr);
            }
            else if (dst->IsFloat())
            {
                // Float min/max must treat NaN (result is NaN) and the
                // -0 vs +0 ordering specially, per the JS spec:
                //
                //      COMISD/COMISS src1 (src2), src2 (src1)
                //      JA $doneLabel
                //      JEQ $labelNegZeroAndNaNCheckHelper
                //      MOVSD/MOVSS dst, src2
                //      JMP $doneLabel
                //
                // $labelNegZeroAndNaNCheckHelper
                //      JP $labelNaNHelper
                //      if(min)
                //      {
                //          if(src2 == -0.0)
                //              MOVSD/MOVSS dst, src2
                //      }
                //      else
                //      {
                //          if(src1 == -0.0)
                //              MOVSD/MOVSS dst, src2
                //      }
                //      JMP $doneLabel
                //
                // $labelNaNHelper
                //      MOVSD/MOVSS dst, NaN
                //
                // $doneLabel

                //MOVSD/MOVSS dst, src1;
                Assert(!dst->IsEqual(src1));
                this->m_lowerer->InsertMove(dst, src1, instr);
                if (min)
                {
                    this->m_lowerer->InsertCompareBranch(src1, src2, Js::OpCode::BrLt_A, doneLabel, instr); // Lowering of BrLt_A for floats is done to JA with operands swapped
                }
                else
                {
                    this->m_lowerer->InsertCompareBranch(src1, src2, Js::OpCode::BrGt_A, doneLabel, instr);
                }
                instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JEQ, labelNegZeroAndNaNCheckHelper, instr->m_func));
                this->m_lowerer->InsertMove(dst, src2, instr);
                instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));

                instr->InsertBefore(labelNegZeroAndNaNCheckHelper);
                // PF set after the compare means unordered, i.e. a NaN operand.
                instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JP, labelNaNHelper, instr->m_func));

                // src1 == src2 here; distinguish -0 from +0 so min(-0, +0) == -0
                // and max(-0, +0) == +0.
                IR::LabelInstr *isNeg0Label = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                NegZeroBranching(min ? src2 : src1, instr, isNeg0Label, doneLabel);

                instr->InsertBefore(isNeg0Label);
                this->m_lowerer->InsertMove(dst, src2, instr);
                instr->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, doneLabel, instr->m_func));

                instr->InsertBefore(labelNaNHelper);
                IR::Opnd * opndNaN = nullptr;
                if (dst->IsFloat32())
                {
                    opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), IRType::TyFloat32, this->m_func);
                }
                else
                {
                    opndNaN = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), IRType::TyFloat64, this->m_func);
                }
                this->m_lowerer->InsertMove(dst, opndNaN, instr);
            }

            instr->InsertBefore(doneLabel);
            instr->Remove();
            break;
        }

    default:
        AssertMsg(FALSE, "Unknown inline built-in opcode");
        break;
    }
}
  8111. void LowererMD::GenerateFastInlineBuiltInMathAbs(IR::Instr* inlineInstr)
  8112. {
  8113. IR::Opnd* src = inlineInstr->GetSrc1();
  8114. IR::Opnd* dst = inlineInstr->UnlinkDst();
  8115. Assert(src);
  8116. IR::Instr* tmpInstr;
  8117. IR::Instr* nextInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  8118. IR::Instr* continueInstr = m_lowerer->LowerBailOnIntMin(inlineInstr);
  8119. continueInstr->InsertAfter(nextInstr);
  8120. IRType srcType = src->GetType();
  8121. if (srcType == IRType::TyInt32)
  8122. {
  8123. // Note: if execution gets so far, we always get (untagged) int32 here.
  8124. // Since -x = ~x + 1, abs(x) = x, abs(-x) = -x, sign-extend(x) = 0, sign_extend(-x) = -1, where 0 <= x.
  8125. // Then: abs(x) = sign-extend(x) XOR x - sign-extend(x)
  8126. // Expected input (otherwise bailout):
  8127. // - src1 is (untagged) int, not equal to int_min (abs(int_min) would produce overflow, as there's no corresponding positive int).
  8128. // MOV EAX, src
  8129. IR::RegOpnd *regEAX = IR::RegOpnd::New(TyInt32, this->m_func);
  8130. regEAX->SetReg(LowererMDArch::GetRegIMulDestLower());
  8131. tmpInstr = IR::Instr::New(Js::OpCode::MOV, regEAX, src, this->m_func);
  8132. nextInstr->InsertBefore(tmpInstr);
  8133. IR::RegOpnd *regEDX = IR::RegOpnd::New(TyInt32, this->m_func);
  8134. regEDX->SetReg(LowererMDArch::GetRegIMulHighDestLower());
  8135. // CDQ (sign-extend EAX into EDX, producing 64bit EDX:EAX value)
  8136. // Note: put EDX on dst to give of def to the EDX lifetime
  8137. tmpInstr = IR::Instr::New(Js::OpCode::CDQ, regEDX, this->m_func);
  8138. nextInstr->InsertBefore(tmpInstr);
  8139. // XOR EAX, EDX
  8140. tmpInstr = IR::Instr::New(Js::OpCode::XOR, regEAX, regEAX, regEDX, this->m_func);
  8141. nextInstr->InsertBefore(tmpInstr);
  8142. // SUB EAX, EDX
  8143. tmpInstr = IR::Instr::New(Js::OpCode::SUB, regEAX, regEAX, regEDX, this->m_func);
  8144. nextInstr->InsertBefore(tmpInstr);
  8145. // MOV dst, EAX
  8146. tmpInstr = IR::Instr::New(Js::OpCode::MOV, dst, regEAX, this->m_func);
  8147. nextInstr->InsertBefore(tmpInstr);
  8148. }
  8149. else if (srcType == IRType::TyFloat64)
  8150. {
  8151. if (!dst->IsRegOpnd())
  8152. {
  8153. // MOVSD tempRegOpnd, src
  8154. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyMachDouble, this->m_func);
  8155. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  8156. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, tempRegOpnd, src, this->m_func);
  8157. nextInstr->InsertBefore(tmpInstr);
  8158. // This saves the result in the same register.
  8159. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  8160. // MOVSD dst, tempRegOpnd
  8161. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, tempRegOpnd, this->m_func);
  8162. nextInstr->InsertBefore(tmpInstr);
  8163. }
  8164. else
  8165. {
  8166. // MOVSD dst, src
  8167. tmpInstr = IR::Instr::New(Js::OpCode::MOVSD, dst, src, this->m_func);
  8168. nextInstr->InsertBefore(tmpInstr);
  8169. // This saves the result in the same register.
  8170. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  8171. }
  8172. }
  8173. else if (srcType == IRType::TyFloat32)
  8174. {
  8175. if (!dst->IsRegOpnd())
  8176. {
  8177. // MOVSS tempRegOpnd, src
  8178. IR::RegOpnd* tempRegOpnd = IR::RegOpnd::New(nullptr, TyFloat32, this->m_func);
  8179. tempRegOpnd->m_isCallArg = true; // This is to make sure that lifetime of opnd is virtually extended until next CALL instr.
  8180. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, tempRegOpnd, src, this->m_func);
  8181. nextInstr->InsertBefore(tmpInstr);
  8182. // This saves the result in the same register.
  8183. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(tempRegOpnd), nextInstr);
  8184. // MOVSS dst, tempRegOpnd
  8185. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, tempRegOpnd, this->m_func);
  8186. nextInstr->InsertBefore(tmpInstr);
  8187. }
  8188. else
  8189. {
  8190. // MOVSS dst, src
  8191. tmpInstr = IR::Instr::New(Js::OpCode::MOVSS, dst, src, this->m_func);
  8192. nextInstr->InsertBefore(tmpInstr);
  8193. // This saves the result in the same register.
  8194. this->GenerateFloatAbs(static_cast<IR::RegOpnd*>(dst), nextInstr);
  8195. }
  8196. }
  8197. else
  8198. {
  8199. AssertMsg(FALSE, "GenerateFastInlineBuiltInMathAbs: unexpected type of the src!");
  8200. }
  8201. }
// Lowers an inlined Math.pow(x, y) into a direct helper call, picking the helper
// overload from the operand types: double^int, int^int, or double^double.
// Helper arguments are pushed exponent-first (UnlinkSrc2 before UnlinkSrc1).
void LowererMD::GenerateFastInlineBuiltInMathPow(IR::Instr* instr)
{
#ifdef _M_IX86
    // On x86 a double exponent takes a separate, customized lowering path and
    // must not reach this function.
    AssertMsg(!instr->GetSrc2()->IsFloat(), "Math.pow(*, double) needs customized lowering!");
#endif
    IR::JnHelperMethod directPowHelper = (IR::JnHelperMethod)0;
    IR::Opnd* bailoutOpnd = nullptr;

    if (!instr->GetSrc2()->IsFloat())
    {
        // Integer exponent: load it as the first helper argument.
        LoadHelperArgument(instr, instr->UnlinkSrc2());

        if (instr->GetSrc1()->IsFloat())
        {
            directPowHelper = IR::HelperDirectMath_PowDoubleInt;
            LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
        }
        else
        {
            directPowHelper = IR::HelperDirectMath_PowIntInt;
            LoadHelperArgument(instr, instr->UnlinkSrc1());

            // int^int can fall outside the int range; the helper reports that
            // through a TyUint8 out-param. Lazily allocate one reusable stack
            // slot on the func, pass its address as an extra argument, and use
            // the slot itself as the bailout condition operand.
            if (!this->m_func->tempSymBool)
            {
                this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
                this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
            }
            IR::SymOpnd* boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
            IR::RegOpnd* boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
            this->m_lowerer->InsertLea(boolRefOpnd, boolOpnd, instr);
            LoadHelperArgument(instr, boolRefOpnd);
            bailoutOpnd = boolOpnd;
        }
    }
#ifndef _M_IX86
    else
    {
        // double^double: only reachable on non-x86 targets (see the assert above).
        AssertMsg(instr->GetSrc1()->IsFloat(), "Math.Pow(int, double) should not generated by GlobOpt!");
        directPowHelper = IR::HelperDirectMath_Pow;
        LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
        LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
    }
#endif

    ChangeToHelperCall(instr, directPowHelper, nullptr, bailoutOpnd);
}
// Emits a branch sequence testing whether float operand 'opnd' is exactly
// negative zero. A floating-point compare cannot distinguish -0.0 from +0.0,
// so the value is reinterpreted as an integer and compared bit-for-bit against
// the negative-zero bit pattern.
//   - branches to isNeg0Label when the bits match -0.0
//   - otherwise reaches an unconditional JMP to isNotNeg0Label
// All instructions are inserted before 'instr'. Returns the trailing JMP.
IR::Instr *
LowererMD::NegZeroBranching(IR::Opnd* opnd, IR::Instr* instr, IR::LabelInstr* isNeg0Label, IR::LabelInstr* isNotNeg0Label)
{
    Assert(opnd->IsFloat());
    bool is32Bits = opnd->IsFloat32();
    IRType regType = is32Bits ? TyUint32 : TyUint64;

    // Use UInt64 comparison between the opnd to check and negative zero constant.
    // For this we have to convert opnd which is a double to uint64.
    // MOV intOpnd, src
    IR::RegOpnd *intOpnd = IR::RegOpnd::New(regType, this->m_func);
    EmitReinterpretFloatToInt(intOpnd, opnd, instr);

#if LOWER_SPLIT_INT64
    if (!is32Bits)
    {
        // For 64bits comparisons on x86 we need to check 2 registers
        // CMP intOpnd.high, (k_NegZero >> 32).i32
        // BRNEQ isNotNeg0Label
        // CMP intOpnd.low, k_NegZero.i32
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        Int64RegPair dstPair = m_func->FindOrCreateInt64Pair(intOpnd);
        const uint32 high64NegZero = Js::NumberConstants::k_NegZero >> 32;
        const uint32 low64NegZero = Js::NumberConstants::k_NegZero & UINT32_MAX;
        IR::IntConstOpnd *negZeroHighOpnd = IR::IntConstOpnd::New(high64NegZero, TyUint32, m_func);
        IR::IntConstOpnd *negZeroLowOpnd = IR::IntConstOpnd::New(low64NegZero, TyUint32, m_func);
        m_lowerer->InsertCompareBranch(dstPair.high, negZeroHighOpnd, Js::OpCode::BrNeq_A, isNotNeg0Label, instr);
        m_lowerer->InsertCompareBranch(dstPair.low, negZeroLowOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }
    else
#endif
    {
#if _M_IX86
        // On x86 the 64-bit case is split into the register pair path above,
        // so only the float32 constant is needed here.
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(Js::NumberConstants::k_Float32NegZero, regType, m_func);
#else
        IR::IntConstOpnd *negZeroOpnd = IR::IntConstOpnd::New(is32Bits ? Js::NumberConstants::k_Float32NegZero : Js::NumberConstants::k_NegZero, regType, m_func);
#endif
        // CMP intOpnd, k_NegZero
        // BREQ isNeg0Label
        // JMP isNotNeg0Label
        m_lowerer->InsertCompareBranch(intOpnd, negZeroOpnd, Js::OpCode::BrEq_A, isNeg0Label, instr);
    }

    IR::Instr* jmpNotNegZero = IR::BranchInstr::New(Js::OpCode::JMP, isNotNeg0Label, m_func);
    instr->InsertBefore(jmpNotNegZero);
    return jmpNotNegZero;
}
  8289. void
  8290. LowererMD::FinalLower()
  8291. {
  8292. this->lowererMDArch.FinalLower();
  8293. }
  8294. IR::Instr *
  8295. LowererMD::LowerDivI4AndBailOnReminder(IR::Instr * instr, IR::LabelInstr * bailOutLabel)
  8296. {
  8297. // Don't have save the operand for bailout because the lowering of IDIV don't overwrite their values
  8298. // (EDX) = CDQ
  8299. // EAX = numerator
  8300. // (EDX:EAX)= IDIV (EAX), denominator
  8301. // TEST EDX, EDX
  8302. // JNE bailout
  8303. // <Caller insert more checks here>
  8304. // dst = MOV EAX <-- assignInstr
  8305. Assert(instr);
  8306. Assert(instr->m_opcode == Js::OpCode::Div_I4);
  8307. Assert(!instr->HasBailOutInfo());
  8308. EmitInt4Instr(instr);
  8309. Assert(instr->m_opcode == Js::OpCode::IDIV);
  8310. IR::Instr * prev = instr->m_prev;
  8311. Assert(prev->m_opcode == Js::OpCode::CDQ);
  8312. #ifdef _M_IX86
  8313. Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegEDX);
  8314. #else
  8315. Assert(prev->GetDst()->AsRegOpnd()->GetReg() == RegRDX);
  8316. #endif
  8317. IR::Opnd * reminderOpnd = prev->GetDst();
  8318. // Insert all check before the assignment to the actual dst.
  8319. IR::Instr * insertBeforeInstr = instr->m_next;
  8320. Assert(insertBeforeInstr->m_opcode == Js::OpCode::MOV);
  8321. #ifdef _M_IX86
  8322. Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegEAX);
  8323. #else
  8324. Assert(insertBeforeInstr->GetSrc1()->AsRegOpnd()->GetReg() == RegRAX);
  8325. #endif
  8326. // Jump to bailout if the reminder is not 0 (not int result)
  8327. this->m_lowerer->InsertTestBranch(reminderOpnd, reminderOpnd, Js::OpCode::BrNeq_A, bailOutLabel, insertBeforeInstr);
  8328. return insertBeforeInstr;
  8329. }
// Lowers a Typeof instruction. Fast path: index into the library's cached
// typeDisplayStrings table by type id — tagged ints map directly to the
// "number" entry, objects have their type id clamped/patched via CMOVs —
// and fall back to the Op_Typeof helper when the table has no cached string
// (entry is null).
void
LowererMD::LowerTypeof(IR::Instr * typeOfInstr)
{
    Func * func = typeOfInstr->m_func;
    IR::Opnd * src1 = typeOfInstr->GetSrc1();
    IR::Opnd * dst = typeOfInstr->GetDst();
    Assert(src1->IsRegOpnd() && dst->IsRegOpnd());
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * taggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV typeDisplayStringsArray, &javascriptLibrary->typeDisplayStrings
    IR::RegOpnd * typeDisplayStringsArrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowerer->InsertMove(typeDisplayStringsArrayOpnd, IR::AddrOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetTypeDisplayStringsOffset(), IR::AddrOpndKindConstantAddress, this->m_func), typeOfInstr);

    // Tagged ints bypass the type-id computation entirely.
    GenerateObjectTest(src1, typeOfInstr, taggedIntLabel);

    // MOV typeId, TypeIds_Object
    // MOV typeRegOpnd, [src1 + offset(Type)]
    // MOV objTypeId, [typeRegOpnd + offsetof(typeId)]
    // CMP objTypeId, TypeIds_Limit /*external object test*/
    // CMOVB typeId, objTypeId
    // TEST [typeRegOpnd + offsetof(flags)], TypeFlagMask_IsFalsy /*test for falsy*/
    // CMOVNE typeId, TypeIds_Undefined
    // MOV dst, typeDisplayStrings[typeId]
    // TEST dst, dst
    // JE $helper
    // JMP $done
    IR::RegOpnd * typeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Object, TyUint32, func), typeOfInstr);
    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, func);
    m_lowerer->InsertMove(typeRegOpnd,
        IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func),
        typeOfInstr);
    IR::RegOpnd * objTypeIdOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(objTypeIdOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, func), typeOfInstr);
    // Type ids at or above TypeIds_Limit (external objects) keep the default
    // TypeIds_Object; smaller ids are taken as-is.
    m_lowerer->InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_Limit, TyUint32, func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVB, typeIdOpnd, objTypeIdOpnd, typeOfInstr);

    // Insert MOV reg, 0 before the TEST because MOV reg, 0 will be peeped to
    // XOR reg, reg and that may affect the zero flags that the CMOVNE depends on.
    IR::RegOpnd* typeIdUndefinedOpnd = IR::RegOpnd::New(TyUint32, func);
    m_lowerer->InsertMove(typeIdUndefinedOpnd, IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyUint32, func), typeOfInstr);

    // Falsy objects report "undefined".
    IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
    m_lowerer->InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), typeOfInstr);
    InsertCmovCC(Js::OpCode::CMOVNE, typeIdOpnd, typeIdUndefinedOpnd, typeOfInstr);

    if (dst->IsEqual(src1))
    {
        // dst aliases src1: copy src1 out first — presumably so the helper
        // fallback below still sees the original value after dst is written.
        // NOTE(review): confirm against LowerUnaryHelperMem's use of src1.
        ChangeToAssign(typeOfInstr->HoistSrc1(Js::OpCode::Ld_A));
    }
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, typeIdOpnd, this->GetDefaultIndirScale(), TyMachPtr, func), typeOfInstr);

    // A null table entry means no cached display string: go to the helper.
    m_lowerer->InsertTestBranch(dst, dst, Js::OpCode::BrEq_A, helperLabel, typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);

    // $taggedInt:
    //     MOV dst, typeDisplayStrings[TypeIds_Number]
    //     JMP $done
    typeOfInstr->InsertBefore(taggedIntLabel);
    m_lowerer->InsertMove(dst, IR::IndirOpnd::New(typeDisplayStringsArrayOpnd, Js::TypeIds_Number * sizeof(Js::Var), TyMachPtr, func), typeOfInstr);
    m_lowerer->InsertBranch(Js::OpCode::Br, doneLabel, typeOfInstr);

    // $helper
    //     CALL OP_TypeOf
    // $done
    typeOfInstr->InsertBefore(helperLabel);
    typeOfInstr->InsertAfter(doneLabel);
    m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
}
  8391. IR::Instr*
  8392. LowererMD::InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc)
  8393. {
  8394. Assert(opCode > Js::OpCode::MDStart);
  8395. Func* func = insertBeforeInstr->m_func;
  8396. IR::Opnd* src2 = nullptr;
  8397. if (!postRegAlloc)
  8398. {
  8399. src2 = src1;
  8400. src1 = dst;
  8401. }
  8402. IR::Instr * instr = IR::Instr::New(opCode, dst, src1, src2, func);
  8403. insertBeforeInstr->InsertBefore(instr);
  8404. LowererMD::Legalize(instr);
  8405. return instr;
  8406. }