Lower.cpp 1009 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
22222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212
26222262322624226252262622627226282262922630226312263222633226342263522636226372263822639226402264122642226432264422645226462264722648226492265022651226522265322654226552265622657226582265922660226612266222663226642266522666226672266822669226702267122672226732267422675226762267722678226792268022681226822268322684226852268622687226882268922690226912269222693226942269522696226972269822699227002270122702227032270422705227062270722708227092271022711227122271322714227152271622717227182271922720227212272222723227242272522726227272272822729227302273122732227332273422735227362273722738227392274022741227422274322744227452274622747227482274922750227512275222753227542275522756227572275822759227602276122762227632276422765227662276722768227692277022771227722277322774227752277622777227782277922780227812278222783227842278522786227872278822789227902279122792227932279422795227962279722798227992280022801228022280322804228052280622807228082280922810228112281222813228142281522816228172281822819228202282122822228232282422825228262282722828228292283022831228322283322834228352283622837228382283922840228412284222843228442284522846228472284822849228502285122852228532285422855228562285722858228592286022861228622286322864228652286622867228682286922870228712287222873228742287522876228772287822879228802288122882228832288422885228862288722888228892289022891228922289322894228952289622897228982289922900229012290222903229042290522906229072290822909229102291122912229132291422915229162291722918229192292022921229222292322924229252292622927229282292922930229312293222933229342293522936229372293822939229402294122942229432294422945229462294722948229492295022951229522295322954229552295622957229582295922960229612296222963229642296522966229672296822969229702297122972229732297422975229762297722978229792298022981229822298322984229852298622987229882298922990229912299222993229942299522996229972299822999230002300123002230032300423005230062300723008230092301023011230122301323014230152301623017230182301923020230212
30222302323024230252302623027230282302923030230312303223033230342303523036230372303823039230402304123042230432304423045230462304723048230492305023051230522305323054230552305623057230582305923060230612306223063230642306523066230672306823069230702307123072230732307423075230762307723078230792308023081230822308323084230852308623087230882308923090230912309223093230942309523096230972309823099231002310123102231032310423105231062310723108231092311023111231122311323114231152311623117231182311923120231212312223123231242312523126231272312823129231302313123132231332313423135231362313723138231392314023141231422314323144231452314623147231482314923150231512315223153231542315523156231572315823159231602316123162231632316423165231662316723168231692317023171231722317323174231752317623177231782317923180231812318223183231842318523186231872318823189231902319123192231932319423195231962319723198231992320023201232022320323204232052320623207232082320923210232112321223213232142321523216232172321823219232202322123222232232322423225232262322723228232292323023231232322323323234232352323623237232382323923240232412324223243232442324523246232472324823249232502325123252232532325423255232562325723258232592326023261232622326323264232652326623267232682326923270232712327223273232742327523276232772327823279232802328123282232832328423285232862328723288232892329023291232922329323294232952329623297232982329923300233012330223303233042330523306233072330823309233102331123312233132331423315233162331723318233192332023321233222332323324233252332623327233282332923330233312333223333233342333523336233372333823339233402334123342233432334423345233462334723348233492335023351233522335323354233552335623357233582335923360233612336223363233642336523366233672336823369233702337123372233732337423375233762337723378233792338023381233822338323384233852338623387233882338923390233912339223393233942339523396233972339823399234002340123402234032340423405234062340723408234092341023411234122341323414234152341623417234182341923420234212
34222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212
38222382323824238252382623827238282382923830238312383223833238342383523836238372383823839238402384123842238432384423845238462384723848238492385023851238522385323854238552385623857238582385923860238612386223863238642386523866238672386823869238702387123872238732387423875238762387723878238792388023881238822388323884238852388623887238882388923890238912389223893238942389523896238972389823899239002390123902239032390423905239062390723908239092391023911239122391323914239152391623917239182391923920239212392223923239242392523926239272392823929239302393123932239332393423935239362393723938239392394023941239422394323944239452394623947239482394923950239512395223953239542395523956239572395823959239602396123962239632396423965239662396723968239692397023971239722397323974239752397623977239782397923980239812398223983239842398523986239872398823989239902399123992239932399423995239962399723998239992400024001240022400324004240052400624007240082400924010240112401224013240142401524016240172401824019240202402124022240232402424025240262402724028240292403024031240322403324034240352403624037240382403924040240412404224043240442404524046240472404824049240502405124052240532405424055240562405724058240592406024061240622406324064240652406624067240682406924070240712407224073240742407524076240772407824079240802408124082240832408424085240862408724088240892409024091240922409324094240952409624097240982409924100241012410224103241042410524106241072410824109241102411124112241132411424115241162411724118241192412024121241222412324124241252412624127241282412924130241312413224133241342413524136241372413824139241402414124142241432414424145241462414724148241492415024151241522415324154241552415624157241582415924160241612416224163241642416524166241672416824169241702417124172241732417424175241762417724178241792418024181241822418324184241852418624187241882418924190241912419224193241942419524196241972419824199242002420124202242032420424205242062420724208242092421024211242122421324214242152421624217242182421924220242212
42222422324224242252422624227242282422924230242312423224233242342423524236242372423824239242402424124242242432424424245242462424724248242492425024251242522425324254242552425624257242582425924260242612426224263242642426524266242672426824269242702427124272242732427424275242762427724278242792428024281242822428324284242852428624287242882428924290242912429224293242942429524296242972429824299243002430124302243032430424305243062430724308243092431024311243122431324314243152431624317243182431924320243212432224323243242432524326243272432824329243302433124332243332433424335243362433724338243392434024341243422434324344243452434624347243482434924350243512435224353243542435524356243572435824359243602436124362243632436424365243662436724368243692437024371243722437324374243752437624377243782437924380243812438224383243842438524386243872438824389243902439124392243932439424395243962439724398243992440024401244022440324404244052440624407244082440924410244112441224413244142441524416244172441824419244202442124422244232442424425244262442724428244292443024431244322443324434244352443624437244382443924440244412444224443244442444524446244472444824449244502445124452244532445424455244562445724458244592446024461244622446324464244652446624467244682446924470244712447224473244742447524476244772447824479244802448124482244832448424485244862448724488244892449024491244922449324494244952449624497244982449924500245012450224503245042450524506245072450824509245102451124512245132451424515245162451724518245192452024521245222452324524245252452624527245282452924530245312453224533245342453524536245372453824539245402454124542245432454424545245462454724548245492455024551245522455324554245552455624557245582455924560245612456224563245642456524566245672456824569245702457124572245732457424575245762457724578245792458024581245822458324584245852458624587245882458924590245912459224593245942459524596245972459824599246002460124602246032460424605246062460724608246092461024611246122461324614246152461624617246182461924620246212
46222462324624246252462624627246282462924630246312463224633246342463524636246372463824639246402464124642246432464424645246462464724648246492465024651246522465324654246552465624657246582465924660246612466224663246642466524666246672466824669246702467124672246732467424675246762467724678246792468024681246822468324684246852468624687246882468924690246912469224693246942469524696246972469824699247002470124702247032470424705247062470724708247092471024711247122471324714247152471624717247182471924720247212472224723247242472524726247272472824729247302473124732247332473424735247362473724738247392474024741247422474324744247452474624747247482474924750247512475224753247542475524756247572475824759247602476124762247632476424765247662476724768247692477024771247722477324774247752477624777247782477924780247812478224783247842478524786247872478824789247902479124792247932479424795247962479724798247992480024801248022480324804248052480624807248082480924810248112481224813248142481524816248172481824819248202482124822248232482424825248262482724828248292483024831248322483324834248352483624837248382483924840248412484224843248442484524846248472484824849248502485124852248532485424855248562485724858248592486024861248622486324864248652486624867248682486924870248712487224873248742487524876248772487824879248802488124882248832488424885248862488724888248892489024891248922489324894248952489624897248982489924900249012490224903249042490524906249072490824909249102491124912249132491424915249162491724918249192492024921249222492324924249252492624927249282492924930249312493224933249342493524936249372493824939249402494124942249432494424945249462494724948249492495024951249522495324954249552495624957249582495924960249612496224963249642496524966249672496824969249702497124972249732497424975249762497724978249792498024981249822498324984249852498624987249882498924990249912499224993249942499524996249972499824999250002500125002250032500425005250062500725008250092501025011250122501325014250152501625017250182501925020250212
50222502325024250252502625027250282502925030250312503225033250342503525036250372503825039250402504125042250432504425045250462504725048250492505025051250522505325054250552505625057250582505925060250612506225063250642506525066250672506825069250702507125072250732507425075250762507725078250792508025081250822508325084250852508625087250882508925090250912509225093250942509525096250972509825099251002510125102251032510425105251062510725108251092511025111251122511325114251152511625117251182511925120251212512225123251242512525126251272512825129251302513125132251332513425135251362513725138251392514025141251422514325144251452514625147251482514925150251512515225153251542515525156251572515825159251602516125162251632516425165251662516725168251692517025171251722517325174251752517625177251782517925180251812518225183251842518525186251872518825189251902519125192251932519425195251962519725198251992520025201252022520325204252052520625207252082520925210252112521225213252142521525216252172521825219252202522125222252232522425225252262522725228252292523025231252322523325234252352523625237252382523925240252412524225243252442524525246252472524825249252502525125252252532525425255252562525725258252592526025261252622526325264252652526625267252682526925270252712527225273252742527525276252772527825279252802528125282252832528425285252862528725288252892529025291252922529325294252952529625297252982529925300253012530225303253042530525306253072530825309253102531125312253132531425315253162531725318253192532025321253222532325324253252532625327253282532925330253312533225333253342533525336253372533825339253402534125342253432534425345253462534725348253492535025351253522535325354253552535625357253582535925360253612536225363253642536525366253672536825369253702537125372253732537425375253762537725378253792538025381253822538325384253852538625387253882538925390253912539225393253942539525396253972539825399254002540125402254032540425405254062540725408254092541025411254122541325414254152541625417254182541925420254212
5422254232542425425254262542725428254292543025431254322543325434254352543625437254382543925440254412544225443254442544525446254472544825449254502545125452254532545425455254562545725458254592546025461254622546325464254652546625467254682546925470254712547225473254742547525476254772547825479254802548125482254832548425485254862548725488254892549025491254922549325494254952549625497254982549925500255012550225503255042550525506255072550825509255102551125512255132551425515255162551725518255192552025521255222552325524255252552625527255282552925530255312553225533255342553525536255372553825539255402554125542255432554425545
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Debug/DebuggingFlags.h"
  7. #include "Debug/DiagProbe.h"
  8. #include "Debug/DebugManager.h"
  9. // Parser includes
  10. #include "RegexCommon.h"
  11. #include "RegexPattern.h"
  12. #include "ExternalLowerer.h"
  13. #include "Types/DynamicObjectPropertyEnumerator.h"
  14. #include "Types/JavascriptStaticEnumerator.h"
  15. #include "Library/ForInObjectEnumerator.h"
  16. ///----------------------------------------------------------------------------
  17. ///
  18. /// Lowerer::Lower
  19. ///
/// Lowerer's main entrypoint. Lowers this function.
  21. ///
  22. ///----------------------------------------------------------------------------
void
Lowerer::Lower()
{
    // Byte-code offsets stop being maintained from here on: lowering rewrites
    // the IR into machine-level form where the mapping is no longer tracked.
    this->m_func->StopMaintainByteCodeOffset();

    // Arena for everything allocated during lowering; throws OutOfMemory
    // rather than attempting recovery.
    NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-Lower"), this->m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->m_alloc = &localAlloc;

    // Stack-resident bit vectors published through member pointers; the
    // initializedTempSym/m_alloc pointers are nulled again at the bottom of
    // this function because their backing storage dies with this frame.
    BVSparse<JitArenaAllocator> localInitializedTempSym(&localAlloc);
    this->initializedTempSym = &localInitializedTempSym;
    BVSparse<JitArenaAllocator> localAddToLiveOnBackEdgeSyms(&localAlloc);
    this->addToLiveOnBackEdgeSyms = &localAddToLiveOnBackEdgeSyms;
    Assert(this->m_func->GetCloneMap() == nullptr);

    m_lowererMD.Init(this);

    // Snapshot the fast-path policy once for the whole function before the walk.
    bool defaultDoFastPath = this->m_func->DoFastPaths();
    bool loopFastPath = this->m_func->DoLoopFastPaths();

    if (m_func->HasAnyStackNestedFunc())
    {
        EnsureStackFunctionListStackSym();
    }
    if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
    {
        AllocStackClosure();
    }
    AllocStackForInObjectEnumeratorArray();

    if (m_func->IsJitInDebugMode())
    {
        // Initialize metadata of local var slots.
        // Too late to wait until Register Allocator, as we need the offset when lowering bailout for debugger.
        int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
        if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
        {
            // MOV [EBP + m_func->GetHasLocalVarChangedOffset()], 0
            StackSym* sym = StackSym::New(TyInt8, m_func);
            sym->m_offset = hasLocalVarChangedOffset;
            sym->m_allocated = true; // offset assigned by hand, not by the register allocator
            IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
            LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());

#ifdef DBG
            // Pre-fill all local slots with a pattern. This will help identify non-initialized/garbage var values.
            // Note that in the beginning of the function in bytecode we should initialize all locals to undefined.
            uint32 localSlotCount = m_func->GetJITFunctionBody()->GetEndNonTempLocalIndex() - m_func->GetJITFunctionBody()->GetFirstNonTempLocalIndex();
            for (uint i = 0; i < localSlotCount; ++i)
            {
                int offset = m_func->GetLocalVarSlotOffset(i);
                IRType opnd1Type;
#if defined(_M_IX86) || defined (_M_ARM)
                // 32-bit targets: fill each slot with the 4-byte debug pattern.
                opnd1Type = TyInt32;
                opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern4, opnd1Type, m_func);
#else
                // 64-bit targets: fill each slot with the 8-byte debug pattern.
                opnd1Type = TyInt64;
                opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern8, opnd1Type, m_func);
#endif
                sym = StackSym::New(opnd1Type, m_func);
                sym->m_offset = offset;
                sym->m_allocated = true;
                opnd1 = IR::SymOpnd::New(sym, opnd1Type, m_func);
                LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
            }
#endif
        }
        Assert(!m_func->HasAnyStackNestedFunc());
    }

    // Lower every instruction in the function, head to tail.
    this->LowerRange(m_func->m_headInstr, m_func->m_tailInstr, defaultDoFastPath, loopFastPath);

#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    // TODO: (leish)(swb) implement for arm
#if defined(_M_IX86) || defined(_M_AMD64)
    if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(VerifyBarrierBit))
    {
        // find out all write barrier setting instr, call Recycler::WBSetBit for verification purpose
        // should do this in LowererMD::GenerateWriteBarrier, however, can't insert call instruction there
        FOREACH_INSTR_EDITING(instr, instrNext, m_func->m_headInstr)
        if (instr->m_src1 && instr->m_src1->IsAddrOpnd())
        {
            IR::AddrOpnd* addrOpnd = instr->m_src1->AsAddrOpnd();
            if (addrOpnd->GetAddrOpndKind() == IR::AddrOpndKindWriteBarrierCardTable)
            {
                // The three instructions preceding the card-table store are
                // expected to be the LEA/MOV/SHR sequence emitted for the
                // barrier (asserted below) — presumably the exact shape
                // produced by LowererMD::GenerateWriteBarrier; confirm there
                // if that sequence ever changes.
                auto& leaInstr = instr->m_prev->m_prev->m_prev;
                auto& movInstr = instr->m_prev->m_prev;
                auto& shrInstr = instr->m_prev;
                Assert(leaInstr->m_opcode == Js::OpCode::LEA);
                Assert(movInstr->m_opcode == Js::OpCode::MOV);
                Assert(shrInstr->m_opcode == Js::OpCode::SHR);
                m_lowererMD.LoadHelperArgument(movInstr, leaInstr->m_dst);
                IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
                movInstr->InsertBefore(instrCall);
                m_lowererMD.ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
            }
        }
        NEXT_INSTR_EDITING
    }
#endif
#endif

    this->m_func->ClearCloneMap();

    if (m_func->HasAnyStackNestedFunc())
    {
        EnsureZeroLastStackFunctionNext();
    }

    if (!m_func->IsSimpleJit())
    {
#if 0 // TODO michhol oop jit, reenable assert
        Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
        Assert(entryPointInfo->GetJitTransferData() != nullptr && !entryPointInfo->GetJitTransferData()->GetIsReady());
#endif
    }

    // Detach pointers to the stack-local structures created above before they
    // go out of scope with this frame.
    this->initializedTempSym = nullptr;
    this->m_alloc = nullptr;

    this->m_func->DisableConstandAddressLoadHoist();
}
  131. void
  132. Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFastPath, bool defaultDoLoopFastPath)
  133. {
  134. bool noMathFastPath;
  135. bool noFieldFastPath;
  136. bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();
  137. noFieldFastPath = !defaultDoFastPath;
  138. noMathFastPath = !defaultDoFastPath;
  139. #if DBG_DUMP
  140. char16 * globOptInstrString = nullptr;
  141. #endif
  142. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrEnd, instrStart)
  143. {
  144. // Try to peep this`
  145. instr = this->PreLowerPeepInstr(instr, &instrPrev);
  146. #if DBG
  147. IR::Instr * verifyLegalizeInstrNext = instr->m_next;
  148. #endif
  149. // If we have debugger bailout as part of real instr (not separate BailForDebugger instr),
  150. // extract/split out BailOutForDebugger into separate instr, if needed.
  151. // The instr can have just debugger bailout, or debugger bailout + other shared bailout.
  152. // Note that by the time we get here, we should not have aux-only bailout (in globopt we promote it to normal bailout).
  153. if (m_func->IsJitInDebugMode() && instr->HasBailOutInfo() &&
  154. (((instr->GetBailOutKind() & IR::BailOutForDebuggerBits) && instr->m_opcode != Js::OpCode::BailForDebugger) ||
  155. instr->HasAuxBailOut()))
  156. {
  157. instr = this->SplitBailForDebugger(instr); // Change instr, as returned is the one we need to lower next.
  158. instrPrev = instr->m_prev; // Change just in case if instr got changed.
  159. }
  160. #if DBG_DUMP
  161. if (!instr->IsLowered() && !instr->IsLabelInstr()
  162. && (CONFIG_FLAG(ForcePostLowerGlobOptInstrString) ||
  163. PHASE_DUMP(Js::LowererPhase, m_func) ||
  164. PHASE_DUMP(Js::LinearScanPhase, m_func) ||
  165. PHASE_DUMP(Js::RegAllocPhase, m_func) ||
  166. PHASE_DUMP(Js::PeepsPhase, m_func) ||
  167. PHASE_DUMP(Js::LayoutPhase, m_func) ||
  168. PHASE_DUMP(Js::EmitterPhase, m_func) ||
  169. PHASE_DUMP(Js::EncoderPhase, m_func) ||
  170. PHASE_DUMP(Js::BackEndPhase, m_func)))
  171. {
  172. if(instr->m_next && instr->m_next->m_opcode != Js::OpCode::StatementBoundary && !instr->m_next->IsLabelInstr())
  173. {
  174. instr->m_next->globOptInstrString = globOptInstrString;
  175. }
  176. globOptInstrString = instr->DumpString();
  177. }
  178. #endif
  179. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsMultiBranch() && instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
  180. {
  181. Loop * loop = instr->AsBranchInstr()->GetTarget()->GetLoop();
  182. if (this->outerMostLoopLabel == nullptr && !loop->isProcessed)
  183. {
  184. while (loop && loop->GetLoopTopInstr()) // some loops are optimized away so that they are not loops anymore.
  185. // They do, however, stay in the loop graph but don't have loop top labels assigned to them
  186. {
  187. this->outerMostLoopLabel = loop->GetLoopTopInstr();
  188. Assert(this->outerMostLoopLabel->m_isLoopTop);
  189. // landing pad must fall through to the loop
  190. Assert(this->outerMostLoopLabel->m_prev->HasFallThrough());
  191. loop = loop->parent;
  192. }
  193. this->initializedTempSym->ClearAll();
  194. }
  195. noFieldFastPath = !defaultDoLoopFastPath;
  196. noMathFastPath = !defaultDoLoopFastPath;
  197. }
  198. #ifdef INLINE_CACHE_STATS
  199. if(PHASE_STATS1(Js::PolymorphicInlineCachePhase))
  200. {
  201. // Always use the slow path, so we can track property accesses
  202. noFieldFastPath = true;
  203. }
  204. #endif
  205. switch (instr->m_opcode)
  206. {
  207. case Js::OpCode::LdHandlerScope:
  208. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdHandlerScope);
  209. break;
  210. case Js::OpCode::InitSetFld:
  211. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitSetter, IR::HelperOP_InitSetter, false);
  212. break;
  213. case Js::OpCode::InitGetFld:
  214. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitGetter, IR::HelperOP_InitGetter, false);
  215. break;
  216. case Js::OpCode::InitProto:
  217. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitProto, IR::HelperOP_InitProto, false);
  218. break;
  219. case Js::OpCode::LdArgCnt:
  220. this->LoadArgumentCount(instr);
  221. break;
  222. case Js::OpCode::LdStackArgPtr:
  223. this->LoadStackArgPtr(instr);
  224. break;
  225. case Js::OpCode::LdHeapArguments:
  226. case Js::OpCode::LdLetHeapArguments:
  227. instrPrev = m_lowererMD.LoadHeapArguments(instr);
  228. break;
  229. case Js::OpCode::LdHeapArgsCached:
  230. case Js::OpCode::LdLetHeapArgsCached:
  231. m_lowererMD.LoadHeapArgsCached(instr);
  232. break;
  233. case Js::OpCode::InvalCachedScope:
  234. this->LowerBinaryHelper(instr, IR::HelperOP_InvalidateCachedScope);
  235. break;
  236. case Js::OpCode::InitCachedScope:
  237. if (instr->m_func->GetJITFunctionBody()->GetDoScopeObjectCreation() || !instr->m_func->IsStackArgsEnabled())
  238. {
  239. instrPrev = this->LowerInitCachedScope(instr);
  240. }
  241. else
  242. {
  243. instr->ReplaceSrc1(this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull));
  244. instr->m_opcode = Js::OpCode::Ld_A;
  245. instrPrev = instr;
  246. if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
  247. {
  248. Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
  249. Output::Flush();
  250. }
  251. }
  252. break;
  253. case Js::OpCode::NewScopeObject:
  254. {
  255. Func * currFunc = instr->m_func;
  256. if (currFunc->GetJITFunctionBody()->GetDoScopeObjectCreation() || !currFunc->IsStackArgsEnabled())
  257. {
  258. //Call Helper that creates scope object and does type transition for the formals
  259. if (currFunc->IsStackArgsEnabled() && currFunc->GetJITFunctionBody()->GetInParamsCount() != 1)
  260. {
  261. // s3 = formals are let decls
  262. this->m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(currFunc->GetHasNonSimpleParams() ? TRUE : FALSE, TyUint8, currFunc));
  263. // s2 = current function.
  264. IR::Opnd * paramOpnd = LoadFunctionBodyOpnd(instr);
  265. this->m_lowererMD.LoadHelperArgument(instr, paramOpnd);
  266. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObjectWithFormals);
  267. }
  268. else
  269. {
  270. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObject);
  271. }
  272. }
  273. else
  274. {
  275. instr->SetSrc1(this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull));
  276. instr->m_opcode = Js::OpCode::Ld_A;
  277. instrPrev = instr;
  278. if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
  279. {
  280. Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), currFunc->GetJITFunctionBody()->GetDisplayName(), currFunc->GetFunctionNumber());
  281. Output::Flush();
  282. }
  283. }
  284. break;
  285. }
  286. case Js::OpCode::NewStackScopeSlots:
  287. this->LowerNewScopeSlots(instr, m_func->DoStackScopeSlots());
  288. break;
  289. case Js::OpCode::NewScopeSlots:
  290. this->LowerNewScopeSlots(instr, false);
  291. break;
  292. case Js::OpCode::InitLocalClosure:
  293. // Real initialization of the stack pointers happens on entry to the function, so this instruction
  294. // (which exists to provide a def in the IR) can go away.
  295. instr->Remove();
  296. break;
  297. case Js::OpCode::NewScopeSlotsWithoutPropIds:
  298. this->LowerBinaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlotsWithoutPropIds);
  299. break;
  300. case Js::OpCode::NewBlockScope:
  301. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewBlockScope);
  302. break;
  303. case Js::OpCode::NewPseudoScope:
  304. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewPseudoScope);
  305. break;
  306. case Js::OpCode::CloneInnerScopeSlots:
  307. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneInnerScopeSlots);
  308. break;
  309. case Js::OpCode::CloneBlockScope:
  310. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneBlockScope);
  311. break;
  312. case Js::OpCode::GetCachedFunc:
  313. m_lowererMD.LowerGetCachedFunc(instr);
  314. break;
  315. case Js::OpCode::BrFncCachedScopeEq:
  316. case Js::OpCode::BrFncCachedScopeNeq:
  317. this->LowerBrFncCachedScopeEq(instr);
  318. break;
  319. case Js::OpCode::CommitScope:
  320. m_lowererMD.LowerCommitScope(instr);
  321. break;
  322. case Js::OpCode::LdFldForTypeOf:
  323. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf,
  324. IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf);
  325. break;
  326. case Js::OpCode::LdFld:
  327. case Js::OpCode::LdFldForCallApplyTarget:
  328. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic,
  329. IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic);
  330. break;
  331. case Js::OpCode::LdSuperFld:
  332. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr,
  333. IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr);
  334. break;
  335. case Js::OpCode::LdRootFld:
  336. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic,
  337. IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic);
  338. break;
  339. case Js::OpCode::LdRootFldForTypeOf:
  340. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf,
  341. IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf);
  342. break;
  343. case Js::OpCode::LdMethodFldPolyInlineMiss:
  344. instrPrev = LowerLdFld(instr, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic, true, nullptr, true);
  345. break;
  346. case Js::OpCode::LdMethodFld:
  347. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic,
  348. IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic);
  349. break;
  350. case Js::OpCode::LdRootMethodFld:
  351. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic,
  352. IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic);
  353. break;
  354. case Js::OpCode::ScopedLdMethodFld:
  355. // "Scoped" in ScopedLdMethodFld is a bit of a misnomer because it doesn't look through a scope chain.
  356. // Instead the op is to allow for either a LdRootMethodFld or LdMethodFld depending on whether the
  357. // object is the root object or not.
  358. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic,
  359. IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic);
  360. break;
  361. case Js::OpCode::LdMethodFromFlags:
  362. {
  363. Assert(instr->HasBailOutInfo());
  364. bool success = m_lowererMD.GenerateFastLdMethodFromFlags(instr);
  365. AssertMsg(success, "Not expected to generate helper block here");
  366. break;
  367. }
  368. case Js::OpCode::CheckFixedFld:
  369. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) || !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed prop(Data|Method) phase disabled?");
  370. this->GenerateCheckFixedFld(instr);
  371. break;
  372. case Js::OpCode::CheckPropertyGuardAndLoadType:
  373. instrPrev = this->GeneratePropertyGuardCheckBailoutAndLoadType(instr);
  374. break;
  375. case Js::OpCode::CheckObjType:
  376. this->GenerateCheckObjType(instr);
  377. break;
  378. case Js::OpCode::AdjustObjType:
  379. this->LowerAdjustObjType(instr);
  380. break;
  381. case Js::OpCode::DeleteFld:
  382. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, false);
  383. break;
  384. case Js::OpCode::DeleteRootFld:
  385. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, false);
  386. break;
  387. case Js::OpCode::DeleteFldStrict:
  388. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, true);
  389. break;
  390. case Js::OpCode::DeleteRootFldStrict:
  391. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, true);
  392. break;
  393. case Js::OpCode::ScopedLdFldForTypeOf:
  394. if (!noFieldFastPath)
  395. {
  396. m_lowererMD.GenerateFastScopedLdFld(instr);
  397. }
  398. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyForTypeOfScoped, true);
  399. break;
  400. case Js::OpCode::ScopedLdFld:
  401. if (!noFieldFastPath)
  402. {
  403. m_lowererMD.GenerateFastScopedLdFld(instr);
  404. }
  405. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyScoped, true);
  406. break;
  407. case Js::OpCode::ScopedLdInst:
  408. instrPrev = this->LowerScopedLdInst(instr, IR::HelperOp_GetInstanceScoped);
  409. break;
  410. case Js::OpCode::ScopedDeleteFld:
  411. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, false);
  412. break;
  413. case Js::OpCode::ScopedDeleteFldStrict:
  414. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, true);
  415. break;
  416. case Js::OpCode::NewScFunc:
  417. instrPrev = this->LowerNewScFunc(instr);
  418. break;
  419. case Js::OpCode::NewScGenFunc:
  420. instrPrev = this->LowerNewScGenFunc(instr);
  421. break;
  422. case Js::OpCode::StFld:
  423. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  424. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_None);
  425. break;
  426. case Js::OpCode::StSuperFld:
  427. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic,
  428. IR::HelperOp_PatchPutValueWithThisPtr, IR::HelperOp_PatchPutValueWithThisPtrPolymorphic, true, isStrictMode ? Js::PropertyOperation_StrictMode : Js::PropertyOperation_None);
  429. break;
  430. case Js::OpCode::StRootFld:
  431. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  432. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_Root);
  433. break;
  434. case Js::OpCode::StFldStrict:
  435. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  436. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_StrictMode);
  437. break;
  438. case Js::OpCode::StRootFldStrict:
  439. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  440. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_StrictModeRoot);
  441. break;
  442. case Js::OpCode::InitFld:
  443. case Js::OpCode::InitRootFld:
  444. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic,
  445. IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic, false, Js::PropertyOperation_None);
  446. break;
  447. case Js::OpCode::ScopedInitFunc:
  448. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_InitFuncScoped, false);
  449. break;
  450. case Js::OpCode::ScopedStFld:
  451. case Js::OpCode::ScopedStFldStrict:
  452. if (!noFieldFastPath)
  453. {
  454. m_lowererMD.GenerateFastScopedStFld(instr);
  455. }
  456. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_PatchSetPropertyScoped, true, true,
  457. instr->m_opcode == Js::OpCode::ScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode);
  458. break;
  459. case Js::OpCode::ConsoleScopedStFld:
  460. case Js::OpCode::ConsoleScopedStFldStrict:
  461. {
  462. if (!noFieldFastPath)
  463. {
  464. m_lowererMD.GenerateFastScopedStFld(instr);
  465. }
  466. Js::PropertyOperationFlags flags = static_cast<Js::PropertyOperationFlags>((instr->m_opcode == Js::OpCode::ConsoleScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode) | Js::PropertyOperation_AllowUndeclInConsoleScope);
  467. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_ConsolePatchSetPropertyScoped, true, true, flags);
  468. break;
  469. }
  470. case Js::OpCode::LdStr:
  471. m_lowererMD.ChangeToAssign(instr);
  472. break;
  473. case Js::OpCode::CloneStr:
  474. {
  475. GenerateGetImmutableOrScriptUnreferencedString(instr->GetSrc1()->AsRegOpnd(), instr, IR::HelperOp_CompoundStringCloneForAppending, false);
  476. instr->Remove();
  477. break;
  478. }
  479. case Js::OpCode::NewScObjArray:
  480. instrPrev = this->LowerNewScObjArray(instr);
  481. break;
  482. case Js::OpCode::NewScObject:
  483. case Js::OpCode::NewScObjectSpread:
  484. case Js::OpCode::NewScObjArraySpread:
  485. instrPrev = this->LowerNewScObject(instr, true, true);
  486. break;
  487. case Js::OpCode::NewScObjectNoCtor:
  488. instrPrev = this->LowerNewScObject(instr, false, true);
  489. break;
  490. case Js::OpCode::NewScObjectNoCtorFull:
  491. instrPrev = this->LowerNewScObject(instr, false, true, true);
  492. break;
  493. case Js::OpCode::GetNewScObject:
  494. instrPrev = this->LowerGetNewScObject(instr);
  495. break;
  496. case Js::OpCode::UpdateNewScObjectCache:
  497. instrPrev = instr->m_prev;
  498. this->LowerUpdateNewScObjectCache(instr, instr->GetSrc2(), instr->GetSrc1(), true /* isCtorFunction */);
  499. instr->Remove();
  500. break;
  501. case Js::OpCode::NewScObjectSimple:
  502. this->LowerNewScObjectSimple(instr);
  503. break;
  504. case Js::OpCode::NewScObjectLiteral:
  505. this->LowerNewScObjectLiteral(instr);
  506. break;
  507. case Js::OpCode::LdPropIds:
  508. m_lowererMD.ChangeToAssign(instr);
  509. break;
  510. case Js::OpCode::StArrSegItem_A:
  511. instrPrev = this->LowerArraySegmentVars(instr);
  512. break;
  513. case Js::OpCode::InlineMathAcos:
  514. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Acos);
  515. break;
  516. case Js::OpCode::InlineMathAsin:
  517. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Asin);
  518. break;
  519. case Js::OpCode::InlineMathAtan:
  520. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan);
  521. break;
  522. case Js::OpCode::InlineMathAtan2:
  523. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan2);
  524. break;
  525. case Js::OpCode::InlineMathCos:
  526. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Cos);
  527. break;
  528. case Js::OpCode::InlineMathExp:
  529. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Exp);
  530. break;
  531. case Js::OpCode::InlineMathLog:
  532. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Log);
  533. break;
  534. case Js::OpCode::InlineMathPow:
  535. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
  536. break;
  537. case Js::OpCode::InlineMathSin:
  538. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Sin);
  539. break;
  540. case Js::OpCode::InlineMathSqrt:
  541. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  542. break;
  543. case Js::OpCode::InlineMathTan:
  544. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Tan);
  545. break;
  546. case Js::OpCode::InlineMathFloor:
  547. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  548. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
  549. {
  550. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_FloorFlt, IR::HelperDirectMath_FloorDb);
  551. break;
  552. }
  553. #endif
  554. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  555. break;
  556. case Js::OpCode::InlineMathCeil:
  557. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  558. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
  559. {
  560. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_CeilFlt, IR::HelperDirectMath_CeilDb);
  561. break;
  562. }
  563. #endif
  564. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  565. break;
  566. case Js::OpCode::InlineMathRound:
  567. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  568. break;
  569. case Js::OpCode::InlineMathAbs:
  570. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  571. break;
  572. case Js::OpCode::InlineMathImul:
  573. GenerateFastInlineMathImul(instr);
  574. break;
  575. case Js::OpCode::Ctz:
  576. GenerateCtz(instr);
  577. break;
  578. case Js::OpCode::PopCnt:
  579. GeneratePopCnt(instr);
  580. break;
  581. case Js::OpCode::InlineMathClz:
  582. GenerateFastInlineMathClz(instr);
  583. break;
  584. case Js::OpCode::InlineMathFround:
  585. GenerateFastInlineMathFround(instr);
  586. break;
  587. case Js::OpCode::Reinterpret_Prim:
  588. LowerReinterpretPrimitive(instr);
  589. break;
  590. case Js::OpCode::InlineMathMin:
  591. case Js::OpCode::InlineMathMax:
  592. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  593. break;
  594. case Js::OpCode::InlineMathRandom:
  595. this->GenerateFastInlineBuiltInMathRandom(instr);
  596. break;
  597. #ifdef ENABLE_DOM_FAST_PATH
  598. case Js::OpCode::DOMFastPathGetter:
  599. this->LowerFastInlineDOMFastPathGetter(instr);
  600. break;
  601. #endif
  602. case Js::OpCode::InlineArrayPush:
  603. this->GenerateFastInlineArrayPush(instr);
  604. break;
  605. case Js::OpCode::InlineArrayPop:
  606. this->GenerateFastInlineArrayPop(instr);
  607. break;
  608. //Now retrieve the function object from the ArgOut_A_InlineSpecialized instruction opcode to push it on the stack after all the other arguments have been pushed.
  609. //The lowering of the direct call to helper is handled by GenerateDirectCall (architecture specific).
  610. case Js::OpCode::CallDirect:
  611. {
  612. IR::Opnd * src1 = instr->GetSrc1();
  613. Assert(src1->IsHelperCallOpnd());
  614. switch (src1->AsHelperCallOpnd()->m_fnHelper)
  615. {
  616. case IR::JnHelperMethod::HelperString_Split:
  617. case IR::JnHelperMethod::HelperString_Match:
  618. GenerateFastInlineStringSplitMatch(instr);
  619. break;
  620. case IR::JnHelperMethod::HelperRegExp_Exec:
  621. GenerateFastInlineRegExpExec(instr);
  622. break;
  623. case IR::JnHelperMethod::HelperGlobalObject_ParseInt:
  624. GenerateFastInlineGlobalObjectParseInt(instr);
  625. break;
  626. case IR::JnHelperMethod::HelperString_FromCharCode:
  627. GenerateFastInlineStringFromCharCode(instr);
  628. break;
  629. case IR::JnHelperMethod::HelperString_FromCodePoint:
  630. GenerateFastInlineStringFromCodePoint(instr);
  631. break;
  632. case IR::JnHelperMethod::HelperString_CharAt:
  633. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharAt);
  634. break;
  635. case IR::JnHelperMethod::HelperString_CharCodeAt:
  636. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharCodeAt);
  637. break;
  638. case IR::JnHelperMethod::HelperString_Replace:
  639. GenerateFastInlineStringReplace(instr);
  640. break;
  641. case IR::JnHelperMethod::HelperObject_HasOwnProperty:
  642. this->GenerateFastInlineHasOwnProperty(instr);
  643. break;
  644. case IR::JnHelperMethod::HelperArray_IsArray:
  645. this->GenerateFastInlineIsArray(instr);
  646. break;
  647. }
  648. instrPrev = LowerCallDirect(instr);
  649. break;
  650. }
  651. case Js::OpCode::CallIDynamic:
  652. {
  653. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  654. instrPrev = this->LowerCallIDynamic(instr, (ushort)flags);
  655. break;
  656. }
  657. case Js::OpCode::CallIDynamicSpread:
  658. {
  659. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  660. instrPrev = this->LowerCallIDynamicSpread(instr, (ushort)flags);
  661. break;
  662. }
  663. case Js::OpCode::CallI:
  664. case Js::OpCode::CallINew:
  665. case Js::OpCode::CallIFixed:
  666. case Js::OpCode::CallINewTargetNew:
  667. {
  668. Js::CallFlags flags = Js::CallFlags_None;
  669. if (instr->isCtorCall)
  670. {
  671. flags = Js::CallFlags_New;
  672. }
  673. else
  674. {
  675. if (instr->m_opcode == Js::OpCode::CallINew)
  676. {
  677. flags = Js::CallFlags_New;
  678. }
  679. else if (instr->m_opcode == Js::OpCode::CallINewTargetNew)
  680. {
  681. flags = (Js::CallFlags) (Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget);
  682. }
  683. if (instr->GetDst())
  684. {
  685. flags = (Js::CallFlags) (flags | Js::CallFlags_Value);
  686. }
  687. else
  688. {
  689. flags = (Js::CallFlags) (flags | Js::CallFlags_NotUsed);
  690. }
  691. }
  692. if (!PHASE_OFF(Js::CallFastPathPhase, this->m_func) && !noMathFastPath)
  693. {
  694. // We shouldn't have turned this instruction into a fixed method call if we're calling one of the
  695. // built-ins we still inline in the lowerer.
  696. Assert(instr->m_opcode != Js::OpCode::CallIFixed || !Func::IsBuiltInInlinedInLowerer(instr->GetSrc1()));
  697. // Disable InlineBuiltInLibraryCall as it does not work well with 2nd chance reg alloc
  698. // and may invalidate live on back edge data by introducing refs across loops. See Winblue Bug: 577641
  699. //// Callee may still be a library built-in; if so, generate it inline.
  700. //if (this->InlineBuiltInLibraryCall(instr))
  701. //{
  702. // m_lowererMD.LowerCallI(instr, (ushort)flags, true /*isHelper*/);
  703. //}
  704. //else
  705. //{
  706. m_lowererMD.LowerCallI(instr, (ushort)flags);
  707. //}
  708. }
  709. else
  710. {
  711. m_lowererMD.LowerCallI(instr, (ushort)flags);
  712. }
  713. break;
  714. }
  715. case Js::OpCode::AsmJsCallI:
  716. m_lowererMD.LowerAsmJsCallI(instr);
  717. break;
  718. case Js::OpCode::AsmJsCallE:
  719. m_lowererMD.LowerAsmJsCallE(instr);
  720. break;
  721. case Js::OpCode::CallIEval:
  722. {
  723. Js::CallFlags flags = (Js::CallFlags)(Js::CallFlags_ExtraArg | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
  724. if (IsSpreadCall(instr))
  725. {
  726. instrPrev = LowerSpreadCall(instr, flags);
  727. }
  728. else
  729. {
  730. m_lowererMD.LowerCallI(instr, (ushort)flags);
  731. }
  732. #ifdef PERF_HINT
  733. if (PHASE_TRACE1(Js::PerfHintPhase))
  734. {
  735. WritePerfHint(PerfHints::CallsEval, this->m_func, instr->GetByteCodeOffset());
  736. }
  737. #endif
  738. break;
  739. }
  740. case Js::OpCode::CallIPut:
  741. m_lowererMD.LowerCallPut(instr);
  742. break;
  743. case Js::OpCode::CallHelper:
  744. instrPrev = m_lowererMD.LowerCallHelper(instr);
  745. break;
  746. case Js::OpCode::Ret:
  747. if (instr->m_next->m_opcode != Js::OpCode::FunctionExit)
  748. {
  749. // If this RET isn't at the end of the function, insert a branch to
  750. // the epilog.
  751. IR::Instr *exitPrev = m_func->m_exitInstr->m_prev;
  752. if (!exitPrev->IsLabelInstr())
  753. {
  754. exitPrev = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  755. m_func->m_exitInstr->InsertBefore(exitPrev);
  756. }
  757. IR::BranchInstr *exitBr = IR::BranchInstr::New(Js::OpCode::Br,
  758. exitPrev->AsLabelInstr(), m_func);
  759. instr->InsertAfter(exitBr);
  760. m_lowererMD.LowerUncondBranch(exitBr);
  761. }
  762. m_lowererMD.LowerRet(instr);
  763. break;
  764. case Js::OpCode::LdArgumentsFromFrame:
  765. this->LoadArgumentsFromFrame(instr);
  766. break;
  767. case Js::OpCode::LdC_A_I4:
  768. {
  769. IR::Opnd *src1 = instr->UnlinkSrc1();
  770. AssertMsg(src1->IsIntConstOpnd(), "Source of LdC_A_I4 should be an IntConst...");
  771. instrPrev = this->LowerLoadVar(instr,
  772. IR::AddrOpnd::NewFromNumber(static_cast<int32>(src1->AsIntConstOpnd()->GetValue()), this->m_func));
  773. src1->Free(this->m_func);
  774. break;
  775. }
  776. case Js::OpCode::LdC_A_R8:
  777. {
  778. IR::Opnd *src1 = instr->UnlinkSrc1();
  779. AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_A_R8 should be a FloatConst...");
  780. instrPrev = this->LowerLoadVar(instr, src1->AsFloatConstOpnd()->GetAddrOpnd(this->m_func));
  781. src1->Free(this->m_func);
  782. break;
  783. }
  784. case Js::OpCode::LdC_F8_R8:
  785. {
  786. IR::Opnd *src1 = instr->UnlinkSrc1();
  787. AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_F8_R8 should be a FloatConst...");
  788. instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloatConstOpnd()->m_value, instr);
  789. src1->Free(this->m_func);
  790. instr->Remove();
  791. break;
  792. }
  793. case Js::OpCode::NewRegEx:
  794. instrPrev = this->LowerNewRegEx(instr);
  795. break;
  796. case Js::OpCode::Conv_Obj:
  797. this->LowerUnaryHelperMem(instr, IR::HelperOp_ConvObject);
  798. break;
  799. case Js::OpCode::NewWithObject:
  800. this->LowerUnaryHelperMem(instr, IR::HelperOp_NewWithObject);
  801. break;
  802. case Js::OpCode::LdCustomSpreadIteratorList:
  803. this->LowerUnaryHelperMem(instr, IR::HelperOp_ToSpreadedFunctionArgument);
  804. break;
  805. case Js::OpCode::Conv_Num:
  806. this->LowerConvNum(instr, noMathFastPath);
  807. break;
  808. case Js::OpCode::Incr_A:
  809. if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  810. {
  811. this->LowerUnaryHelperMem(instr, IR::HelperOp_Increment);
  812. }
  813. else
  814. {
  815. instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
  816. m_lowererMD.GenerateFastAdd(instr);
  817. instr->FreeSrc2();
  818. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Increment));
  819. }
  820. break;
  821. case Js::OpCode::Decr_A:
  822. if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  823. {
  824. this->LowerUnaryHelperMem(instr, IR::HelperOp_Decrement);
  825. }
  826. else
  827. {
  828. instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
  829. m_lowererMD.GenerateFastSub(instr);
  830. instr->FreeSrc2();
  831. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Decrement));
  832. }
  833. break;
  834. case Js::OpCode::Neg_A:
  835. if (instr->GetDst()->IsFloat())
  836. {
  837. Assert(instr->GetSrc1()->IsFloat());
  838. m_lowererMD.LowerToFloat(instr);
  839. }
  840. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  841. {
  842. this->LowerUnaryHelperMem(instr, IR::HelperOp_Negate);
  843. }
  844. else if (m_lowererMD.GenerateFastNeg(instr))
  845. {
  846. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Negate));
  847. }
  848. break;
  849. case Js::OpCode::Not_A:
  850. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
  851. {
  852. this->LowerUnaryHelperMem(instr, IR::HelperOp_Not);
  853. }
  854. else if (m_lowererMD.GenerateFastNot(instr))
  855. {
  856. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Not));
  857. }
  858. break;
  859. case Js::OpCode::BrEq_I4:
  860. case Js::OpCode::BrNeq_I4:
  861. case Js::OpCode::BrGt_I4:
  862. case Js::OpCode::BrGe_I4:
  863. case Js::OpCode::BrLt_I4:
  864. case Js::OpCode::BrLe_I4:
  865. case Js::OpCode::BrUnGt_I4:
  866. case Js::OpCode::BrUnGe_I4:
  867. case Js::OpCode::BrUnLt_I4:
  868. case Js::OpCode::BrUnLe_I4:
  869. {
  870. // See calls to MarkOneFltTmpSym under BrSrEq. This is to handle the case
  871. // where a branch is type-specialized and uses the result of a float pref op,
  872. // which must then be saved to var at the def.
  873. StackSym *sym = instr->GetSrc1()->GetStackSym();
  874. if (sym)
  875. {
  876. sym = sym->GetVarEquivSym(nullptr);
  877. }
  878. sym = instr->GetSrc2()->GetStackSym();
  879. if (sym)
  880. {
  881. sym = sym->GetVarEquivSym(nullptr);
  882. }
  883. }
  884. // FALLTHROUGH
  885. case Js::OpCode::Neg_I4:
  886. case Js::OpCode::Not_I4:
  887. case Js::OpCode::Add_I4:
  888. case Js::OpCode::Sub_I4:
  889. case Js::OpCode::Mul_I4:
  890. case Js::OpCode::RemU_I4:
  891. case Js::OpCode::Rem_I4:
  892. case Js::OpCode::Or_I4:
  893. case Js::OpCode::Xor_I4:
  894. case Js::OpCode::And_I4:
  895. case Js::OpCode::Shl_I4:
  896. case Js::OpCode::Shr_I4:
  897. case Js::OpCode::ShrU_I4:
  898. case Js::OpCode::Rol_I4:
  899. case Js::OpCode::Ror_I4:
  900. case Js::OpCode::BrTrue_I4:
  901. case Js::OpCode::BrFalse_I4:
  902. #ifdef _M_IX86
  903. if (
  904. instr->GetDst() && instr->GetDst()->IsInt64() ||
  905. instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
  906. instr->GetSrc2() && instr->GetSrc2()->IsInt64()
  907. )
  908. {
  909. m_lowererMD.EmitInt64Instr(instr);
  910. break;
  911. }
  912. #endif
  913. if(instr->HasBailOutInfo())
  914. {
  915. const auto bailOutKind = instr->GetBailOutKind();
  916. if(bailOutKind & IR::BailOutOnResultConditions ||
  917. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
  918. {
  919. const auto nonBailOutInstr = SplitBailOnResultCondition(instr);
  920. IR::LabelInstr *bailOutLabel, *skipBailOutLabel;
  921. LowerBailOnResultCondition(instr, &bailOutLabel, &skipBailOutLabel);
  922. LowerInstrWithBailOnResultCondition(nonBailOutInstr, bailOutKind, bailOutLabel, skipBailOutLabel);
  923. }
  924. else if(bailOutKind == IR::BailOnModByPowerOf2)
  925. {
  926. Assert(instr->m_opcode == Js::OpCode::Rem_I4);
  927. bool fastPath = GenerateSimplifiedInt4Rem(instr);
  928. Assert(fastPath);
  929. instr->FreeSrc1();
  930. instr->FreeSrc2();
  931. this->GenerateBailOut(instr);
  932. }
  933. }
  934. else
  935. {
  936. if (instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4)
  937. {
  938. // fast path
  939. this->GenerateSimplifiedInt4Rem(instr);
  940. // slow path
  941. this->LowerRemI4(instr);
  942. }
  943. #if defined(_M_IX86) || defined(_M_X64)
  944. else if (instr->m_opcode == Js::OpCode::Mul_I4)
  945. {
  946. if (!LowererMD::GenerateSimplifiedInt4Mul(instr))
  947. {
  948. m_lowererMD.EmitInt4Instr(instr);
  949. }
  950. }
  951. #endif
  952. else
  953. {
  954. m_lowererMD.EmitInt4Instr(instr);
  955. }
  956. }
  957. break;
  958. case Js::OpCode::TrapIfMinIntOverNegOne:
  959. LowerTrapIfMinIntOverNegOne(instr);
  960. break;
  961. case Js::OpCode::TrapIfTruncOverflow:
  962. instr->m_opcode = Js::OpCode::Ld_I4;
  963. LowerLdI4(instr);
  964. break;
  965. case Js::OpCode::TrapIfZero:
  966. LowerTrapIfZero(instr);
  967. break;
  968. case Js::OpCode::DivU_I4:
  969. case Js::OpCode::Div_I4:
  970. this->LowerDivI4(instr);
  971. break;
  972. case Js::OpCode::Typeof:
  973. m_lowererMD.LowerTypeof(instr);
  974. break;
  975. case Js::OpCode::TypeofElem:
  976. this->LowerLdElemI(instr, IR::HelperOp_TypeofElem, false);
  977. break;
  978. case Js::OpCode::LdLen_A:
  979. {
  980. bool fastPath = !noMathFastPath;
  981. if(!fastPath && instr->HasBailOutInfo())
  982. {
  983. // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
  984. // bailouts require the fast path.
  985. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  986. if(bailOutKind & IR::BailOutKindBits)
  987. {
  988. fastPath = true;
  989. }
  990. else
  991. {
  992. const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
  993. fastPath =
  994. bailOutKindMinusBits &&
  995. bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
  996. bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
  997. }
  998. }
  999. bool instrIsInHelperBlock = false;
  1000. if(!fastPath)
  1001. {
  1002. LowerLdLen(instr, false);
  1003. }
  1004. else if(GenerateFastLdLen(instr, &instrIsInHelperBlock))
  1005. {
  1006. Assert(
  1007. !instr->HasBailOutInfo() ||
  1008. (instr->GetBailOutKind() & ~IR::BailOutKindBits) != IR::BailOutOnIrregularLength);
  1009. LowerLdLen(instr, instrIsInHelperBlock);
  1010. }
  1011. break;
  1012. }
    case Js::OpCode::LdThis:
    {
        // Load of 'this'. Try the inline check first; otherwise fall back to a
        // helper (profiled helper when JIT-profiling, no-fast-path helper otherwise).
        if (noFieldFastPath || !m_lowererMD.GenerateLdThisCheck(instr))
        {
            IR::JnHelperMethod meth;
            if (instr->IsJitProfilingInstr())
            {
                Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
                // The profiled helper takes the function body as an extra argument.
                m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
                meth = IR::HelperSimpleProfiledLdThis;
                this->LowerBinaryHelper(instr, meth);
            }
            else
            {
                meth = IR::HelperLdThisNoFastPath;
                this->LowerBinaryHelperMem(instr, meth);
            }
        }
        else
        {
            // Inline check succeeded; the helper here is only the slow-path tail.
            this->LowerBinaryHelperMem(instr, IR::HelperLdThis);
        }
        break;
    }
    case Js::OpCode::LdNativeCodeData:
        // Only emitted for out-of-process JIT.
        Assert(m_func->IsOOPJIT());
        instrPrev = LowerLdNativeCodeData(instr);
        break;

    case Js::OpCode::StrictLdThis:
        // Strict-mode 'this': helper call when field fast paths are off,
        // otherwise an inline strict check (and the instr is removed).
        if (noFieldFastPath)
        {
            IR::JnHelperMethod meth;
            if (instr->IsJitProfilingInstr())
            {
                Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
                m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
                meth = IR::HelperSimpleProfiledStrictLdThis;
                this->LowerUnaryHelper(instr, meth);
            }
            else
            {
                meth = IR::HelperStrictLdThis;
                this->LowerUnaryHelperMem(instr, meth);
            }
        }
        else
        {
            m_lowererMD.GenerateLdThisStrict(instr);
            instr->Remove();
        }
        break;

    case Js::OpCode::CheckThis:
        // Inline 'this' check; on failure, bail out (src is consumed by the check).
        m_lowererMD.GenerateLdThisCheck(instr);
        instr->FreeSrc1();
        this->GenerateBailOut(instr);
        break;

    case Js::OpCode::StrictCheckThis:
        m_lowererMD.GenerateLdThisStrict(instr);
        instr->FreeSrc1();
        this->GenerateBailOut(instr);
        break;

    case Js::OpCode::NewScArray:
        instrPrev = this->LowerNewScArray(instr);
        break;

    case Js::OpCode::NewScArrayWithMissingValues:
        this->LowerUnaryHelperMem(instr, IR::HelperScrArr_OP_NewScArrayWithMissingValues);
        break;

    case Js::OpCode::NewScIntArray:
        instrPrev = this->LowerNewScIntArray(instr);
        break;

    case Js::OpCode::NewScFltArray:
        instrPrev = this->LowerNewScFltArray(instr);
        break;

    case Js::OpCode::InitForInEnumerator:
        this->LowerInitForInEnumerator(instr);
        break;
    case Js::OpCode::Add_A:
        // Var addition: float form lowers to a machine op; otherwise fast path
        // (including the fused mul-add pattern) plus a helper fallback.
        if (instr->GetDst()->IsFloat())
        {
            Assert(instr->GetSrc1()->IsFloat());
            Assert(instr->GetSrc2()->IsFloat());
            // we don't want to mix float32 and float64
            Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
            Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_Add);
        }
        else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
        {
            // Fully handled by the fused multiply-add fast path.
        }
        else
        {
            m_lowererMD.GenerateFastAdd(instr);
            this->LowerBinaryHelperMemWithTemp3(instr, IR_HELPER_OP_FULL_OR_INPLACE(Add), IR::HelperOp_AddLeftDead);
        }
        break;

    case Js::OpCode::Div_A:
    {
        if (instr->IsJitProfilingInstr()) {
            LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledDivide);
        }
        else if (instr->GetDst()->IsFloat())
        {
            Assert(instr->GetSrc1()->IsFloat());
            Assert(instr->GetSrc2()->IsFloat());
            Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
            Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else
        {
            if (!PHASE_OFF(Js::MathFastPathPhase, this->m_func) && !noMathFastPath)
            {
                // Fast path only for division by a tagged-int power-of-two constant.
                IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
                if (src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address))
                {
                    int32 value = Js::TaggedInt::ToInt32(src2->m_address);
                    if (Math::IsPow2(value))
                    {
                        m_lowererMD.GenerateFastDivByPow2(instr);
                    }
                }
            }
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Divide));
        }
        break;
    }

    case Js::OpCode::Expo_A:
    {
        // Exponentiation: inline Math.pow intrinsic for floats, helper otherwise.
        if (instr->GetDst()->IsFloat())
        {
            Assert(instr->GetSrc1()->IsFloat());
            Assert(instr->GetSrc2()->IsFloat());
            Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
            Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
        }
        else
        {
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Exponentiation));
        }
        break;
    }
    // Arithmetic / bitwise var ops. Pattern throughout: float operands lower to
    // machine ops; otherwise an inline fast path is attempted and, when the
    // GenerateFastXxx call indicates it, a helper fallback is emitted.
    // NOTE(review): the GenerateFastXxx return value appears to mean "helper
    // fallback still needed" — confirm against LowererMD.
    case Js::OpCode::Mul_A:
        if (instr->GetDst()->IsFloat())
        {
            Assert(instr->GetSrc1()->IsFloat());
            Assert(instr->GetSrc2()->IsFloat());
            Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
            Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_Multiply);
        }
        else if (m_lowererMD.GenerateFastMul(instr))
        {
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Multiply));
        }
        break;

    case Js::OpCode::Rem_A:
        if (instr->GetDst()->IsFloat64())
        {
            this->LowerRemR8(instr);
        }
        else if (instr->IsJitProfilingInstr())
        {
            this->LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledRemainder);
        }
        else
        {
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Modulus));
        }
        break;

    case Js::OpCode::Sub_A:
        if (instr->GetDst()->IsFloat())
        {
            Assert(instr->GetSrc1()->IsFloat());
            Assert(instr->GetSrc2()->IsFloat());
            Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
            Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_Subtract);
        }
        else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
        {
            // Fully handled by the fused multiply-add fast path.
        }
        else
        {
            m_lowererMD.GenerateFastSub(instr);
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Subtract));
        }
        break;

    case Js::OpCode::And_A:
        if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_And);
        }
        else if (m_lowererMD.GenerateFastAnd(instr))
        {
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(And));
        }
        break;

    case Js::OpCode::Or_A:
        if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_Or);
        }
        else if (m_lowererMD.GenerateFastOr(instr))
        {
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Or));
        }
        break;

    case Js::OpCode::Xor_A:
        if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastXor(instr))
        {
            this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Xor));
        }
        break;

    case Js::OpCode::Shl_A:
        if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftLeft(instr))
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftLeft);
        }
        break;

    case Js::OpCode::Shr_A:
        if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRight);
        }
        break;

    case Js::OpCode::ShrU_A:
        // Unsigned shift shares the signed fast-path generator; only the helper differs.
        if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRightU);
        }
        break;
    // Comparison ops producing a value (CmXx). Equality variants go through a
    // shared equality-compare lowering; relational variants get a float fast
    // path, a tagged-int fast path, or a helper call.
    case Js::OpCode::CmEq_A:
    {
        instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmEq_A);
        break;
    }
    case Js::OpCode::CmNeq_A:
    {
        instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmNeq_A);
        break;
    }
    case Js::OpCode::CmSrEq_A:
        instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrEq_A);
        break;

    case Js::OpCode::CmSrNeq_A:
        instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrNeq_A);
        break;

    case Js::OpCode::CmGt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            this->m_lowererMD.GenerateFastCmXxR8(instr);
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGt_A);
        }
        break;

    case Js::OpCode::CmGe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            this->m_lowererMD.GenerateFastCmXxR8(instr);
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGe_A);
        }
        break;

    case Js::OpCode::CmLt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            this->m_lowererMD.GenerateFastCmXxR8(instr);
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLt_A);
        }
        break;

    case Js::OpCode::CmLe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            this->m_lowererMD.GenerateFastCmXxR8(instr);
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLe_A);
        }
        break;

    // Int32 compares never need a helper; always emitted inline.
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmGe_I4:
    case Js::OpCode::CmGt_I4:
    case Js::OpCode::CmLe_I4:
    case Js::OpCode::CmLt_I4:
    case Js::OpCode::CmUnGe_I4:
    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmUnLt_I4:
        this->m_lowererMD.GenerateFastCmXxI4(instr);
        break;

    case Js::OpCode::Conv_Bool:
        instrPrev = this->m_lowererMD.GenerateConvBool(instr);
        break;
    case Js::OpCode::IsInst:
        // instanceof: inline fast check first, then the helper-based lowering.
        m_lowererMD.GenerateFastIsInst(instr);
        instrPrev = this->LowerIsInst(instr, IR::HelperScrObj_OP_IsInst);
        break;

    case Js::OpCode::IsIn:
        this->LowerBinaryHelperMem(instr, IR::HelperOp_IsIn);
        break;

    // Typed-array-view element access (asm.js/wasm).
    case Js::OpCode::LdArrViewElem:
        instrPrev = LowerLdArrViewElem(instr);
        break;

    case Js::OpCode::StArrViewElem:
        instrPrev = LowerStArrViewElem(instr);
        break;

    case Js::OpCode::LdArrViewElemWasm:
        instrPrev = LowerLdArrViewElemWasm(instr);
        break;

    case Js::OpCode::Memset:
    case Js::OpCode::Memcopy:
    {
        instrPrev = LowerMemOp(instr);
        break;
    }

    case Js::OpCode::ArrayDetachedCheck:
        instrPrev = LowerArrayDetachedCheck(instr);
        break;
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    {
        // Note: under debugger (Fast F12) don't let GenerateFastStElemI which calls into ToNumber_Helper
        // which takes double, and currently our helper wrapper doesn't support double.
        bool fastPath = !noMathFastPath && !m_func->IsJitInDebugMode();
        if(!fastPath && instr->HasBailOutInfo())
        {
            // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
            // bailouts require the fast path.
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            const IR::BailOutKind bailOutKindBits = bailOutKind & IR::BailOutKindBits;
            if(bailOutKindBits & ~(IR::BailOutOnMissingValue | IR::BailOutConvertedNativeArray))
            {
                fastPath = true;
            }
            else
            {
                const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
                fastPath =
                    bailOutKindMinusBits &&
                    bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                    bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
            }
        }
        // If the base has no profiled value type yet and is single-def, pull the
        // value type from the profile now so the fast path can specialize.
        IR::Opnd * opnd = instr->GetDst();
        IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
        ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
        if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
        {
            baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
        }
        bool instrIsInHelperBlock = false;
        if (!fastPath)
        {
            this->LowerStElemI(
                instr,
                instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
                false);
        }
        else if (GenerateFastStElemI(instr, &instrIsInHelperBlock))
        {
#if DBG
            // Bailout kinds that are handled entirely by the fast path must not
            // survive into the helper-call lowering.
            if(instr->HasBailOutInfo())
            {
                const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                Assert(
                    (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
                    !(
                        bailOutKind &
                        (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
                    ));
            }
#endif
            this->LowerStElemI(
                instr,
                instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
                instrIsInHelperBlock);
        }
        break;
    }
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::LdMethodElem:
    {
        // Element load (or method-from-element load). LdMethodElem only takes the
        // fast path when the base is likely an object.
        bool fastPath =
            !noMathFastPath &&
            (
                instr->m_opcode != Js::OpCode::LdMethodElem ||
                instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyObject()
            );
        if(!fastPath && instr->HasBailOutInfo())
        {
            // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
            // bailouts require the fast path.
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind & IR::BailOutKindBits)
            {
                fastPath = true;
            }
            else
            {
                const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
                fastPath =
                    bailOutKindMinusBits &&
                    bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                    bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
            }
        }
        // Late profile pull for the base value type, same as the StElemI case.
        IR::Opnd * opnd = instr->GetSrc1();
        IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
        ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
        if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
        {
            baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
        }
        bool instrIsInHelperBlock = false;
        if (!fastPath)
        {
            this->LowerLdElemI(
                instr,
                instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
                false);
        }
        else if (GenerateFastLdElemI(instr, &instrIsInHelperBlock))
        {
#if DBG
            if(instr->HasBailOutInfo())
            {
                const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                Assert(
                    (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
                    !(
                        bailOutKind &
                        (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
                    ));
            }
#endif
            this->LowerLdElemI(
                instr,
                instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
                instrIsInHelperBlock);
        }
        break;
    }
    // Object-literal accessor/computed-property initialization: reuse the
    // StElemI lowering with the matching init helper.
    case Js::OpCode::InitSetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemSetter);
        break;

    case Js::OpCode::InitGetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemGetter);
        break;

    case Js::OpCode::InitComputedProperty:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitComputedProperty);
        break;

    case Js::OpCode::Delete_A:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Delete);
        break;

    case Js::OpCode::DeleteElemI_A:
        this->LowerDeleteElemI(instr, false);
        break;

    case Js::OpCode::DeleteElemIStrict_A:
        this->LowerDeleteElemI(instr, true);
        break;

    case Js::OpCode::BytecodeArgOutCapture:
        m_lowererMD.ChangeToAssign(instr);
        break;

    case Js::OpCode::UnwrapWithObj:
        this->LowerUnaryHelper(instr, IR::HelperOp_UnwrapWithObj);
        break;

#ifdef ENABLE_WASM
    case Js::OpCode::CheckWasmSignature:
        this->LowerCheckWasmSignature(instr);
        break;

    case Js::OpCode::LdWasmFunc:
        instrPrev = this->LowerLdWasmFunc(instr);
        break;

    case Js::OpCode::GrowWasmMemory:
        instrPrev = this->LowerGrowWasmMemory(instr);
        break;
#endif
    case Js::OpCode::Ld_I4:
        LowerLdI4(instr);
        break;

    case Js::OpCode::LdAsmJsFunc:
        if (instr->GetSrc1()->IsIndirOpnd())
        {
            // Pre-scale the function-table access, then fall through to the
            // plain assignment lowering below.
            IR::IndirOpnd* indir = instr->GetSrc1()->AsIndirOpnd();
            byte scale = m_lowererMD.GetDefaultIndirScale();
            if (!indir->GetIndexOpnd())
            {
                // If we have a constant offset, we need to apply the scale now
                int32 offset;
                if (Int32Math::Shl(1, scale, &offset) || Int32Math::Mul(offset, indir->GetOffset(), &offset))
                {
                    // The constant is too big to offset this array. Throw out of range.
                    // Todo:: throw a better error message for this scenario
                    GenerateRuntimeError(instr, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
                }
                indir->SetOffset(offset);
            }
            else
            {
                indir->SetScale(scale);
            }
        }
        //fallthrough
    case Js::OpCode::Ld_A:
    case Js::OpCode::InitConst:
        if (instr->IsJitProfilingInstr() && instr->AsJitProfilingInstr()->isBeginSwitch) {
            LowerProfiledBeginSwitch(instr->AsJitProfilingInstr());
            break;
        }
        m_lowererMD.ChangeToAssign(instr);
        if(instr->HasBailOutInfo())
        {
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind == IR::BailOutExpectingString)
            {
                this->LowerBailOnNotString(instr);
            }
            else
            {
                // Should not reach here as there are only 1 BailOutKind (BailOutExpectingString) currently associated with the Load Instr
                Assert(false);
            }
        }
        break;
    case Js::OpCode::LdIndir:
        // Plain indirect load: reg dst, indir src, no src2.
        Assert(instr->GetDst());
        Assert(instr->GetDst()->IsRegOpnd());
        Assert(instr->GetSrc1());
        Assert(instr->GetSrc1()->IsIndirOpnd());
        Assert(!instr->GetSrc2());
        m_lowererMD.ChangeToAssign(instr);
        break;

    case Js::OpCode::FromVar:
        // Unbox a Var into the dst's machine type; dispatch on dst type.
        Assert(instr->GetSrc1()->GetType() == TyVar);
        if (instr->GetDst()->GetType() == TyInt32)
        {
            if(m_lowererMD.EmitLoadInt32(instr, !(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnNotPrimitive))))
            {
                // Bail out instead of calling a helper
                Assert(instr->GetBailOutKind() == IR::BailOutIntOnly || instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                Assert(!instr->GetSrc1()->GetValueType().IsInt()); // when we know it's an int, it should not have bailout info, to avoid generating a bailout path that will never be taken
                instr->UnlinkSrc1();
                instr->UnlinkDst();
                GenerateBailOut(instr);
            }
        }
        else if (instr->GetDst()->IsFloat())
        {
            if (m_func->GetJITFunctionBody()->IsAsmJsMode())
            {
                m_lowererMD.EmitLoadFloat(instr->GetDst(), instr->GetSrc1(), instr);
                instr->Remove();
            }
            else
            {
                m_lowererMD.EmitLoadFloatFromNumber(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        else if (instr->GetDst()->IsInt64())
        {
            // Var -> int64 is never valid; emit the runtime error and keep the
            // instruction well-formed with a dummy zero assignment.
            GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
            instr->ReplaceSrc1(IR::Int64ConstOpnd::New(0, TyInt64, m_func));
            m_lowererMD.LowerInt64Assign(instr);
        }
#ifdef ENABLE_SIMDJS
        // Support on IA only
#if defined(_M_IX86) || defined(_M_X64)
        else if (instr->GetDst()->IsSimd128())
        {
            // SIMD_JS
            m_lowererMD.GenerateCheckedSimdLoad(instr);
        }
#endif
#endif // ENABLE_SIMDJS
        else
        {
            Assert(UNREACHED);
        }
        break;
    case Js::OpCode::ArgOut_A:
        // I don't know if this can happen in asm.js mode, but if it can, we might want to handle differently
        Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
        // fall-through
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    {
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // Change the ArgOut into a store to the stack for bailouts
        instr->FreeSrc2();
        StackSym *argSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        argSym->m_offset = this->m_func->StackAllocate(sizeof(Js::Var));
        argSym->m_allocated = true;
        argSym->m_isOrphanedArg = true;
        this->m_lowererMD.ChangeToAssign(instr);
    }
    break;

    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::StartCall:
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // We'll just delete these StartCalls during peeps.
        break;
    case Js::OpCode::ToVar:
        // Box a machine-typed value into a Var; dispatch on src type.
        Assert(instr->GetDst()->GetType() == TyVar);
        if (instr->GetSrc1()->GetType() == TyInt32)
        {
            m_lowererMD.EmitLoadVar(instr);
        }
        else if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->IsRegOpnd());
            IR::RegOpnd* float64Opnd = instr->GetSrc1()->AsRegOpnd();
            if (float64Opnd->IsFloat32())
            {
                // Widen float32 to float64 before boxing.
                IR::RegOpnd* float64ConvOpnd = IR::RegOpnd::New(TyFloat64, m_func);
                m_lowererMD.EmitFloat32ToFloat64(float64ConvOpnd, float64Opnd, instr);
                float64Opnd = float64ConvOpnd;
            }
            m_lowererMD.SaveDoubleToVar(
                instr->GetDst()->AsRegOpnd(),
                float64Opnd, instr, instr);
            instr->Remove();
        }
        else if (instr->GetSrc1()->IsInt64())
        {
            // int64 -> Var is never valid; emit the runtime error and keep the
            // instruction well-formed with a dummy zero assignment.
            GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
            instr->ReplaceSrc1(IR::IntConstOpnd::New(0, TyMachReg, m_func));
            m_lowererMD.ChangeToAssign(instr);
        }
#ifdef ENABLE_SIMDJS
#if defined(_M_IX86) || defined(_M_X64)
        else if (IRType_IsSimd128(instr->GetSrc1()->GetType()))
        {
            m_lowererMD.GenerateSimdStore(instr);
        }
#endif
#endif
        else
        {
            Assert(UNREACHED);
        }
        break;
    case Js::OpCode::Conv_Prim:
    {
        // Primitive numeric conversion; dispatch on (dst type, src type).
        // A src defined by TrapIfTruncOverflow gets the checked-truncation path.
        if (IR::Instr::FindSingleDefInstr(Js::OpCode::TrapIfTruncOverflow, instr->GetSrc1()))
        {
            GenerateTruncWithCheck(instr);
            break;
        }
        if (instr->GetDst()->IsFloat())
        {
            if (instr->GetSrc1()->IsIntConstOpnd())
            {
                LoadFloatFromNonReg(instr->UnlinkSrc1(), instr->UnlinkDst(), instr);
            }
            else if (instr->GetSrc1()->IsInt32())
            {
                m_lowererMD.EmitIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsUInt32())
            {
                m_lowererMD.EmitUIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsInt64())
            {
                m_lowererMD.EmitInt64toFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else
            {
                // Remaining case: float32 -> float64 widening.
                Assert(instr->GetDst()->IsFloat64());
                Assert(instr->GetSrc1()->IsFloat32());
                m_lowererMD.EmitFloat32ToFloat64(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        else if (instr->GetDst()->IsInt64())
        {
            if (instr->GetSrc1()->IsInt32())
            {
                m_lowererMD.EmitIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsUInt32())
            {
                m_lowererMD.EmitUIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else
            {
                Assert(0);
            }
        }
        else
        {
            Assert(instr->GetDst()->IsInt32());
            if (instr->GetSrc1()->IsInt64())
            {
                m_lowererMD.EmitLongToInt(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else
            {
                Assert(instr->GetSrc1()->IsFloat());
                m_lowererMD.EmitFloatToInt(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        // The Emit* helpers insert the real instructions; the placeholder dies here.
        instr->Remove();
        break;
    }
    case Js::OpCode::FunctionExit:
        LowerFunctionExit(instr);
        // The rest of Epilog generation happens after reg allocation
        break;

    case Js::OpCode::FunctionEntry:
        LowerFunctionEntry(instr);
        // The rest of Prolog generation happens after reg allocation
        break;

    case Js::OpCode::ArgIn_Rest:
    case Js::OpCode::ArgIn_A:
        // asm.js bodies (except loop bodies) use their own incoming-arg lowering.
        if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
        {
            instrPrev = LowerArgInAsmJs(instr);
        }
        else
        {
            instrPrev = LowerArgIn(instr);
        }
        break;
    case Js::OpCode::Label:
        if (instr->AsLabelInstr()->m_isLoopTop)
        {
            if (this->outerMostLoopLabel == instr)
            {
                // Leaving the outermost loop: restore the default fast-path
                // settings and mark the loop processed.
                noFieldFastPath = !defaultDoFastPath;
                noMathFastPath = !defaultDoFastPath;
                this->outerMostLoopLabel = nullptr;
                instr->AsLabelInstr()->GetLoop()->isProcessed = true;
            }
            // Keep constant-address syms and accumulated syms live across the back edge.
            this->m_func->MarkConstantAddressSyms(instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms);
            instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms->Or(this->addToLiveOnBackEdgeSyms);
        }
        break;

    case Js::OpCode::Br:
        m_lowererMD.LowerUncondBranch(instr);
        break;

    case Js::OpCode::BrFncEqApply:
        LowerBrFncApply(instr,IR::HelperOp_OP_BrFncEqApply);
        break;

    case Js::OpCode::BrFncNeqApply:
        LowerBrFncApply(instr,IR::HelperOp_OP_BrFncNeqApply);
        break;

    case Js::OpCode::BrHasSideEffects:
    case Js::OpCode::BrNotHasSideEffects:
        m_lowererMD.GenerateFastBrS(instr->AsBranchInstr());
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
        if (instr->GetSrc1()->IsFloat())
        {
            GenerateFastBrBool(instr->AsBranchInstr());
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) ||
            noMathFastPath ||
            GenerateFastBrBool(instr->AsBranchInstr()))
        {
            // NOTE(review): GenerateFastBrBool's return appears to mean "helper
            // path still needed" — confirm against its definition.
            this->LowerBrBMem(instr, IR::HelperConv_ToBoolean);
        }
        break;
    case Js::OpCode::BrOnObject_A:
        if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBrOnObject(instr, IR::HelperOp_IsObject);
        }
        else
        {
            GenerateFastBrOnObject(instr);
        }
        break;

    case Js::OpCode::BrOnBaseConstructorKind:
        this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsBaseConstructorKind);
        break;

    case Js::OpCode::BrOnClassConstructor:
        this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsClassConstructor);
        break;

    // Raw pointer/null comparisons: direct machine conditional branch.
    case Js::OpCode::BrAddr_A:
    case Js::OpCode::BrNotAddr_A:
    case Js::OpCode::BrNotNull_A:
        m_lowererMD.LowerCondBranch(instr);
        break;
    // Value branches. Equality forms go through the shared equality-branch
    // lowering; relational forms lower to a float compare when the operands are
    // floats, otherwise LowerBrCMem with noMathFastPath deciding helper-only mode.
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        instrPrev = LowerEqualityBranch(instr, IR::HelperOp_Equal);
        break;

    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrNotGe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, true, false /*isHelper*/);
        }
        break;

    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrNotGt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_Greater, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Greater, true, false /*isHelper*/);
        }
        break;

    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrNotLt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_Less, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Less, true, false /*isHelper*/);
        }
        break;

    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotLe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_LessEqual, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_LessEqual, true, false /*isHelper*/);
        }
        break;

    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotEqual);
        break;
  1897. case Js::OpCode::MultiBr:
  1898. {
  1899. IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();
  1900. switch (multiBranchInstr->m_kind)
  1901. {
  1902. case IR::MultiBranchInstr::StrDictionary:
  1903. this->GenerateSwitchStringLookup(instr);
  1904. break;
  1905. case IR::MultiBranchInstr::SingleCharStrJumpTable:
  1906. this->GenerateSingleCharStrJumpTableLookup(instr);
  1907. m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
  1908. break;
  1909. case IR::MultiBranchInstr::IntJumpTable:
  1910. this->LowerMultiBr(instr);
  1911. m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
  1912. break;
  1913. default:
  1914. Assert(false);
  1915. }
  1916. break;
  1917. }
  1918. case Js::OpCode::BrSrEq_A:
  1919. case Js::OpCode::BrSrNotNeq_A:
  1920. instrPrev = LowerEqualityBranch(instr, IR::HelperOp_StrictEqual);
  1921. break;
  1922. case Js::OpCode::BrSrNeq_A:
  1923. case Js::OpCode::BrSrNotEq_A:
  1924. instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotStrictEqual);
  1925. break;
  1926. case Js::OpCode::BrOnEmpty:
  1927. case Js::OpCode::BrOnNotEmpty:
  1928. if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func))
  1929. {
  1930. this->GenerateFastBrBReturn(instr);
  1931. this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, true);
  1932. }
  1933. else
  1934. {
  1935. this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, false);
  1936. }
  1937. break;
  1938. case Js::OpCode::BrOnHasProperty:
  1939. case Js::OpCode::BrOnNoProperty:
  1940. this->LowerBrProperty(instr, IR::HelperOp_HasProperty);
  1941. break;
  1942. case Js::OpCode::BrOnException:
  1943. Assert(!this->m_func->DoGlobOpt());
  1944. instr->Remove();
  1945. break;
  1946. case Js::OpCode::BrOnNoException:
  1947. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  1948. break;
  1949. case Js::OpCode::StSlot:
  1950. this->LowerStSlot(instr);
  1951. break;
  1952. case Js::OpCode::StSlotChkUndecl:
  1953. this->LowerStSlotChkUndecl(instr);
  1954. break;
  1955. case Js::OpCode::ProfiledLoopStart:
  1956. {
  1957. Assert(m_func->DoSimpleJitDynamicProfile());
  1958. Assert(instr->IsJitProfilingInstr());
  1959. // Check for the helper instr from IRBuilding (it won't be there if there are no LoopEnds due to an infinite loop)
  1960. auto prev = instr->m_prev;
  1961. if (prev->IsJitProfilingInstr() && prev->AsJitProfilingInstr()->isLoopHelper)
  1962. {
  1963. auto saveOpnd = prev->UnlinkDst();
  1964. instrPrev = prev->m_prev;
  1965. prev->Remove();
  1966. const auto starFlag = GetImplicitCallFlagsOpnd();
  1967. IR::AutoReuseOpnd a(starFlag, m_func);
  1968. this->InsertMove(saveOpnd, starFlag, instr);
  1969. this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), instr);
  1970. }
  1971. else
  1972. {
  1973. #if DBG
  1974. // Double check that we indeed do not have a LoopEnd that is part of the same loop for the rest of the function
  1975. auto cur = instr;
  1976. auto loopNumber = instr->AsJitProfilingInstr()->loopNumber;
  1977. while (cur)
  1978. {
  1979. Assert(cur->m_opcode != Js::OpCode::ProfiledLoopEnd || cur->IsJitProfilingInstr() && cur->AsJitProfilingInstr()->loopNumber != loopNumber);
  1980. cur = cur->m_next;
  1981. }
  1982. #endif
  1983. }
  1984. // If we turned off fulljit, there's no reason to do this.
  1985. if (PHASE_OFF(Js::FullJitPhase, m_func))
  1986. {
  1987. instr->Remove();
  1988. }
  1989. else
  1990. {
  1991. Assert(instr->GetDst());
  1992. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleGetScheduledEntryPoint, m_func));
  1993. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
  1994. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  1995. this->m_lowererMD.LowerCall(instr, 0);
  1996. }
  1997. break;
  1998. }
  1999. case Js::OpCode::ProfiledLoopBodyStart:
  2000. {
  2001. Assert(m_func->DoSimpleJitDynamicProfile());
  2002. const auto loopNum = instr->AsJitProfilingInstr()->loopNumber;
  2003. Assert(loopNum < m_func->GetJITFunctionBody()->GetLoopCount());
  2004. auto entryPointOpnd = instr->UnlinkSrc1();
  2005. auto dobailout = instr->UnlinkDst();
  2006. const auto dobailoutType = TyUint8;
  2007. Assert(dobailout->GetType() == TyUint8 && sizeof(decltype(Js::SimpleJitHelpers::IsLoopCodeGenDone(nullptr))) == 1);
  2008. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(0, TyUint32, m_func)); // zero indicates that we do not want to add flags back in
  2009. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(loopNum, TyUint32, m_func));
  2010. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  2011. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
  2012. m_lowererMD.LowerCall(instr, 0);
  2013. // Outline of JITed code:
  2014. //
  2015. // LoopStart:
  2016. // entryPoint = GetScheduledEntryPoint(framePtr, loopNum)
  2017. // LoopBodyStart:
  2018. // uint8 dobailout;
  2019. // if (entryPoint) {
  2020. // dobailout = IsLoopCodeGenDone(entryPoint)
  2021. // } else {
  2022. // dobailout = ++interpretCount >= threshold
  2023. // }
  2024. // // already exists from IRBuilding:
  2025. // if (dobailout) {
  2026. // Bailout
  2027. // }
  2028. if (PHASE_OFF(Js::FullJitPhase, m_func) || !m_func->GetJITFunctionBody()->DoJITLoopBody())
  2029. {
  2030. // If we're not doing fulljit, we've turned off JitLoopBodies, or if we don't have loop headers allocated (the function has a Try, etc)
  2031. // just move false to dobailout
  2032. this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), instr->m_next);
  2033. }
  2034. else if (m_func->GetWorkItem()->GetJITTimeInfo()->ForceJITLoopBody())
  2035. {
  2036. // If we're forcing jit loop bodies, move true to dobailout
  2037. this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), instr->m_next);
  2038. }
  2039. else
  2040. {
  2041. // Put in the labels
  2042. auto entryPointIsNull = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2043. auto checkDoBailout = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2044. instr->InsertAfter(checkDoBailout);
  2045. instr->InsertAfter(entryPointIsNull);
  2046. this->InsertCompareBranch(entryPointOpnd, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindDynamicMisc, m_func), Js::OpCode::BrEq_A, false, entryPointIsNull, instr->m_next);
  2047. // If the entry point is not null
  2048. auto isCodeGenDone = IR::Instr::New(Js::OpCode::Call, dobailout, IR::HelperCallOpnd::New(IR::HelperSimpleIsLoopCodeGenDone, m_func), m_func);
  2049. entryPointIsNull->InsertBefore(isCodeGenDone);
  2050. m_lowererMD.LoadHelperArgument(isCodeGenDone, entryPointOpnd);
  2051. m_lowererMD.LowerCall(isCodeGenDone, 0);
  2052. this->InsertBranch(LowererMD::MDUncondBranchOpcode, true, checkDoBailout, entryPointIsNull);
  2053. const auto type = TyUint32;
  2054. auto countReg = IR::RegOpnd::New(type, m_func);
  2055. auto countAddr = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderAddr(loopNum) + Js::LoopHeader::GetOffsetOfInterpretCount(), type, m_func);
  2056. IR::AutoReuseOpnd a(countReg, m_func), b(countAddr, m_func);
  2057. this->InsertAdd(false, countReg, countAddr, IR::IntConstOpnd::New(1, type, m_func, true), checkDoBailout);
  2058. this->InsertMove(countAddr, countReg, checkDoBailout);
  2059. this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), checkDoBailout);
  2060. this->InsertCompareBranch(countReg, IR::IntConstOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderData(loopNum)->interpretCount, type, m_func), Js::OpCode::BrLt_A, checkDoBailout, checkDoBailout);
  2061. this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), checkDoBailout);
  2062. // fallthrough
  2063. // Label checkDoBailout (inserted above)
  2064. }
  2065. }
  2066. break;
  2067. case Js::OpCode::ProfiledLoopEnd:
  2068. {
  2069. Assert(m_func->DoSimpleJitDynamicProfile());
  2070. // This is set up in IRBuilding
  2071. Assert(instr->GetSrc1());
  2072. IR::Opnd* savedFlags = instr->UnlinkSrc1();
  2073. m_lowererMD.LoadHelperArgument(instr, savedFlags);
  2074. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
  2075. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  2076. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
  2077. m_lowererMD.LowerCall(instr, 0);
  2078. }
  2079. break;
  2080. case Js::OpCode::InitLoopBodyCount:
  2081. Assert(this->m_func->IsLoopBody());
  2082. instr->SetSrc1(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
  2083. this->m_lowererMD.ChangeToAssign(instr);
  2084. break;
  2085. case Js::OpCode::StLoopBodyCount:
  2086. Assert(this->m_func->IsLoopBody());
  2087. this->LowerStLoopBodyCount(instr);
  2088. break;
  2089. case Js::OpCode::IncrLoopBodyCount:
  2090. {
  2091. Assert(this->m_func->IsLoopBody());
  2092. instr->m_opcode = Js::OpCode::Add_I4;
  2093. instr->SetSrc2(IR::IntConstOpnd::New(1, TyUint32, this->m_func));
  2094. this->m_lowererMD.EmitInt4Instr(instr);
  2095. // Update the jittedLoopIterations field on the entryPointInfo
  2096. IR::MemRefOpnd *iterationsAddressOpnd = IR::MemRefOpnd::New(this->m_func->GetJittedLoopIterationsSinceLastBailoutAddress(), TyUint32, this->m_func);
  2097. m_lowererMD.CreateAssign(iterationsAddressOpnd, instr->GetDst(), instr);
  2098. break;
  2099. }
  2100. #if !FLOATVAR
  2101. case Js::OpCode::StSlotBoxTemp:
  2102. this->LowerStSlotBoxTemp(instr);
  2103. break;
  2104. #endif
  2105. case Js::OpCode::LdSlot:
  2106. case Js::OpCode::LdSlotArr:
  2107. {
  2108. Js::ProfileId profileId;
  2109. IR::Instr *profileBeforeInstr;
  2110. if(instr->IsJitProfilingInstr())
  2111. {
  2112. profileId = instr->AsJitProfilingInstr()->profileId;
  2113. Assert(profileId != Js::Constants::NoProfileId);
  2114. profileBeforeInstr = instr->m_next;
  2115. }
  2116. else
  2117. {
  2118. profileId = Js::Constants::NoProfileId;
  2119. profileBeforeInstr = nullptr;
  2120. }
  2121. this->LowerLdSlot(instr);
  2122. if(profileId != Js::Constants::NoProfileId)
  2123. {
  2124. LowerProfileLdSlot(instr->GetDst(), instr->m_func, profileId, profileBeforeInstr);
  2125. }
  2126. break;
  2127. }
  2128. case Js::OpCode::ChkUndecl:
  2129. instrPrev = this->LowerChkUndecl(instr);
  2130. break;
  2131. case Js::OpCode::LdArrHead:
  2132. this->LowerLdArrHead(instr);
  2133. break;
  2134. case Js::OpCode::StElemC:
  2135. case Js::OpCode::StArrSegElemC:
  2136. this->LowerStElemC(instr);
  2137. break;
  2138. case Js::OpCode::LdEnv:
  2139. instrPrev = this->LowerLdEnv(instr);
  2140. break;
  2141. case Js::OpCode::LdAsmJsEnv:
  2142. instrPrev = this->LowerLdAsmJsEnv(instr);
  2143. break;
  2144. case Js::OpCode::LdElemUndef:
  2145. this->LowerLdElemUndef(instr);
  2146. break;
  2147. case Js::OpCode::LdElemUndefScoped:
  2148. this->LowerElementUndefinedScopedMem(instr, IR::HelperOp_LdElemUndefScoped);
  2149. break;
  2150. case Js::OpCode::EnsureNoRootFld:
  2151. this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootProperty);
  2152. break;
  2153. case Js::OpCode::EnsureNoRootRedeclFld:
  2154. this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootRedeclProperty);
  2155. break;
  2156. case Js::OpCode::ScopedEnsureNoRedeclFld:
  2157. this->LowerElementUndefinedScoped(instr, IR::HelperOp_EnsureNoRedeclPropertyScoped);
  2158. break;
  2159. case Js::OpCode::LdFuncExpr:
  2160. // src = function Expression
  2161. LoadFuncExpression(instr);
  2162. this->GenerateGetCurrentFunctionObject(instr);
  2163. break;
  2164. case Js::OpCode::LdNewTarget:
  2165. this->GenerateLoadNewTarget(instr);
  2166. break;
  2167. case Js::OpCode::ChkNewCallFlag:
  2168. this->GenerateCheckForCallFlagNew(instr);
  2169. break;
  2170. case Js::OpCode::StFuncExpr:
  2171. // object.propid = src
  2172. LowerStFld(instr, IR::HelperOp_StFunctionExpression, IR::HelperOp_StFunctionExpression, false);
  2173. break;
  2174. case Js::OpCode::InitLetFld:
  2175. case Js::OpCode::InitRootLetFld:
  2176. LowerStFld(instr, IR::HelperOp_InitLetFld, IR::HelperOp_InitLetFld, false);
  2177. break;
  2178. case Js::OpCode::InitConstFld:
  2179. case Js::OpCode::InitRootConstFld:
  2180. LowerStFld(instr, IR::HelperOp_InitConstFld, IR::HelperOp_InitConstFld, false);
  2181. break;
  2182. case Js::OpCode::InitUndeclRootLetFld:
  2183. LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootLetFld);
  2184. break;
  2185. case Js::OpCode::InitUndeclRootConstFld:
  2186. LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootConstFld);
  2187. break;
  2188. case Js::OpCode::InitUndeclConsoleLetFld:
  2189. LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleLetFld);
  2190. break;
  2191. case Js::OpCode::InitUndeclConsoleConstFld:
  2192. LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleConstFld);
  2193. break;
  2194. case Js::OpCode::InitClassMember:
  2195. LowerStFld(instr, IR::HelperOp_InitClassMember, IR::HelperOp_InitClassMember, false);
  2196. break;
  2197. case Js::OpCode::InitClassMemberComputedName:
  2198. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberComputedName);
  2199. break;
  2200. case Js::OpCode::InitClassMemberGetComputedName:
  2201. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberGetComputedName);
  2202. break;
  2203. case Js::OpCode::InitClassMemberSetComputedName:
  2204. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberSetComputedName);
  2205. break;
  2206. case Js::OpCode::InitClassMemberGet:
  2207. instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberGet, IR::HelperOp_InitClassMemberGet, false);
  2208. break;
  2209. case Js::OpCode::InitClassMemberSet:
  2210. instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberSet, IR::HelperOp_InitClassMemberSet, false);
  2211. break;
  2212. case Js::OpCode::NewStackFrameDisplay:
  2213. this->LowerLdFrameDisplay(instr, m_func->DoStackFrameDisplay());
  2214. break;
  2215. case Js::OpCode::LdFrameDisplay:
  2216. this->LowerLdFrameDisplay(instr, false);
  2217. break;
  2218. case Js::OpCode::LdInnerFrameDisplay:
  2219. this->LowerLdInnerFrameDisplay(instr);
  2220. break;
  2221. case Js::OpCode::Throw:
  2222. case Js::OpCode::InlineThrow:
  2223. case Js::OpCode::EHThrow:
  2224. this->LowerUnaryHelperMem(instr, IR::HelperOp_Throw);
  2225. break;
  2226. case Js::OpCode::TryCatch:
  2227. instrPrev = this->LowerTry(instr, true /*try-catch*/);
  2228. break;
  2229. case Js::OpCode::TryFinally:
  2230. instrPrev = this->LowerTry(instr, false /*try-finally*/);
  2231. break;
  2232. case Js::OpCode::Catch:
  2233. instrPrev = m_lowererMD.LowerCatch(instr);
  2234. break;
  2235. case Js::OpCode::Finally:
  2236. instr->Remove();
  2237. break;
  2238. case Js::OpCode::LeaveNull:
  2239. if (this->m_func->IsSimpleJit() || !this->m_func->DoOptimizeTry())
  2240. {
  2241. instrPrev = m_lowererMD.LowerLeaveNull(instr);
  2242. }
  2243. else
  2244. {
  2245. instr->Remove();
  2246. }
  2247. break;
  2248. case Js::OpCode::Leave:
  2249. if (this->m_func->HasTry() && this->m_func->DoOptimizeTry())
  2250. {
  2251. // Required in Register Allocator to mark region boundaries
  2252. break;
  2253. }
  2254. instrPrev = m_lowererMD.LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), false /*fromFinalLower*/, instr->AsBranchInstr()->m_isOrphanedLeave);
  2255. break;
  2256. case Js::OpCode::BailOnException:
  2257. instrPrev = this->LowerBailOnException(instr);
  2258. break;
  2259. case Js::OpCode::BailOnEarlyExit:
  2260. instrPrev = this->LowerBailOnEarlyExit(instr);
  2261. break;
  2262. case Js::OpCode::RuntimeTypeError:
  2263. case Js::OpCode::InlineRuntimeTypeError:
  2264. this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeTypeError);
  2265. break;
  2266. case Js::OpCode::RuntimeReferenceError:
  2267. case Js::OpCode::InlineRuntimeReferenceError:
  2268. this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
  2269. break;
  2270. case Js::OpCode::Break:
  2271. // Inline breakpoint: for now do nothing.
  2272. break;
  2273. case Js::OpCode::Nop:
  2274. // This may need support for debugging the JIT, but for now just remove the instruction.
  2275. instr->Remove();
  2276. break;
  2277. case Js::OpCode::Unused:
  2278. // Currently Unused is used with ScopedLdInst to keep the second dst alive, but we don't need to lower it.
  2279. instr->Remove();
  2280. break;
  2281. case Js::OpCode::StatementBoundary:
  2282. // This instruction is merely to help convey source info through the IR
  2283. // and eventually generate the nativeOffset maps.
  2284. #if DBG_DUMP && DBG
  2285. // If we have a JITStatementBreakpoint, then we should break on this statement
  2286. {
  2287. uint32 statementIndex = instr->AsPragmaInstr()->m_statementIndex;
  2288. if (Js::Configuration::Global.flags.StatementDebugBreak.Contains(instr->m_func->GetSourceContextId(), instr->m_func->GetLocalFunctionId(), statementIndex))
  2289. {
  2290. IR::Instr* tempinstr = instr;
  2291. Assert(tempinstr != nullptr);
  2292. // go past any labels, and then add a debug breakpoint
  2293. while (tempinstr->m_next != nullptr && tempinstr->m_next->m_opcode == Js::OpCode::Label)
  2294. {
  2295. tempinstr = tempinstr->m_next;
  2296. }
  2297. this->m_lowererMD.GenerateDebugBreak(tempinstr);
  2298. }
  2299. }
  2300. #endif
  2301. break;
  2302. case Js::OpCode::BailOnNotPolymorphicInlinee:
  2303. instrPrev = LowerBailOnNotPolymorphicInlinee(instr);
  2304. break;
  2305. case Js::OpCode::BailOnNoSimdTypeSpec:
  2306. case Js::OpCode::BailOnNoProfile:
  2307. this->GenerateBailOut(instr, nullptr, nullptr);
  2308. break;
  2309. case Js::OpCode::BailOnNotSpreadable:
  2310. instrPrev = this->LowerBailOnNotSpreadable(instr);
  2311. break;
  2312. case Js::OpCode::BailOnNotStackArgs:
  2313. instrPrev = this->LowerBailOnNotStackArgs(instr);
  2314. break;
  2315. case Js::OpCode::BailOnEqual:
  2316. case Js::OpCode::BailOnNotEqual:
  2317. instrPrev = this->LowerBailOnEqualOrNotEqual(instr);
  2318. break;
  2319. case Js::OpCode::BailOnNegative:
  2320. LowerBailOnNegative(instr);
  2321. break;
  2322. case Js::OpCode::BailForDebugger:
  2323. instrPrev = this->LowerBailForDebugger(instr);
  2324. break;
  2325. case Js::OpCode::BailOnNotObject:
  2326. instrPrev = this->LowerBailOnNotObject(instr);
  2327. break;
  2328. case Js::OpCode::BailOnNotBuiltIn:
  2329. instrPrev = this->LowerBailOnNotBuiltIn(instr);
  2330. break;
  2331. case Js::OpCode::BailOnNotArray:
  2332. {
  2333. IR::Instr *bailOnNotArray = nullptr, *bailOnMissingValue = nullptr;
  2334. SplitBailOnNotArray(instr, &bailOnNotArray, &bailOnMissingValue);
  2335. IR::RegOpnd *const arrayOpnd = LowerBailOnNotArray(bailOnNotArray);
  2336. if(bailOnMissingValue)
  2337. {
  2338. LowerBailOnMissingValue(bailOnMissingValue, arrayOpnd);
  2339. }
  2340. break;
  2341. }
  2342. case Js::OpCode::BoundCheck:
  2343. case Js::OpCode::UnsignedBoundCheck:
  2344. LowerBoundCheck(instr);
  2345. break;
  2346. case Js::OpCode::BailTarget:
  2347. instrPrev = this->LowerBailTarget(instr);
  2348. break;
  2349. case Js::OpCode::InlineeStart:
  2350. this->LowerInlineeStart(instr);
  2351. break;
  2352. case Js::OpCode::EndCallForPolymorphicInlinee:
  2353. instr->Remove();
  2354. break;
  2355. case Js::OpCode::InlineeEnd:
  2356. this->LowerInlineeEnd(instr);
  2357. break;
  2358. case Js::OpCode::InlineBuiltInEnd:
  2359. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  2360. this->LowerInlineBuiltIn(instr);
  2361. break;
  2362. case Js::OpCode::ExtendArg_A:
  2363. if (instr->GetSrc1()->IsRegOpnd())
  2364. {
  2365. IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
  2366. this->addToLiveOnBackEdgeSyms->Clear(src1->m_sym->m_id);
  2367. }
  2368. instr->Remove();
  2369. break;
  2370. case Js::OpCode::InlineBuiltInStart:
  2371. case Js::OpCode::BytecodeArgOutUse:
  2372. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  2373. instr->Remove();
  2374. break;
  2375. case Js::OpCode::DeadBrEqual:
  2376. this->LowerBinaryHelperMem(instr, IR::HelperOp_Equal);
  2377. break;
  2378. case Js::OpCode::DeadBrSrEqual:
  2379. this->LowerBinaryHelperMem(instr, IR::HelperOp_StrictEqual);
  2380. break;
  2381. case Js::OpCode::DeadBrRelational:
  2382. this->LowerBinaryHelperMem(instr, IR::HelperOp_Greater);
  2383. break;
  2384. case Js::OpCode::DeadBrOnHasProperty:
  2385. this->LowerUnaryHelperMem(instr, IR::HelperOp_HasProperty);
  2386. break;
  2387. case Js::OpCode::DeletedNonHelperBranch:
  2388. break;
  2389. case Js::OpCode::InitClass:
  2390. instrPrev = this->LowerInitClass(instr);
  2391. break;
  2392. case Js::OpCode::NewConcatStrMulti:
  2393. this->LowerNewConcatStrMulti(instr);
  2394. break;
  2395. case Js::OpCode::NewConcatStrMultiBE:
  2396. this->LowerNewConcatStrMultiBE(instr);
  2397. break;
  2398. case Js::OpCode::SetConcatStrMultiItem:
  2399. this->LowerSetConcatStrMultiItem(instr);
  2400. break;
  2401. case Js::OpCode::SetConcatStrMultiItemBE:
  2402. Assert(instr->GetSrc1()->IsRegOpnd());
  2403. this->addToLiveOnBackEdgeSyms->Clear(instr->GetSrc1()->GetStackSym()->m_id);
  2404. // code corresponding to it should already have been generated while lowering NewConcatStrMultiBE
  2405. instr->Remove();
  2406. break;
  2407. case Js::OpCode::Conv_Str:
  2408. this->LowerConvStr(instr);
  2409. break;
  2410. case Js::OpCode::Coerce_Str:
  2411. this->LowerCoerseStr(instr);
  2412. break;
  2413. case Js::OpCode::Coerce_StrOrRegex:
  2414. this->LowerCoerseStrOrRegex(instr);
  2415. break;
  2416. case Js::OpCode::Coerce_Regex:
  2417. this->LowerCoerseRegex(instr);
  2418. break;
  2419. case Js::OpCode::Conv_PrimStr:
  2420. this->LowerConvPrimStr(instr);
  2421. break;
  2422. case Js::OpCode::ObjectFreeze:
  2423. this->LowerUnaryHelper(instr, IR::HelperOP_Freeze);
  2424. break;
  2425. case Js::OpCode::ClearAttributes:
  2426. this->LowerBinaryHelper(instr, IR::HelperOP_ClearAttributes);
  2427. break;
  2428. case Js::OpCode::SpreadArrayLiteral:
  2429. this->LowerSpreadArrayLiteral(instr);
  2430. break;
  2431. case Js::OpCode::CallIExtended:
  2432. {
  2433. // Currently, the only use for CallIExtended is a call that uses spread.
  2434. Assert(IsSpreadCall(instr));
  2435. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_None);
  2436. break;
  2437. }
  2438. case Js::OpCode::CallIExtendedNew:
  2439. {
  2440. // Currently, the only use for CallIExtended is a call that uses spread.
  2441. Assert(IsSpreadCall(instr));
  2442. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_New);
  2443. break;
  2444. }
  2445. case Js::OpCode::CallIExtendedNewTargetNew:
  2446. {
  2447. // Currently, the only use for CallIExtended is a call that uses spread.
  2448. Assert(IsSpreadCall(instr));
  2449. instrPrev = this->LowerSpreadCall(instr, (Js::CallFlags)(Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget));
  2450. break;
  2451. }
  2452. case Js::OpCode::LdSpreadIndices:
  2453. instr->Remove();
  2454. break;
  2455. case Js::OpCode::LdHomeObj:
  2456. this->GenerateLdHomeObj(instr);
  2457. break;
  2458. case Js::OpCode::LdHomeObjProto:
  2459. this->GenerateLdHomeObjProto(instr);
  2460. break;
  2461. case Js::OpCode::LdFuncObj:
  2462. this->GenerateLdFuncObj(instr);
  2463. break;
  2464. case Js::OpCode::LdFuncObjProto:
  2465. this->GenerateLdFuncObjProto(instr);
  2466. break;
  2467. case Js::OpCode::ScopedLdHomeObj:
  2468. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdHomeObj);
  2469. break;
  2470. case Js::OpCode::ScopedLdFuncObj:
  2471. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdFuncObj);
  2472. break;
  2473. case Js::OpCode::SetHomeObj:
  2474. {
  2475. this->GenerateSetHomeObj(instr);
  2476. break;
  2477. }
  2478. case Js::OpCode::ImportCall:
  2479. {
  2480. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2481. IR::Opnd *functionObjOpnd = nullptr;
  2482. m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  2483. LoadScriptContext(instr);
  2484. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2485. m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
  2486. m_lowererMD.ChangeToHelperCall(instr, IR::HelperImportCall);
  2487. break;
  2488. }
  2489. case Js::OpCode::SetComputedNameVar:
  2490. {
  2491. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  2492. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2493. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  2494. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2495. m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetComputedNameVar);
  2496. break;
  2497. }
  2498. case Js::OpCode::InlineeMetaArg:
  2499. {
  2500. m_lowererMD.ChangeToAssign(instr);
  2501. break;
  2502. }
  2503. case Js::OpCode::Yield:
  2504. {
  2505. instr->FreeSrc1(); // Source is not actually used by the backend other than to calculate lifetime
  2506. IR::Opnd* dstOpnd = instr->UnlinkDst();
  2507. // prm2 is the ResumeYieldData pointer per calling convention established in JavascriptGenerator::CallGenerator
  2508. // This is the value the bytecode expects to be in the dst register of the Yield opcode after resumption.
  2509. // Load it here after the bail-in.
  2510. StackSym *resumeYieldDataSym = StackSym::NewImplicitParamSym(4, m_func);
  2511. m_func->SetArgOffset(resumeYieldDataSym, (LowererMD::GetFormalParamOffset() + 1) * MachPtr);
  2512. IR::SymOpnd * resumeYieldDataOpnd = IR::SymOpnd::New(resumeYieldDataSym, TyMachPtr, m_func);
  2513. AssertMsg(instr->m_next->IsLabelInstr(), "Expect the resume label to immediately follow Yield instruction");
  2514. m_lowererMD.CreateAssign(dstOpnd, resumeYieldDataOpnd, instr->m_next->m_next);
  2515. GenerateBailOut(instr);
  2516. break;
  2517. }
  2518. case Js::OpCode::ResumeYield:
  2519. case Js::OpCode::ResumeYieldStar:
  2520. {
  2521. IR::Opnd *srcOpnd1 = instr->UnlinkSrc1();
  2522. IR::Opnd *srcOpnd2 = instr->m_opcode == Js::OpCode::ResumeYieldStar ? instr->UnlinkSrc2() : IR::AddrOpnd::NewNull(m_func);
  2523. m_lowererMD.LoadHelperArgument(instr, srcOpnd2);
  2524. m_lowererMD.LoadHelperArgument(instr, srcOpnd1);
  2525. m_lowererMD.ChangeToHelperCall(instr, IR::HelperResumeYield);
  2526. break;
  2527. }
  2528. case Js::OpCode::GeneratorResumeJumpTable:
  2529. {
  2530. // Lowered in LowerPrologEpilog so that the jumps introduced are not considered to be part of the flow for the RegAlloc phase.
  2531. // Introduce a BailOutNoSave label if there were yield points that were elided due to optimizations. They could still be hit
  2532. // if an active generator object had been paused at such a yield point when the function body was JITed. So safe guard such a
  2533. // case by having the native code simply jump back to the interpreter for such yield points.
  2534. IR::LabelInstr *bailOutNoSaveLabel = nullptr;
  2535. m_func->MapUntilYieldOffsetResumeLabels([this, &bailOutNoSaveLabel](int, const YieldOffsetResumeLabel& yorl)
  2536. {
  2537. if (yorl.Second() == nullptr)
  2538. {
  2539. if (bailOutNoSaveLabel == nullptr)
  2540. {
  2541. bailOutNoSaveLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2542. }
  2543. return true;
  2544. }
  2545. return false;
  2546. });
  2547. // Insert the bailoutnosave label somewhere along with a call to BailOutNoSave helper
  2548. if (bailOutNoSaveLabel != nullptr)
  2549. {
  2550. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  2551. IR::LabelInstr * exitTargetInstr;
  2552. if (exitPrevInstr->IsLabelInstr())
  2553. {
  2554. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  2555. exitPrevInstr = exitPrevInstr->m_prev;
  2556. }
  2557. else
  2558. {
  2559. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  2560. exitPrevInstr->InsertAfter(exitTargetInstr);
  2561. }
  2562. bailOutNoSaveLabel->m_hasNonBranchRef = true;
  2563. bailOutNoSaveLabel->isOpHelper = true;
  2564. IR::Instr* bailOutCall = IR::Instr::New(Js::OpCode::Call, m_func);
  2565. exitPrevInstr->InsertAfter(bailOutCall);
  2566. exitPrevInstr->InsertAfter(bailOutNoSaveLabel);
  2567. exitPrevInstr->InsertAfter(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, m_func));
  2568. IR::RegOpnd * frameRegOpnd = IR::RegOpnd::New(nullptr, LowererMD::GetRegFramePointer(), TyMachPtr, m_func);
  2569. m_lowererMD.LoadHelperArgument(bailOutCall, frameRegOpnd);
  2570. m_lowererMD.ChangeToHelperCall(bailOutCall, IR::HelperNoSaveRegistersBailOutForElidedYield);
  2571. m_func->m_bailOutNoSaveLabel = bailOutNoSaveLabel;
  2572. }
  2573. break;
  2574. }
  2575. case Js::OpCode::FrameDisplayCheck:
  2576. instrPrev = this->LowerFrameDisplayCheck(instr);
  2577. break;
  2578. case Js::OpCode::SlotArrayCheck:
  2579. instrPrev = this->LowerSlotArrayCheck(instr);
  2580. break;
  2581. #ifdef ENABLE_WASM
  2582. case Js::OpCode::Copysign_A:
  2583. m_lowererMD.GenerateCopysign(instr);
  2584. break;
  2585. case Js::OpCode::Trunc_A:
  2586. if (!AutoSystemInfo::Data.SSE4_1Available())
  2587. {
  2588. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_TruncFlt, IR::HelperDirectMath_TruncDb);
  2589. break;
  2590. }
  2591. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  2592. break;
  2593. case Js::OpCode::Nearest_A:
  2594. if (!AutoSystemInfo::Data.SSE4_1Available())
  2595. {
  2596. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_NearestFlt, IR::HelperDirectMath_NearestDb);
  2597. break;
  2598. }
  2599. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  2600. break;
  2601. case Js::OpCode::ThrowRuntimeError:
  2602. GenerateThrow(instr->UnlinkSrc1(), instr);
  2603. instr->Remove();
  2604. break;
  2605. #endif //ENABLE_WASM
  2606. default:
  2607. #ifdef ENABLE_SIMDJS
  2608. #if defined(_M_IX86) || defined(_M_X64)
  2609. if (IsSimd128Opcode(instr->m_opcode))
  2610. {
  2611. instrPrev = m_lowererMD.Simd128Instruction(instr);
  2612. break;
  2613. }
  2614. #endif
  2615. #endif
  2616. AssertMsg(instr->IsLowered(), "Unknown opcode");
  2617. if(!instr->IsLowered())
  2618. {
  2619. Fatal();
  2620. }
  2621. break;
  2622. }
  2623. #if DBG
  2624. LegalizeVerifyRange(instrPrev ? instrPrev->m_next : instrStart,
  2625. verifyLegalizeInstrNext ? verifyLegalizeInstrNext->m_prev : nullptr);
  2626. #endif
  2627. } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
  2628. Assert(this->outerMostLoopLabel == nullptr);
  2629. }
  2630. IR::Opnd *
  2631. Lowerer::LoadFunctionInfoOpnd(IR::Instr * instr)
  2632. {
  2633. return IR::AddrOpnd::New(instr->m_func->GetWorkItem()->GetJITTimeInfo()->GetFunctionInfoAddr(), IR::AddrOpndKindDynamicFunctionInfo, instr->m_func);
  2634. }
  2635. IR::Instr *
  2636. Lowerer::LoadFunctionBody(IR::Instr * instr)
  2637. {
  2638. return m_lowererMD.LoadHelperArgument(instr, LoadFunctionBodyOpnd(instr));
  2639. }
  2640. IR::Instr *
  2641. Lowerer::LoadScriptContext(IR::Instr * instr)
  2642. {
  2643. return m_lowererMD.LoadHelperArgument(instr, LoadScriptContextOpnd(instr));
  2644. }
  2645. IR::Opnd *
  2646. Lowerer::LoadFunctionBodyOpnd(IR::Instr * instr)
  2647. {
  2648. return IR::AddrOpnd::New(instr->m_func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, instr->m_func);
  2649. }
IR::Opnd *
Lowerer::LoadScriptContextOpnd(IR::Instr * instr)
{
    // Address of the script context, tagged as a dynamic script-context operand.
    // NOTE(review): 'instr' is unused; unlike the sibling Load*Opnd helpers the
    // context is taken from this->m_func rather than instr->m_func — confirm
    // that is intentional (it matters for inlinees if the two ever differ).
    return IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetAddr(), IR::AddrOpndKindDynamicScriptContext, this->m_func);
}
  2655. IR::Opnd *
  2656. Lowerer::LoadScriptContextValueOpnd(IR::Instr * instr, ScriptContextValue valueType)
  2657. {
  2658. ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
  2659. switch (valueType)
  2660. {
  2661. case ScriptContextValue::ScriptContextNumberAllocator:
  2662. return IR::AddrOpnd::New(scriptContextInfo->GetNumberAllocatorAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2663. case ScriptContextValue::ScriptContextRecycler:
  2664. return IR::AddrOpnd::New(scriptContextInfo->GetRecyclerAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2665. default:
  2666. Assert(false);
  2667. return nullptr;
  2668. }
  2669. }
IR::Opnd *
Lowerer::LoadLibraryValueOpnd(IR::Instr * instr, LibraryValue valueType)
{
    // Resolve a LibraryValue enum member to an operand addressing the
    // corresponding well-known object/type/cache on the current script
    // context. Note the per-case differences: most vars and the two static
    // types pass dontEncode == true, the dynamic types and misc entries use
    // the default encoding, and ValueAbsDoubleCst is a MemRefOpnd (a load
    // through the address) rather than an AddrOpnd.
    ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
    switch (valueType)
    {
    // Well-known singleton vars (dontEncode: their addresses are stable).
    case LibraryValue::ValueEmptyString:
        return IR::AddrOpnd::New(scriptContextInfo->GetEmptyStringAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndeclBlockVar:
        return IR::AddrOpnd::New(scriptContextInfo->GetUndeclBlockVarAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndefined:
        return IR::AddrOpnd::New(scriptContextInfo->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNull:
        return IR::AddrOpnd::New(scriptContextInfo->GetNullAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueTrue:
        return IR::AddrOpnd::New(scriptContextInfo->GetTrueAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueFalse:
        return IR::AddrOpnd::New(scriptContextInfo->GetFalseAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNegativeZero:
        return IR::AddrOpnd::New(scriptContextInfo->GetNegativeZeroAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    // Static types (dontEncode == true).
    case LibraryValue::ValueNumberTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    case LibraryValue::ValueStringTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetStringTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    // Dynamic types and library objects (default encoding).
    case LibraryValue::ValueObjectType:
        return IR::AddrOpnd::New(scriptContextInfo->GetObjectTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueObjectHeaderInlinedType:
        return IR::AddrOpnd::New(scriptContextInfo->GetObjectHeaderInlinedTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueRegexType:
        return IR::AddrOpnd::New(scriptContextInfo->GetRegexTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueArrayConstructor:
        return IR::AddrOpnd::New(scriptContextInfo->GetArrayConstructorAddr(), IR::AddrOpndKindDynamicVar, instr->m_func);
    case LibraryValue::ValueJavascriptArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeIntArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetNativeIntArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeFloatArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetNativeFloatArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    // Thread-context-scoped values.
    case LibraryValue::ValueConstructorCacheDefaultInstance:
        return IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetConstructorCacheDefaultInstanceAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
    case LibraryValue::ValueAbsDoubleCst:
        // Memory reference (not an address constant): the fabs mask double.
        return IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsDoubleCstAddr(), TyMachDouble, instr->m_func, IR::AddrOpndKindDynamicDoubleRef);
    case LibraryValue::ValueCharStringCache:
        return IR::AddrOpnd::New(scriptContextInfo->GetCharStringCacheAddr(), IR::AddrOpndKindDynamicCharStringCache, instr->m_func);
    default:
        Assert(UNREACHED);
        return nullptr;
    }
}
  2719. IR::Opnd *
  2720. Lowerer::LoadVTableValueOpnd(IR::Instr * instr, VTableValue vtableType)
  2721. {
  2722. return IR::AddrOpnd::New((Js::Var)instr->m_func->GetScriptContextInfo()->GetVTableAddress(vtableType), IR::AddrOpndKindDynamicVtable, this->m_func);
  2723. }
  2724. IR::Opnd *
  2725. Lowerer::LoadOptimizationOverridesValueOpnd(IR::Instr *instr, OptimizationOverridesValue valueType)
  2726. {
  2727. switch (valueType)
  2728. {
  2729. case OptimizationOverridesValue::OptimizationOverridesSideEffects:
  2730. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetSideEffectsAddr(), TyInt32, instr->m_func);
  2731. case OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable:
  2732. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2733. case OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable:
  2734. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2735. case OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable:
  2736. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetFloatArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2737. default:
  2738. Assert(UNREACHED);
  2739. return nullptr;
  2740. }
  2741. }
  2742. IR::Opnd *
  2743. Lowerer::LoadNumberAllocatorValueOpnd(IR::Instr *instr, NumberAllocatorValue valueType)
  2744. {
  2745. ScriptContextInfo *scriptContext = instr->m_func->GetScriptContextInfo();
  2746. bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
  2747. switch (valueType)
  2748. {
  2749. case NumberAllocatorValue::NumberAllocatorEndAddress:
  2750. return IR::MemRefOpnd::New(((char *)scriptContext->GetNumberAllocatorAddr()) + Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, instr->m_func);
  2751. case NumberAllocatorValue::NumberAllocatorFreeObjectList:
  2752. return IR::MemRefOpnd::New(
  2753. ((char *)scriptContext->GetNumberAllocatorAddr()) +
  2754. (allowNativeCodeBumpAllocation ? Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset() : Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset()),
  2755. TyMachPtr, instr->m_func);
  2756. default:
  2757. Assert(false);
  2758. return nullptr;
  2759. }
  2760. }
  2761. IR::Opnd *
  2762. Lowerer::LoadIsInstInlineCacheOpnd(IR::Instr * instr, uint inlineCacheIndex)
  2763. {
  2764. intptr_t inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(inlineCacheIndex);
  2765. return IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
  2766. }
  2767. IR::Opnd *
  2768. Lowerer::LoadRuntimeInlineCacheOpnd(IR::Instr * instr, IR::PropertySymOpnd * propertySymOpnd, bool isHelper)
  2769. {
  2770. Assert(propertySymOpnd->m_runtimeInlineCache != 0);
  2771. IR::Opnd * inlineCacheOpnd = nullptr;
  2772. if (instr->m_func->GetJITFunctionBody()->HasInlineCachesOnFunctionObject() && !instr->m_func->IsInlinee())
  2773. {
  2774. inlineCacheOpnd = this->GetInlineCacheFromFuncObjectForRuntimeUse(instr, propertySymOpnd, isHelper);
  2775. }
  2776. else
  2777. {
  2778. intptr_t inlineCache = propertySymOpnd->m_runtimeInlineCache;
  2779. inlineCacheOpnd = IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func, /* dontEncode */ true);
  2780. }
  2781. return inlineCacheOpnd;
  2782. }
  2783. bool
  2784. Lowerer::TryGenerateFastCmSrEq(IR::Instr * instr)
  2785. {
  2786. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2787. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2788. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2789. {
  2790. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2791. }
  2792. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2793. {
  2794. instr->SwapOpnds();
  2795. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2796. }
  2797. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2798. {
  2799. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2800. return true;
  2801. }
  2802. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2803. {
  2804. instr->SwapOpnds();
  2805. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2806. return true;
  2807. }
  2808. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2809. {
  2810. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2811. return true;
  2812. }
  2813. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2814. {
  2815. instr->SwapOpnds();
  2816. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2817. return true;
  2818. }
  2819. return false;
  2820. }
  2821. bool
  2822. Lowerer::GenerateFastBrSrEq(IR::Instr * instr, IR::RegOpnd * srcReg1, IR::RegOpnd * srcReg2, IR::Instr ** pInstrPrev, bool noMathFastPath)
  2823. {
  2824. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2825. {
  2826. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), true);
  2827. instr->Remove();
  2828. return true;
  2829. }
  2830. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2831. {
  2832. instr->SwapOpnds();
  2833. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), true);
  2834. instr->Remove();
  2835. return true;
  2836. }
  2837. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2838. {
  2839. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2840. return true;
  2841. }
  2842. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2843. {
  2844. instr->SwapOpnds();
  2845. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2846. return true;
  2847. }
  2848. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2849. {
  2850. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2851. return true;
  2852. }
  2853. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2854. {
  2855. instr->SwapOpnds();
  2856. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2857. return true;
  2858. }
  2859. return false;
  2860. }
  2861. ///----------------------------------------------------------------------------
  2862. ///
  2863. /// Lowerer::GenerateFastBrConst
  2864. ///
  2865. ///----------------------------------------------------------------------------
  2866. IR::BranchInstr *
  2867. Lowerer::GenerateFastBrConst(IR::BranchInstr *branchInstr, IR::Opnd * constOpnd, bool isEqual)
  2868. {
  2869. Assert(constOpnd->IsAddrOpnd() || constOpnd->IsIntConstOpnd());
  2870. //
  2871. // Given:
  2872. // BrSrEq_A $L1, s1, s2
  2873. // where s2 is either 'null', 'undefined', 'true' or 'false'
  2874. //
  2875. // Generate:
  2876. //
  2877. // CMP s1, s2
  2878. // JEQ/JNE $L1
  2879. //
  2880. // TODO: OOP JIT, enable this assert
  2881. //Assert(this->IsConstRegOpnd(branchInstr->GetSrc2()->AsRegOpnd()));
  2882. IR::Opnd *opnd = branchInstr->GetSrc1();
  2883. if (!opnd->IsRegOpnd())
  2884. {
  2885. IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
  2886. LowererMD::CreateAssign(lhsReg, opnd, branchInstr);
  2887. opnd = lhsReg;
  2888. }
  2889. Assert(opnd->IsRegOpnd());
  2890. IR::BranchInstr *newBranch;
  2891. newBranch = InsertCompareBranch(opnd, constOpnd, isEqual ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, branchInstr->GetTarget(), branchInstr);
  2892. return newBranch;
  2893. }
  2894. bool
  2895. Lowerer::TryGenerateFastBrEq(IR::Instr * instr)
  2896. {
  2897. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2898. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2899. bool isConst = false;
  2900. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2901. {
  2902. instr->SwapOpnds();
  2903. isConst = true;
  2904. }
  2905. // Fast path for == null or == undefined
  2906. // if (src == null || src == undefined)
  2907. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  2908. {
  2909. IR::BranchInstr *newBranch;
  2910. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2911. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2912. true);
  2913. this->GenerateFastBrConst(instr->AsBranchInstr(),
  2914. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  2915. true);
  2916. instr->Remove();
  2917. return true;
  2918. }
  2919. return false;
  2920. }
  2921. bool
  2922. Lowerer::TryGenerateFastBrNeq(IR::Instr * instr)
  2923. {
  2924. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2925. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2926. bool isConst = false;
  2927. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2928. {
  2929. instr->SwapOpnds();
  2930. isConst = true;
  2931. }
  2932. // Fast path for != null or != undefined
  2933. // if (src != null && src != undefined)
  2934. //
  2935. // That is:
  2936. // if (src == NULL) goto labelEq
  2937. // if (src != undef) goto target
  2938. // labelEq:
  2939. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  2940. {
  2941. IR::LabelInstr *labelEq = instr->GetOrCreateContinueLabel();
  2942. IR::BranchInstr *newBranch;
  2943. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2944. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2945. true);
  2946. newBranch->AsBranchInstr()->SetTarget(labelEq);
  2947. this->GenerateFastBrConst(instr->AsBranchInstr(),
  2948. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  2949. false);
  2950. instr->Remove();
  2951. return true;
  2952. }
  2953. return false;
  2954. }
  2955. bool
  2956. Lowerer::GenerateFastBrSrNeq(IR::Instr * instr, IR::Instr ** pInstrPrev)
  2957. {
  2958. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2959. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2960. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2961. {
  2962. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), false);
  2963. instr->Remove();
  2964. return true;
  2965. }
  2966. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2967. {
  2968. instr->SwapOpnds();
  2969. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), false);
  2970. instr->Remove();
  2971. return true;
  2972. }
  2973. return false;
  2974. }
void
Lowerer::GenerateDynamicObjectAlloc(IR::Instr * newObjInstr, uint inlineSlotCount, uint slotCount, IR::RegOpnd * newObjDst, IR::Opnd * typeSrc)
{
    // Emits inline allocation + initialization of a Js::DynamicObject with
    // 'inlineSlotCount' inline slots, the given type, and — when slotCount
    // exceeds inlineSlotCount — a separately allocated aux slot array for the
    // overflow slots. The object array pointer is always nulled last.
    size_t headerAllocSize = sizeof(Js::DynamicObject) + inlineSlotCount * sizeof(Js::Var);
    IR::SymOpnd * tempObjectSymOpnd;
    // isZeroed tells the MemInit helpers below whether they may skip stores of
    // zero/null into freshly zeroed memory.
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(newObjInstr, newObjDst, IR::HelperAllocMemForScObject, headerAllocSize, &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableDynamicObject), this->outerMostLoopLabel, false);
    }
    else
    {
        // MOV [newObjDst + offset(vtable)], DynamicObject::vtable
        GenerateMemInit(newObjDst, 0, LoadVTableValueOpnd(newObjInstr, VTableValue::VtableDynamicObject), newObjInstr, isZeroed);
    }
    // MOV [newObjDst + offset(type)], newObjectType
    GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfType(), typeSrc, newObjInstr, isZeroed);
    // CALL JavascriptOperators::AllocMemForVarArray((slotCount - inlineSlotCount) * sizeof(Js::Var))
    if (slotCount > inlineSlotCount)
    {
        // Overflow slots: allocate the aux array and store it on the object.
        size_t auxSlotsAllocSize = (slotCount - inlineSlotCount) * sizeof(Js::Var);
        IR::RegOpnd* auxSlots = IR::RegOpnd::New(TyMachPtr, m_func);
        GenerateRecyclerAllocAligned(IR::HelperAllocMemForVarArray, auxSlotsAllocSize, auxSlots, newObjInstr);
        GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), auxSlots, newObjInstr, isZeroed);
        // NOTE(review): the aux-slots field appears to be stored twice — the
        // GenerateMemInit above and the CreateAssign below write the same
        // [newObjDst + OffsetOfAuxSlots] location. Confirm whether one of the
        // two stores is redundant before changing anything here.
        IR::IndirOpnd* newObjAuxSlots = IR::IndirOpnd::New(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachPtr, m_func);
        this->m_lowererMD.CreateAssign(newObjAuxSlots, auxSlots, newObjInstr);
    }
    else
    {
        // No overflow slots: null out the aux-slots pointer.
        GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), newObjInstr, isZeroed);
    }
    GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfObjectArray(), newObjInstr, isZeroed);
}
  3010. void
  3011. Lowerer::LowerNewScObjectSimple(IR::Instr * instr)
  3012. {
  3013. GenerateDynamicObjectAlloc(
  3014. instr,
  3015. 0,
  3016. 0,
  3017. instr->UnlinkDst()->AsRegOpnd(),
  3018. LoadLibraryValueOpnd(
  3019. instr,
  3020. Js::FunctionBody::DoObjectHeaderInliningForEmptyObjects()
  3021. ? LibraryValue::ValueObjectHeaderInlinedType
  3022. : LibraryValue::ValueObjectType));
  3023. instr->Remove();
  3024. }
void
Lowerer::LowerNewScObjectLiteral(IR::Instr *newObjInstr)
{
    // Lowers an object-literal creation. src1 is the property-array aux-data
    // id, src2 is the literal type id. When the literal's shared type is not
    // known (or not yet shared) at JIT time, emits a runtime check plus a
    // helper call to EnsureObjectLiteralType; then allocates the object inline
    // with the (possibly runtime-loaded) type.
    Func * func = m_func;
    IR::IntConstOpnd * literalObjectIdOpnd = newObjInstr->UnlinkSrc2()->AsIntConstOpnd();
    intptr_t literalTypeRef = newObjInstr->m_func->GetJITFunctionBody()->GetObjectLiteralTypeRef(literalObjectIdOpnd->AsUint32());
    IR::LabelInstr * helperLabel = nullptr;
    IR::LabelInstr * allocLabel = nullptr;
    IR::Opnd * literalTypeRefOpnd;
    IR::Opnd * literalTypeOpnd;
    IR::Opnd * propertyArrayOpnd;
    // Slot capacities come from the literal's property-id array.
    IR::IntConstOpnd * propertyArrayIdOpnd = newObjInstr->UnlinkSrc1()->AsIntConstOpnd();
    const Js::PropertyIdArray * propIds = newObjInstr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());
    intptr_t propArrayAddr = newObjInstr->m_func->GetJITFunctionBody()->GetAuxDataAddr(propertyArrayIdOpnd->AsUint32());
    uint inlineSlotCapacity = Js::JavascriptOperators::GetLiteralInlineSlotCapacity(propIds);
    uint slotCapacity = Js::JavascriptOperators::GetLiteralSlotCapacity(propIds);
    IR::RegOpnd * dstOpnd;
    literalTypeRefOpnd = IR::AddrOpnd::New(literalTypeRef, IR::AddrOpndKindDynamicMisc, this->m_func);
    propertyArrayOpnd = IR::AddrOpnd::New(propArrayAddr, IR::AddrOpndKindDynamicMisc, this->m_func);
    //#if 0 TODO: OOP JIT, obj literal types
    // should pass in isShared bit through RPC, enable for in-proc jit to see perf impact
    // In OOP JIT (or with the missing-opts flag) the type cannot be read here.
    Js::DynamicType * literalType = func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts) ? nullptr : *(Js::DynamicType **)literalTypeRef;
    if (literalType == nullptr || !literalType->GetIsShared())
    {
        // Type unknown or not shared: load it at run time and verify it is a
        // non-null, shared type; otherwise fall to the EnsureObjectLiteralType
        // helper below.
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        allocLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        literalTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(literalTypeOpnd, IR::MemRefOpnd::New(literalTypeRef, TyMachPtr, func), newObjInstr);
        InsertTestBranch(literalTypeOpnd, literalTypeOpnd,
            Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        InsertTestBranch(IR::IndirOpnd::New(literalTypeOpnd->AsRegOpnd(), Js::DynamicType::GetOffsetOfIsShared(), TyInt8, func),
            IR::IntConstOpnd::New(1, TyInt8, func, true), Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        dstOpnd = newObjInstr->GetDst()->AsRegOpnd();
    }
    else
    {
        // Shared type known at JIT time: embed it directly; the JIT-time
        // capacities must agree with the type handler's.
        literalTypeOpnd = IR::AddrOpnd::New(literalType, IR::AddrOpndKindDynamicType, func);
        dstOpnd = newObjInstr->UnlinkDst()->AsRegOpnd();
        Assert(inlineSlotCapacity == literalType->GetTypeHandler()->GetInlineSlotCapacity());
        Assert(slotCapacity == (uint)literalType->GetTypeHandler()->GetSlotCapacity());
    }
    if (helperLabel)
    {
        InsertBranch(Js::OpCode::Br, allocLabel, newObjInstr);
        // Slow path to ensure the type is there
        newObjInstr->InsertBefore(helperLabel);
        IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperEnsureObjectLiteralType, func);
        m_lowererMD.LoadHelperArgument(newObjInstr, literalTypeRefOpnd);
        m_lowererMD.LoadHelperArgument(newObjInstr, propertyArrayOpnd);
        LoadScriptContext(newObjInstr);
        IR::Instr * ensureTypeInstr = IR::Instr::New(Js::OpCode::Call, literalTypeOpnd, opndHelper, func);
        newObjInstr->InsertBefore(ensureTypeInstr);
        m_lowererMD.LowerCall(ensureTypeInstr, 0);
        newObjInstr->InsertBefore(allocLabel);
    }
    else
    {
        Assert(allocLabel == nullptr);
    }
    // For the next call:
    // inlineSlotCapacity == Number of slots to allocate beyond the DynamicObject header
    // slotCapacity - inlineSlotCapacity == Number of aux slots to allocate
    if(Js::FunctionBody::DoObjectHeaderInliningForObjectLiteral(propIds))
    {
        // Header-inlined literals store the first few slots inside the object
        // header itself, so both capacities shrink by that amount.
        Assert(inlineSlotCapacity >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
        Assert(inlineSlotCapacity == slotCapacity);
        slotCapacity = inlineSlotCapacity -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
    }
    GenerateDynamicObjectAlloc(
        newObjInstr,
        inlineSlotCapacity,
        slotCapacity,
        dstOpnd,
        literalTypeOpnd);
    newObjInstr->Remove();
}
IR::Instr*
Lowerer::LowerProfiledNewScArray(IR::JitProfilingInstr* arrInstr)
{
    // Lowers a profiling-mode NewScArray into a call to the profiling helper,
    // which both allocates the array and records profile data.
    IR::Instr *instrPrev = arrInstr->m_prev;
    /*
        JavascriptArray *ProfilingHelpers::ProfiledNewScArray(
            const uint length,
            FunctionBody *const functionBody,
            const ProfileId profileId)
    */
    // Helper arguments are pushed in reverse order of the signature above.
    m_lowererMD.LoadHelperArgument(arrInstr, IR::Opnd::CreateProfileIdOpnd(arrInstr->profileId, m_func));
    m_lowererMD.LoadHelperArgument(arrInstr, CreateFunctionBodyOpnd(arrInstr->m_func));
    m_lowererMD.LoadHelperArgument(arrInstr, arrInstr->UnlinkSrc1());
    arrInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScArray, m_func));
    m_lowererMD.LowerCall(arrInstr, 0);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerNewScArray(IR::Instr *arrInstr)
{
    // Lowers NewScArray (array constructor with a constant length). In
    // profiling JIT mode this defers to the profiling helper. Otherwise it
    // tries to emit an inline fast-path allocation from call-site profile
    // data, with a helper call as the fallback (the profiled helper when the
    // destination is likely a native array, the generic one otherwise).
    if (arrInstr->IsJitProfilingInstr())
    {
        return LowerProfiledNewScArray(arrInstr->AsJitProfilingInstr());
    }
    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArray;
    if (arrInstr->IsProfiledInstr() && arrInstr->m_func->HasProfileInfo())
    {
        intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        Assert(weakFuncRef);
        Js::ProfileId profileId = static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
        Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrInstr->GetSrc1()->IsConstOpnd());
        GenerateProfiledNewScArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef, arrInstr->GetSrc1()->AsIntConstOpnd()->AsUint32());
        if (arrInstr->GetDst() && arrInstr->GetDst()->GetValueType().IsLikelyNativeArray())
        {
            // Extra args for the profiled helper: call-site info + weak func
            // ref (pushed before the common script-context/length args below).
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScArray;
        }
    }
    LoadScriptContext(arrInstr);
    IR::Opnd *src1Opnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, src1Opnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
    return instrPrev;
}
  3149. template <typename ArrayType>
  3150. BOOL Lowerer::IsSmallObject(uint32 length)
  3151. {
  3152. if (ArrayType::HasInlineHeadSegment(length))
  3153. return true;
  3154. uint32 alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(length);
  3155. size_t allocSize = sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) + alignedHeadSegmentSize * sizeof(typename ArrayType::TElement);
  3156. return HeapInfo::IsSmallObject(HeapInfo::GetAlignedSizeNoCheck(allocSize));
  3157. }
void
Lowerer::GenerateProfiledNewScArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length)
{
    // Emits an inline fast path for a profiled array-constructor call of a
    // known constant length, specialized by the destination's likely array
    // kind (native int / native float / Var). Each specialization first
    // re-checks the call-site profile bits at run time (jumping to the helper
    // on mismatch), allocates the array, then fills elements [length, size)
    // of the head segment with the kind's missing-item sentinel. Bails out
    // (emits nothing) when the fast path is disabled or the array would not
    // be a small-heap-bucket object.
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // 'size' is updated by the alloc helper to the actual head-segment size;
    // entries from the requested 'length' up to 'size' get missing-item fill.
    uint32 size = length;
    bool isZeroed;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    uint32 i = length;
    if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeIntArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeIntArray>(length))
        {
            return;
        }
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        // The alloc helper writes flags and call-site index as one 32-bit
        // store, so the two 16-bit fields must be adjacent.
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Fill the unused tail of the head segment with int32 missing items.
        for (; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeFloatArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeFloatArray>(length))
        {
            return;
        }
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        // The double-sized elements are therefore filled in MissingItem-sized
        // (pointer-sized) stores; rescale the index and count accordingly.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        i = i * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        if (!IsSmallObject<Js::JavascriptArray>(length))
        {
            return;
        }
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        // Fill the unused tail of the head segment with Var missing items.
        for (; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true),
                instr, isZeroed);
        }
    }
    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  3235. void
  3236. Lowerer::GenerateArrayInfoIsNativeIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
  3237. {
  3238. Func * func = this->m_func;
  3239. InsertTestBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3240. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3241. }
  3242. void
  3243. Lowerer::GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
  3244. {
  3245. Func * func = this->m_func;
  3246. InsertCompareBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3247. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3248. }
// Maps an array type to the JIT helper that allocates memory for it. Only the
// three specializations below exist; the primary template is left undefined
// so any other instantiation fails at link time.
template <typename ArrayType>
static IR::JnHelperMethod GetArrayAllocMemHelper();
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptArray>()
{
    return IR::HelperAllocMemForJavascriptArray;
}
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeIntArray>()
{
    return IR::HelperAllocMemForJavascriptNativeIntArray;
}
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeFloatArray>()
{
    return IR::HelperAllocMemForJavascriptNativeFloatArray;
}
// Allocation for array literals: forwards to the shared alloc helper with the
// literal configuration (not an array-constructor call, never "no args").
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayLiteralsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, false /* isArrayObjCtor */, false /* isNoArgs */);
}
// Allocation for array-constructor calls: forwards to the shared alloc helper
// with the constructor configuration; 'isNoArgs' selects the zero-argument
// constructor sizing.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayObjectsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isNoArgs)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, true /* isArrayObjCtor */, isNoArgs);
}
// Emits the inline allocation and header initialization for a new array whose
// element count is known at JIT time, mirroring JavascriptArray::NewLiteral.
// On return: instr's dst holds the array object, the returned RegOpnd holds the
// head segment pointer, *psize is updated to the aligned head-segment size, and
// *pIsHeadSegmentZeroed reports whether the head segment memory is known zeroed
// (so callers can skip redundant stores).
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAllocHelper(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isArrayObjCtor, bool isNoArgs)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();

    // Generate code as in JavascriptArray::NewLiteral
    uint32 count = *psize;
    uint alignedHeadSegmentSize;
    size_t arrayAllocSize;

    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    bool isHeadSegmentZeroed = false;

    if (ArrayType::HasInlineHeadSegment(count))
    {
        // Head segment lives in the same allocation as the array object, right
        // after it: head = dst + sizeof(ArrayType) (materialized by LEA below).
        if (isArrayObjCtor)
        {
            uint32 allocCount = isNoArgs ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSizeForArrayObjects<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        else
        {
            uint32 allocCount = count == 0 ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSize<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
            IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
        isHeadSegmentZeroed = true;
    }
    else
    {
        // Need to allocate the head segment first so that if it throws,
        // we don't have the memory assigned to dstOpnd yet
        // Even if the instruction is marked as dstIsTempObject, we still should not allocate
        // that big of a chunk on the stack.
        alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(count);
        GenerateRecyclerAlloc(
            IR::HelperAllocMemForSparseArraySegmentBase,
            sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) +
                alignedHeadSegmentSize * sizeof(typename ArrayType::TElement),
            headOpnd,
            instr);

        arrayAllocSize = sizeof(ArrayType);
    }
    *psize = alignedHeadSegmentSize;

    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd,
        GetArrayAllocMemHelper<ArrayType>(), arrayAllocSize, &tempObjectSymOpnd);
    // An inline head segment is only pre-zeroed if the array allocation itself was.
    isHeadSegmentZeroed = isHeadSegmentZeroed & isZeroed;

    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            this->LoadVTableValueOpnd(this->outerMostLoopLabel, ArrayType::VtableHelper()),
            this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, isZeroed);

    // Emit the flags and call site index together
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    // Sanity: arrayInfo must be the entry at index arrayCallSiteIndex of the
    // profile's call-site-info table (or null when there is no profile id).
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif

    // The same at this:
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint16)Js::DynamicObjectFlags::InitialArrayValue, instr, isZeroed);
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayCallSiteIndex(), arrayCallSiteIndex, instr, isZeroed);
    // (single 32-bit store covering both adjacent 16-bit fields)
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint)Js::DynamicObjectFlags::InitialArrayValue | ((uint)arrayCallSiteIndex << 16), instr, isZeroed);

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), count, instr, isZeroed);

    // Only now that dst is allocated can the inline-head LEA be emitted.
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        LowererMD::ChangeToLea(leaHeadInstr);
    }

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, isZeroed);

    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), isArrayObjCtor ? 0 : count, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), alignedHeadSegmentSize, instr, isHeadSegmentZeroed);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, isHeadSegmentZeroed);

    *pIsHeadSegmentZeroed = isHeadSegmentZeroed;
    return headOpnd;
}
// Emits allocation + initialization for an array whose length is only known at
// run time (Array constructor fast path). A cascade of compares against the
// precomputed allocation buckets picks the element count / allocation size
// pair, the allocation helper is called, and then the array object and its
// inline head segment are initialized. Returns the head-segment register; the
// head segment's length field is deliberately left 0 (caller/runtime fills it).
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAlloc(IR::Instr *instr, IR::Opnd * arrayLenOpnd, Js::ArrayCallSiteInfo * arrayInfo)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();

    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;

    // arraySizeOpnd: element count to initialize; alignedArrayAllocSizeOpnd:
    // total bytes to request from the recycler — both selected per bucket.
    IR::Opnd * arraySizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::Opnd * alignedArrayAllocSizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::LabelInstr * doneCalculatingAllocSize = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * skipToNextBucket = nullptr;
    uint8 bucketsCount = ArrayType::AllocationBucketsCount;

    Js::JavascriptArray::EnsureCalculationOfAllocationBuckets<ArrayType>();

    for (uint8 i = 0; i < bucketsCount; i++)
    {
        uint elementsCountToInitialize = ArrayType::allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex];
        uint allocationSize = ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationSizeIndex];

        // Ensure we already have allocation size calculated and within range
        Assert(elementsCountToInitialize > 0 && elementsCountToInitialize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex]);
        Assert(allocationSize > 0 && allocationSize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex]);

        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertCompare(arrayLenOpnd, IR::IntConstOpnd::New((uint16)ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            skipToNextBucket = IR::LabelInstr::New(Js::OpCode::Label, func);
            Lowerer::InsertBranch(Js::OpCode::BrGt_A, skipToNextBucket, instr);
        }

        // MOV $arrayAlignedSize, <const1>
        // MOV $arrayAllocSize, <const2>
        Lowerer::InsertMove(arraySizeOpnd, IR::IntConstOpnd::New((uint16)elementsCountToInitialize, TyUint32, func), instr);
        Lowerer::InsertMove(alignedArrayAllocSizeOpnd, IR::IntConstOpnd::New((uint16)allocationSize, TyUint32, func), instr);

        // JMP $doneCalculatingAllocSize  (last bucket falls through instead)
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertBranch(Js::OpCode::Br, doneCalculatingAllocSize, instr);
            instr->InsertBefore(skipToNextBucket);
        }
    }

    instr->InsertBefore(doneCalculatingAllocSize);

    // ***** Call to allocation helper *****
    this->m_lowererMD.LoadHelperArgument(instr, this->LoadScriptContextValueOpnd(instr, ScriptContextValue::ScriptContextRecycler));
    this->m_lowererMD.LoadHelperArgument(instr, alignedArrayAllocSizeOpnd);
    IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(GetArrayAllocMemHelper<ArrayType>(), func), func);
    instr->InsertBefore(newObjCall);
    this->m_lowererMD.LowerCall(newObjCall, 0);

    // ***** Load headSeg/initialize it *****
    // Inline head segment sits right after the array object.
    leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
        IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
    GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, true);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, true);

    // arrayCallSiteIndex is only consumed by the DBG-only sanity assert below.
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif

    // ***** Array object initialization *****
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), IR::IntConstOpnd::New((uint16)Js::DynamicObjectFlags::InitialArrayValue, TyUint16, func), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), arrayLenOpnd, instr, true);

    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        LowererMD::ChangeToLea(leaHeadInstr);
    }

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, true);

    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, true);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), 0, instr, true); // Set head segment length to 0
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), arraySizeOpnd, instr, true);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, true);
    return headOpnd;
}
// Fast path for a profiled `new Array(...)` whose length is a JIT-time
// constant. Allocates the array inline in the flavor recorded by the profile
// (native int / native float / var), stamps the profile call-site index and
// weak function-body ref on native arrays, seeds the head segment with
// missing-item sentinels, then branches over the slow helper call to
// labelDone. The helper label is inserted at the end for the guard bailouts.
void
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length, IR::LabelInstr* labelDone, bool isNoArgs)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    uint32 size = length;
    bool isZeroed = false;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);

    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        // Guard: bail to the helper if the site is no longer a native int array.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Seed every slot with the int-array missing-item sentinel.
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        // Guard: bail to the helper unless the site is float-but-not-int.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        // Write the double slots as (possibly several) MissingItem-sized stores.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (uint i = 0; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        // Var array: no guard needed; fill with the Var missing-item sentinel.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true),
                instr, isZeroed);
        }
    }
    // Skip past the helper call
    InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
}
// Fast path for a profiled `new Array(length)` where the length is a run-time
// tagged-int value. Untags the length, rejects lengths > 8 to the helper,
// allocates via the bucketed GenerateArrayAlloc, then initializes head-segment
// slots with missing-item sentinels bucket by bucket, branching out early
// (to arrayInitDone) once enough slots for the actual length are filled.
// offsetOfCallSiteIndex/offsetOfWeakFuncRef are only written for native arrays.
template <typename ArrayType>
void
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, IR::LabelInstr* helperLabel,
    IR::LabelInstr* labelDone, IR::Opnd* lengthOpnd, uint32 offsetOfCallSiteIndex, uint32 offsetOfWeakFuncRef)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return;
    }

    Func * func = this->m_func;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);
    uint sizeOfElement = 0;
    uint allocationBucketsCount = ArrayType::AllocationBucketsCount;
    uint(*allocationBuckets)[Js::JavascriptArray::AllocationBucketsInfoSize];
    allocationBuckets = ArrayType::allocationBuckets;
    // sizeFactor: number of MissingItem-sized stores per logical element.
    uint sizeFactor = 1;
    IRType missingItemType = (arrayInfo && arrayInfo->IsNativeIntArray()) ? IRType::TyInt32 : IRType::TyVar;
    IR::LabelInstr * arrayInitDone = IR::LabelInstr::New(Js::OpCode::Label, func);

    bool isNativeArray = arrayInfo && (arrayInfo->IsNativeIntArray() || arrayInfo->IsNativeFloatArray());

    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        sizeOfElement = sizeof(int32);
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        sizeFactor = sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        sizeOfElement = sizeof(Js::JavascriptArray::MissingItem);
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else
    {
        sizeOfElement = sizeof(Js::Var);
    }

    lengthOpnd = GenerateUntagVar(lengthOpnd->AsRegOpnd(), helperLabel, instr);

    // Only lengths <= 8 take this fast path; larger go to the helper.
    IR::Opnd* upperBound = IR::IntConstOpnd::New(8, TyUint8, func, true);
    InsertCompare(lengthOpnd, upperBound, instr);
    InsertBranch(Js::OpCode::BrGt_A, true /* isUnsigned */, helperLabel, instr);

    headOpnd = GenerateArrayAlloc<ArrayType>(instr, lengthOpnd, arrayInfo);

    if (isNativeArray)
    {
        Assert(ArrayType::GetOffsetOfArrayFlags() + sizeof(uint16) == offsetOfCallSiteIndex);
        Assert(offsetOfWeakFuncRef > 0);
        GenerateMemInit(dstOpnd, offsetOfCallSiteIndex, IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, true /* isZeroed */);
        GenerateMemInit(dstOpnd, offsetOfWeakFuncRef, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, true /* isZeroed */);
    }

    uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
    uint missingItemCount = 0;
    uint missingItemInitializedSoFar = 0;
    uint missingItemIndex = 0;
    uint maxAllocationSize = allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex];

    for (uint8 i = 0; i < allocationBucketsCount; i++)
    {
        missingItemCount = allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex] * sizeFactor;
        if (i > 0)
        {
            // Reduce missingItemCount we have already set so far
            missingItemCount -= missingItemInitializedSoFar;
        }

        // Generate array initialization with MissingItem
        for (uint j = 0; j < missingItemCount; j++)
        {
            // Ensure we don't write missingItems past allocation size
            Assert(offsetStart + missingItemIndex * sizeOfElement <= maxAllocationSize);
            GenerateMemInit(headOpnd, offsetStart + missingItemIndex * sizeOfElement, GetMissingItemOpnd(missingItemType, func), instr, true /*isZeroed*/);
            missingItemIndex++;
        }

        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        if (i != (allocationBucketsCount - 1))
        {
            Lowerer::InsertCompare(lengthOpnd, IR::IntConstOpnd::New(allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            Lowerer::InsertBranch(Js::OpCode::BrLe_A, arrayInitDone, instr);
        }
        missingItemInitializedSoFar += missingItemCount;
    }

    // Ensure no. of missingItems written are same
    Assert(missingItemIndex == missingItemInitializedSoFar);
    // Ensure no. of missingItems match what present in allocationBuckets
    Assert(missingItemIndex == allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex] * sizeFactor);

    instr->InsertBefore(arrayInitDone);

    Lowerer::InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
}
  3602. void
  3603. Lowerer::GenerateProfiledNewScIntArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
  3604. {
  3605. // Helper will deal with ForceES5ARray
  3606. if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3607. {
  3608. return;
  3609. }
  3610. if (!arrayInfo->IsNativeIntArray())
  3611. {
  3612. return;
  3613. }
  3614. if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
  3615. {
  3616. return;
  3617. }
  3618. Func * func = this->m_func;
  3619. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3620. GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
  3621. IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
  3622. Js::AuxArray<int32> * ints = (Js::AuxArray<int32> *)elementsOpnd->m_metadata;
  3623. uint32 size = ints->count;
  3624. // Generate code as in JavascriptArray::NewLiteral
  3625. bool isHeadSegmentZeroed;
  3626. IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
  3627. Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
  3628. IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
  3629. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3630. GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicMisc, m_func), instr, isHeadSegmentZeroed);
  3631. // Initialize the elements
  3632. uint i = 0;
  3633. if (ints->count > 16)
  3634. {
  3635. // Do memcpy if > 16
  3636. IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
  3637. const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
  3638. IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<int32>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
  3639. InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
  3640. GenerateMemCopy(dstElementsOpnd, srcOpnd, ints->count * sizeof(int32), instr);
  3641. i = ints->count;
  3642. }
  3643. else
  3644. {
  3645. for (; i < ints->count; i++)
  3646. {
  3647. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3648. ints->elements[i], instr, isHeadSegmentZeroed);
  3649. }
  3650. }
  3651. Assert(i == ints->count);
  3652. for (; i < size; i++)
  3653. {
  3654. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3655. Js::JavascriptNativeIntArray::MissingItem, instr, isHeadSegmentZeroed);
  3656. }
  3657. // Skip pass the helper call
  3658. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3659. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3660. instr->InsertBefore(helperLabel);
  3661. instr->InsertAfter(doneLabel);
  3662. }
// Fast path for a profiled native-float array literal. Bails out (emits
// nothing) when the fast path is disabled, the site is not a native float
// array, or src1 is not an aux-buffer literal. Otherwise: guards that the site
// is still float-but-not-int, allocates the array inline, memcpys the double
// elements from the aux buffer, pads the remaining head-segment bytes with
// missing-item sentinels, and branches around the slow helper call.
void
Lowerer::GenerateProfiledNewScFloatArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
{
    if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }
    if (!arrayInfo->IsNativeFloatArray())
    {
        return;
    }
    if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
    {
        return;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);

    // If the array info hasn't mark as not int array yet, go to the helper and mark it.
    // It really is just for assert purpose in JavascriptNativeFloatArray::ToVarArray
    GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);

    IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
    Js::AuxArray<double> * doubles = (Js::AuxArray<double> *)elementsOpnd->m_metadata;
    uint32 size = doubles->count;

    // Generate code as in JavascriptArray::NewLiteral
    bool isHeadSegmentZeroed;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
    IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);

    GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isHeadSegmentZeroed);

    // Initialize the elements: always memcpy the literal doubles from the aux buffer.
    IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
    IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<double>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
    InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
    GenerateMemCopy(dstElementsOpnd, srcOpnd, doubles->count * sizeof(double), instr);

    // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
    // Pad the tail of the aligned head segment with MissingItem-sized stores.
    uint const offsetStart = sizeof(Js::SparseArraySegmentBase) + doubles->count * sizeof(double);
    uint const missingItem = (size - doubles->count) * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
    for (uint i = 0; i < missingItem; i++)
    {
        GenerateMemInit(headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
            IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true), instr, isHeadSegmentZeroed);
    }

    // Skip past the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  3713. IR::Instr *
  3714. Lowerer::LowerNewScIntArray(IR::Instr *arrInstr)
  3715. {
  3716. IR::Instr *instrPrev = arrInstr->m_prev;
  3717. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScIntArray;
  3718. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3719. {
  3720. intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3721. if (weakFuncRef)
  3722. {
  3723. // Technically a load of the same memory address either way.
  3724. Js::ProfileId profileId =
  3725. arrInstr->IsJitProfilingInstr()
  3726. ? arrInstr->AsJitProfilingInstr()->profileId
  3727. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3728. Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
  3729. intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
  3730. // Only do fast-path if it isn't a JitProfiling instr and not copy-on-access array
  3731. if (arrInstr->IsProfiledInstr()
  3732. #if ENABLE_COPYONACCESS_ARRAY
  3733. && (PHASE_OFF1(Js::Phase::CopyOnAccessArrayPhase) || arrayInfo->isNotCopyOnAccessArray) && !PHASE_FORCE1(Js::Phase::CopyOnAccessArrayPhase)
  3734. #endif
  3735. )
  3736. {
  3737. GenerateProfiledNewScIntArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
  3738. }
  3739. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3740. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3741. helperMethod = IR::HelperScrArr_ProfiledNewScIntArray;
  3742. }
  3743. }
  3744. LoadScriptContext(arrInstr);
  3745. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3746. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3747. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3748. return instrPrev;
  3749. }
  3750. IR::Instr *
  3751. Lowerer::LowerNewScFltArray(IR::Instr *arrInstr)
  3752. {
  3753. IR::Instr *instrPrev = arrInstr->m_prev;
  3754. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScFltArray;
  3755. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3756. {
  3757. intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3758. if (weakFuncRef)
  3759. {
  3760. Js::ProfileId profileId =
  3761. arrInstr->IsJitProfilingInstr()
  3762. ? arrInstr->AsJitProfilingInstr()->profileId
  3763. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3764. Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
  3765. intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
  3766. // Only do fast-path if it isn't a JitProfiling instr
  3767. if (arrInstr->IsProfiledInstr()) {
  3768. GenerateProfiledNewScFloatArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
  3769. }
  3770. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3771. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3772. helperMethod = IR::HelperScrArr_ProfiledNewScFltArray;
  3773. }
  3774. }
  3775. LoadScriptContext(arrInstr);
  3776. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3777. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3778. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3779. return instrPrev;
  3780. }
  3781. IR::Instr *
  3782. Lowerer::LowerArraySegmentVars(IR::Instr *arrayInstr)
  3783. {
  3784. IR::Instr * instrPrev;
  3785. IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperArraySegmentVars, m_func);
  3786. instrPrev = m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc2());
  3787. m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc1());
  3788. arrayInstr->m_opcode = Js::OpCode::Call;
  3789. arrayInstr->SetSrc1(opndHelper);
  3790. m_lowererMD.LowerCall(arrayInstr, 0);
  3791. return instrPrev;
  3792. }
  3793. IR::Instr* Lowerer::LowerProfiledNewArray(IR::JitProfilingInstr* instr, bool hasArgs)
  3794. {
  3795. // Use the special helper which checks whether Array has been overwritten by the user and if
  3796. // it hasn't, possibly allocates a native array
  3797. // Insert a temporary label before the instruction we're about to lower, so that we can return
  3798. // the first instruction above that needs to be lowered after we're done - regardless of argument
  3799. // list, StartCall, etc.
  3800. IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(instr);
  3801. Assert(instr->isNewArray);
  3802. Assert(instr->arrayProfileId != Js::Constants::NoProfileId);
  3803. Assert(instr->profileId != Js::Constants::NoProfileId);
  3804. bool isSpreadCall = instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread;
  3805. m_lowererMD.LoadNewScObjFirstArg(instr, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindConstantVar, m_func, true), isSpreadCall ? 1 : 0);
  3806. if (isSpreadCall)
  3807. {
  3808. this->LowerSpreadCall(instr, Js::CallFlags_New, true);
  3809. }
  3810. else
  3811. {
  3812. const int32 argCount = m_lowererMD.LowerCallArgs(instr, Js::CallFlags_New, 4);
  3813. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->arrayProfileId, m_func));
  3814. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  3815. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  3816. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  3817. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScObjArray, m_func));
  3818. m_lowererMD.LowerCall(instr, static_cast<Js::ArgSlot>(argCount));
  3819. }
  3820. return RemoveLoweredRegionStartMarker(startMarkerInstr);
  3821. }
  3822. ///----------------------------------------------------------------------------
  3823. ///
  3824. /// Lowerer::LowerNewScObject
  3825. ///
/// Machine independent lowering of a NewScObject instr (object allocation plus constructor call).
  3827. ///
  3828. ///----------------------------------------------------------------------------
  3829. IR::Instr *
  3830. Lowerer::LowerNewScObject(IR::Instr *newObjInstr, bool callCtor, bool hasArgs, bool isBaseClassConstructorNewScObject)
  3831. {
  3832. if (newObjInstr->IsJitProfilingInstr() && newObjInstr->AsJitProfilingInstr()->isNewArray)
  3833. {
  3834. Assert(callCtor);
  3835. return LowerProfiledNewArray(newObjInstr->AsJitProfilingInstr(), hasArgs);
  3836. }
  3837. bool isSpreadCall = newObjInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
  3838. newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread;
  3839. Func* func = newObjInstr->m_func;
  3840. // Insert a temporary label before the instruction we're about to lower, so that we can return
  3841. // the first instruction above that needs to be lowered after we're done - regardless of argument
  3842. // list, StartCall, etc.
  3843. IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
  3844. IR::Opnd *ctorOpnd = newObjInstr->GetSrc1();
  3845. IR::RegOpnd *newObjDst = newObjInstr->GetDst()->AsRegOpnd();
  3846. Assert(!callCtor || !hasArgs || (newObjInstr->GetSrc2() != nullptr /*&& newObjInstr->GetSrc2()->IsSymOpnd()*/));
  3847. bool skipNewScObj = false;
  3848. bool returnNewScObj = false;
  3849. bool emitBailOut = false;
  3850. // If we haven't yet split NewScObject into NewScObjectNoCtor and CallI, we will need a temporary register
  3851. // to hold the result of the object allocation.
  3852. IR::RegOpnd* createObjDst = callCtor ? IR::RegOpnd::New(TyVar, func) : newObjDst;
  3853. IR::LabelInstr* helperOrBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ true);
  3854. IR::LabelInstr* callCtorLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ false);
  3855. // Try to emit the fast allocation and construction path.
  3856. bool usedFixedCtorCache = TryLowerNewScObjectWithFixedCtorCache(newObjInstr, createObjDst, helperOrBailoutLabel, callCtorLabel, skipNewScObj, returnNewScObj, emitBailOut);
  3857. AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
  3858. Assert(!skipNewScObj || !returnNewScObj);
  3859. Assert(usedFixedCtorCache || !skipNewScObj);
  3860. Assert(!usedFixedCtorCache || newObjInstr->HasFixedFunctionAddressTarget());
  3861. Assert(!skipNewScObj || !emitBailOut);
  3862. #if DBG && 0 // TODO: OOP JIT, enable assert
  3863. if (usedFixedCtorCache)
  3864. {
  3865. Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
  3866. Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
  3867. Assert((ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::ErrorOnNew) == 0);
  3868. Assert(!!(ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::SkipDefaultNewObject) == skipNewScObj);
  3869. }
  3870. #endif
  3871. IR::Instr* startCallInstr = nullptr;
  3872. if (callCtor && hasArgs)
  3873. {
  3874. hasArgs = !newObjInstr->HasEmptyArgOutChain(&startCallInstr);
  3875. }
  3876. // If we're not skipping the default new object, let's emit bailout or a call to NewScObject* helper
  3877. IR::JnHelperMethod newScHelper = IR::HelperInvalid;
  3878. IR::Instr *newScObjCall = nullptr;
  3879. if (!skipNewScObj)
  3880. {
  3881. // If we emitted the fast path, this block is a helper block.
  3882. if (usedFixedCtorCache)
  3883. {
  3884. newObjInstr->InsertBefore(helperOrBailoutLabel);
  3885. }
  3886. if (emitBailOut)
  3887. {
  3888. IR::Instr* bailOutInstr = newObjInstr;
  3889. newObjInstr = IR::Instr::New(newObjInstr->m_opcode, func);
  3890. bailOutInstr->TransferTo(newObjInstr);
  3891. bailOutInstr->m_opcode = Js::OpCode::BailOut;
  3892. bailOutInstr->InsertAfter(newObjInstr);
  3893. GenerateBailOut(bailOutInstr);
  3894. }
  3895. else
  3896. {
  3897. Assert(!newObjDst->CanStoreTemp());
  3898. // createObjDst = NewScObject...(ctorOpnd)
  3899. newScHelper = !callCtor ?
  3900. (isBaseClassConstructorNewScObject ?
  3901. (hasArgs ? IR::HelperNewScObjectNoCtorFull : IR::HelperNewScObjectNoArgNoCtorFull) :
  3902. (hasArgs ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArgNoCtor)) :
  3903. (hasArgs || usedFixedCtorCache ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArg);
  3904. LoadScriptContext(newObjInstr);
  3905. m_lowererMD.LoadHelperArgument(newObjInstr, newObjInstr->GetSrc1());
  3906. newScObjCall = IR::Instr::New(Js::OpCode::Call, createObjDst, IR::HelperCallOpnd::New(newScHelper, func), func);
  3907. newObjInstr->InsertBefore(newScObjCall);
  3908. m_lowererMD.LowerCall(newScObjCall, 0);
  3909. }
  3910. }
  3911. // If we call HelperNewScObjectNoArg directly, we won't be calling the constructor from here, because the helper will do it.
  3912. // We could probably avoid this complexity by converting NewScObjectNoArg to NewScObject in the IRBuilder, once we have dedicated
  3913. // code paths for new Object() and new Array().
  3914. callCtor &= hasArgs || usedFixedCtorCache;
  3915. AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
  3916. newObjInstr->InsertBefore(callCtorLabel);
  3917. if (callCtor && usedFixedCtorCache)
  3918. {
  3919. IR::JnHelperMethod ctorHelper = IR::JnHelperMethodCount;
  3920. // If we have no arguments (i.e. the argument chain is empty), we can recognize a couple of common special cases, such
  3921. // as new Object() or new Array(), for which we have optimized helpers.
  3922. FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
  3923. intptr_t ctorInfo = ctor->GetFuncInfoAddr();
  3924. if (!hasArgs && (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr() || ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr()))
  3925. {
  3926. if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr())
  3927. {
  3928. Assert(skipNewScObj);
  3929. ctorHelper = IR::HelperNewJavascriptObjectNoArg;
  3930. callCtor = false;
  3931. }
  3932. else if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr())
  3933. {
  3934. Assert(skipNewScObj);
  3935. ctorHelper = IR::HelperNewJavascriptArrayNoArg;
  3936. callCtor = false;
  3937. }
  3938. if (!callCtor)
  3939. {
  3940. LoadScriptContext(newObjInstr);
  3941. IR::Instr *ctorCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(ctorHelper, func), func);
  3942. newObjInstr->InsertBefore(ctorCall);
  3943. m_lowererMD.LowerCall(ctorCall, 0);
  3944. }
  3945. }
  3946. }
  3947. IR::AutoReuseOpnd autoReuseSavedCtorOpnd;
  3948. if (callCtor)
  3949. {
  3950. // Load the first argument, which is either the object just created or null. Spread has an extra argument.
  3951. IR::Instr * argInstr = this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, createObjDst, isSpreadCall ? 1 : 0);
  3952. IR::Instr * insertAfterCtorInstr = newObjInstr->m_next;
  3953. if (skipNewScObj)
  3954. {
  3955. // Since we skipped the default new object, we must be returning whatever the constructor returns
  3956. // (which better be an Object), so let's just use newObjDst directly.
  3957. // newObjDst = newObjInstr->m_src1(createObjDst, ...)
  3958. Assert(newObjInstr->GetDst() == newObjDst);
  3959. if (isSpreadCall)
  3960. {
  3961. newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
  3962. }
  3963. else
  3964. {
  3965. newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
  3966. }
  3967. }
  3968. else
  3969. {
  3970. // We may need to return the default new object or whatever the constructor returns. Let's stash
  3971. // away the constructor's return in a temporary operand, and do the right check, if necessary.
  3972. // ctorResultObjOpnd = newObjInstr->m_src1(createObjDst, ...)
  3973. IR::RegOpnd *ctorResultObjOpnd = IR::RegOpnd::New(TyVar, func);
  3974. newObjInstr->UnlinkDst();
  3975. newObjInstr->SetDst(ctorResultObjOpnd);
  3976. if (isSpreadCall)
  3977. {
  3978. newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
  3979. }
  3980. else
  3981. {
  3982. newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
  3983. }
  3984. if (returnNewScObj)
  3985. {
  3986. // MOV newObjDst, createObjDst
  3987. this->m_lowererMD.CreateAssign(newObjDst, createObjDst, insertAfterCtorInstr);
  3988. }
  3989. else
  3990. {
  3991. LowerGetNewScObjectCommon(ctorResultObjOpnd, ctorResultObjOpnd, createObjDst, insertAfterCtorInstr);
  3992. this->m_lowererMD.CreateAssign(newObjDst, ctorResultObjOpnd, insertAfterCtorInstr);
  3993. }
  3994. }
  3995. // We don't ever need to update the constructor cache, if we hard coded it. Caches requiring update after constructor
  3996. // don't get cloned, and those that don't require update will never need one anymore.
  3997. if (!usedFixedCtorCache)
  3998. {
  3999. LowerUpdateNewScObjectCache(insertAfterCtorInstr, newObjDst, ctorOpnd, false /* isCtorFunction */);
  4000. }
  4001. }
  4002. else
  4003. {
  4004. if (newObjInstr->IsJitProfilingInstr())
  4005. {
  4006. Assert(m_func->IsSimpleJit());
  4007. Assert(!CONFIG_FLAG(NewSimpleJit));
  4008. // This path skipped calling the Ctor, which skips calling LowerCallI with newObjInstr, meaning that the call will not be profiled.
  4009. // So we insert it manually here.
  4010. if(newScHelper == IR::HelperNewScObjectNoArg &&
  4011. newObjDst &&
  4012. ctorOpnd->IsRegOpnd() &&
  4013. newObjDst->AsRegOpnd()->m_sym == ctorOpnd->AsRegOpnd()->m_sym)
  4014. {
  4015. Assert(newObjInstr->m_func->IsSimpleJit());
  4016. Assert(createObjDst != newObjDst);
  4017. // The function object sym is going to be overwritten, so save it in a temp for profiling
  4018. IR::RegOpnd *const savedCtorOpnd = IR::RegOpnd::New(ctorOpnd->GetType(), newObjInstr->m_func);
  4019. autoReuseSavedCtorOpnd.Initialize(savedCtorOpnd, newObjInstr->m_func);
  4020. Lowerer::InsertMove(savedCtorOpnd, ctorOpnd, newObjInstr);
  4021. ctorOpnd = savedCtorOpnd;
  4022. }
  4023. // It is a constructor (CallFlags_New) and therefore a single argument (this) would have been given.
  4024. const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_New, 1, func);
  4025. Assert(newScObjCall);
  4026. IR::JitProfilingInstr *const newObjJitProfilingInstr = newObjInstr->AsJitProfilingInstr();
  4027. GenerateCallProfiling(
  4028. newObjJitProfilingInstr->profileId,
  4029. newObjJitProfilingInstr->inlineCacheIndex,
  4030. createObjDst,
  4031. ctorOpnd,
  4032. info,
  4033. false,
  4034. newScObjCall,
  4035. newObjInstr);
  4036. }
  4037. // MOV newObjDst, createObjDst
  4038. if (!skipNewScObj && createObjDst != newObjDst)
  4039. {
  4040. this->m_lowererMD.CreateAssign(newObjDst, createObjDst, newObjInstr);
  4041. }
  4042. newObjInstr->Remove();
  4043. }
  4044. // Return the first instruction above the region we've just lowered.
  4045. return RemoveLoweredRegionStartMarker(startMarkerInstr);
  4046. }
// Emit a call to a SimpleJit dynamic-profiling helper right after an already-lowered call,
// recording the call's return value, callee, and call info. Implicit call flags are saved
// before the call and restored after it so the profiling helper cannot perturb them.
//
// profileId             - profile slot for this call site.
// inlineCacheIndex      - inline cache index, or Js::Constants::NoInlineCacheIndex
//                         (selects the _DefaultInlineCacheIndex helper variant).
// retval                - operand holding the call's result; may be null (a null Addr is
//                         substituted, or nothing is emitted for returnTypeOnly).
// calleeFunctionObjOpnd - the function object that was called.
// callInfo              - CallInfo constant describing the call (flags/arg count).
// returnTypeOnly        - profile only the return type, not the full call.
// callInstr             - the machine-level call instruction that was just emitted.
// insertAfter           - instruction after which the profiling call is inserted.
//
// Returns the lowered profiling call (or insertAfter when nothing needed emitting).
IR::Instr*
Lowerer::GenerateCallProfiling(Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex, IR::Opnd* retval, IR::Opnd*calleeFunctionObjOpnd, IR::Opnd* callInfo, bool returnTypeOnly, IR::Instr*callInstr,IR::Instr*insertAfter)
{
    // This should only ever happen in profiling simplejit
    Assert(m_func->DoSimpleJitDynamicProfile());

    // Make sure they gave us the correct call instruction
#if defined(_M_IX86) || defined(_M_X64)
    Assert(callInstr->m_opcode == Js::OpCode::CALL);
#elif defined(_M_ARM)
    Assert(callInstr->m_opcode == Js::OpCode::BLX);
#endif
    Func*const func = insertAfter->m_func;

    {
        // First, we should save the implicit call flags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        const auto saveOpnd = IR::RegOpnd::New(starFlag->GetType(), func);

        IR::AutoReuseOpnd a(starFlag, func), b(saveOpnd, func);
        //Save the flags (before call) and restore them (after the call)
        this->InsertMove(saveOpnd, starFlag, callInstr);

        // Note: On arm this is slightly inefficient because it forces a reload of the memory location to a reg (whereas x86 can load straight from hard-coded memory into a reg)
        //     But it works and making it not reload the memory location would force more refactoring.
        this->InsertMove(starFlag, saveOpnd, insertAfter->m_next);
    }

    // Profile a call that just happened: push some extra info on the stack and call the helper
    if (!retval)
    {
        if (returnTypeOnly)
        {
            // If we are only supposed to profile the return type but don't use the return value, we might
            // as well do nothing!
            return insertAfter;
        }
        retval = IR::AddrOpnd::NewNull(func);
    }

    IR::Instr* profileCall = IR::Instr::New(Js::OpCode::Call, func);

    // Pick the helper variant; only the full profiling helper takes the inline cache index.
    bool needInlineCacheIndex;
    IR::JnHelperMethod helperMethod;
    if (returnTypeOnly)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileReturnTypeCall;
    }
    else if(inlineCacheIndex == Js::Constants::NoInlineCacheIndex)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileCall_DefaultInlineCacheIndex;
    }
    else
    {
        needInlineCacheIndex = true;
        helperMethod = IR::HelperSimpleProfileCall;
    }

    profileCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertAfter->InsertAfter(profileCall);

    // Load the helper's arguments. NOTE(review): the LoadHelperArgument sequence below must
    // match the helper's signature ordering convention - do not reorder.
    m_lowererMD.LoadHelperArgument(profileCall, callInfo);
    m_lowererMD.LoadHelperArgument(profileCall, calleeFunctionObjOpnd);
    m_lowererMD.LoadHelperArgument(profileCall, retval);
    if(needInlineCacheIndex)
    {
        m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateInlineCacheIndexOpnd(inlineCacheIndex, func));
    }
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateProfileIdOpnd(profileId, func));

    // Push the frame pointer so that the profiling call can grab the stack layout
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateFramePointerOpnd(func));

    // No args: the helper is stdcall
    return m_lowererMD.LowerCall(profileCall, 0);
}
// Attempt to lower NewScObject using a JIT-time (fixed) constructor cache.
// On success, emits the cache-guard check (branching to helperOrBailoutLabel when the
// guard has been invalidated), then either the inline default-object allocation from the
// cached type, or - for class constructors - a move of the function itself as the 'this'
// argument, finally branching to callCtorLabel.
//
// Out params:
//   skipNewScObj   - built-in ctor needs no default new object at all.
//   returnNewScObj - ctor has no explicit return value, so the allocated object is the result.
//   emitBailOut    - the instruction carries BailOutFailedCtorGuardCheck, so the guard-failure
//                    path should bail out rather than call a helper.
//
// Returns true when the fast path was emitted; false to fall back to generic lowering.
bool Lowerer::TryLowerNewScObjectWithFixedCtorCache(IR::Instr* newObjInstr, IR::RegOpnd* newObjDst,
    IR::LabelInstr* helperOrBailoutLabel, IR::LabelInstr* callCtorLabel, bool& skipNewScObj, bool& returnNewScObj, bool& emitBailOut)
{
    skipNewScObj = false;
    returnNewScObj = false;

    AssertMsg(!PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func) || !newObjInstr->HasBailOutInfo(),
        "Why do we have bailout on NewScObject when ObjTypeSpecNewObj is off?");

    // Both phases off: no fixed ctor cache lowering at all.
    if (PHASE_OFF(Js::FixedNewObjPhase, newObjInstr->m_func) && PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func))
    {
        return false;
    }

    JITTimeConstructorCache * ctorCache;

    if (newObjInstr->HasBailOutInfo())
    {
        // Guard failure will bail out instead of taking a helper path.
        Assert(newObjInstr->IsNewScObjectInstr());
        Assert(newObjInstr->IsProfiledInstr());
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck);

        emitBailOut = true;

        ctorCache = newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId));
        Assert(ctorCache != nullptr);
        Assert(!ctorCache->SkipNewScObject());
        Assert(!ctorCache->IsTypeFinal() || ctorCache->CtorHasNoExplicitReturnValue());

        LinkCtorCacheToGuardedProperties(ctorCache);
    }
    else
    {
        if (newObjInstr->m_opcode == Js::OpCode::NewScObjArray || newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
        {
            // These instr's carry a profile that indexes the array call site info, not the ctor cache.
            return false;
        }

        ctorCache = newObjInstr->IsProfiledInstr() ? newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId)) : nullptr;

        if (ctorCache == nullptr)
        {
            if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("FixedNewObj: function %s (%s): lowering non-fixed new script object for %s, because %s.\n"),
                    newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                    newObjInstr->IsProfiledInstr() ? _u("constructor cache hasn't been cloned") : _u("instruction is not profiled"));
                Output::Flush();
            }
            return false;
        }
    }

    Assert(ctorCache != nullptr);

    // We should only have cloned if the script contexts match.
    // TODO: oop jit, add ctorCache->scriptContext for tracing assert
    // Assert(newObjInstr->m_func->GetScriptContextInfo()->GetAddr() == ctorCache->scriptContext);

    // Built-in constructors don't need a default new object. Since we know which constructor we're calling, we can skip creating a default
    // object and call a specialized helper (or even constructor, directly) avoiding the checks in generic NewScObjectCommon.
    if (ctorCache->SkipNewScObject())
    {
#if 0 // TODO: oop jit, add constructor info for tracing
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            const Js::JavascriptFunction* ctor = ctorCache->constructor;
            Js::FunctionBody* ctorBody = ctor->GetFunctionInfo()->HasBody() ? ctor->GetFunctionInfo()->GetFunctionBody() : nullptr;
            const char16* ctorName = ctorBody != nullptr ? ctorBody->GetDisplayName() : _u("<unknown>");

            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

            Output::Print(_u("FixedNewObj: function %s (%s): lowering skipped new script object for %s with %s ctor <unknown> (%s %s).\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                ctorName, ctorBody ? ctorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"));
            Output::Flush();
        }
#endif
        // All built-in constructors share a special singleton cache that is never checked and never invalidated. It cannot be used
        // as a guard to protect any property operations downstream from the constructor. If this ever becomes a performance issue,
        // we could have a dedicated cache for each built-in constructor, populate it and invalidate it as any other constructor cache.
        AssertMsg(!emitBailOut, "Can't bail out on constructor cache guard for built-in constructors.");

        skipNewScObj = true;
        // Seed the destination with null; the caller emits the real result.
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        this->m_lowererMD.CreateAssign(newObjDst, zeroOpnd, newObjInstr);
        return true;
    }

    AssertMsg(ctorCache->GetType() != nullptr, "Why did we hard-code a mismatched, invalidated or polymorphic constructor cache?");

#if 0 // TODO: oop jit, add constructor info for tracing
    if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
    {
        const Js::JavascriptFunction* constructor = ctorCache->constructor;
        Js::FunctionBody* constructorBody = constructor->GetFunctionInfo()->HasBody() ? constructor->GetFunctionInfo()->GetFunctionBody() : nullptr;
        const char16* constructorName = constructorBody != nullptr ? constructorBody->GetDisplayName() : _u("<unknown>");

        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): type = %p, slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, constructorBody ? constructorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"),
                ctorCache->type, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        else
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, debugStringBuffer, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        Output::Flush();
    }
#endif

    // If the constructor has no return statements, we can safely return the object that was created here.
    // No need to check what the constructor returned - it must be undefined.
    returnNewScObj = ctorCache->CtorHasNoExplicitReturnValue();

    // Guard check: a zeroed guard value means the cache was invalidated -> helper/bailout path.
    Assert(Js::ConstructorCache::GetSizeOfGuardValue() == static_cast<size_t>(TySize[TyMachPtr]));
    IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(ctorCache->GetRuntimeCacheGuardAddr(), TyMachReg, this->m_func,
        IR::AddrOpndKindDynamicGuardValueRef);
    IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
    InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, helperOrBailoutLabel, newObjInstr);

    // If we are calling new on a class constructor, the contract is that we pass new.target as the 'this' argument.
    // function is the constructor on which we called new - which is new.target.
    FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
    if (ctor->IsClassCtor())
    {
        // MOV newObjDst, function
        this->m_lowererMD.CreateAssign(newObjDst, newObjInstr->GetSrc1(), newObjInstr);
    }
    else
    {
        JITTypeHolder newObjectType(ctorCache->GetType());
        Assert(newObjectType->IsShared());

        IR::AddrOpnd* typeSrc = IR::AddrOpnd::New(newObjectType->GetAddr(), IR::AddrOpndKindDynamicType, m_func);

        // For the next call:
        //     inlineSlotSize == Number of slots to allocate beyond the DynamicObject header
        //     slotSize - inlineSlotSize == Number of aux slots to allocate
        int inlineSlotSize = ctorCache->GetInlineSlotCount();
        int slotSize = ctorCache->GetSlotCount();
        if (newObjectType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
        {
            // Header-inlined slots live inside the object header, so exclude them from the
            // extra allocation size.
            Assert(inlineSlotSize >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
            Assert(inlineSlotSize == slotSize);
            slotSize = inlineSlotSize -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
        }

        GenerateDynamicObjectAlloc(newObjInstr, inlineSlotSize, slotSize, newObjDst, typeSrc);
    }

    // JMP $callCtor
    IR::BranchInstr *callCtorBranch = IR::BranchInstr::New(Js::OpCode::Br, callCtorLabel, m_func);
    newObjInstr->InsertBefore(callCtorBranch);
    this->m_lowererMD.LowerUncondBranch(callCtorBranch);

    return true;
}
  4258. void
  4259. Lowerer::GenerateRecyclerAllocAligned(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  4260. {
  4261. IR::LabelInstr * allocDoneLabel = nullptr;
  4262. if (!PHASE_OFF(Js::JitAllocNewObjPhase, insertionPointInstr->m_func) && HeapInfo::IsSmallObject(allocSize))
  4263. {
  4264. IR::LabelInstr * allocHelperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4265. allocDoneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, inOpHelper);
  4266. this->m_lowererMD.GenerateFastRecyclerAlloc(allocSize, newObjDst, insertionPointInstr, allocHelperLabel, allocDoneLabel);
  4267. // $allocHelper:
  4268. insertionPointInstr->InsertBefore(allocHelperLabel);
  4269. }
  4270. // call JavascriptOperators::AllocMemForScObject(allocSize, scriptContext->GetRecycler())
  4271. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, this->LoadScriptContextValueOpnd(insertionPointInstr, ScriptContextValue::ScriptContextRecycler));
  4272. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, IR::IntConstOpnd::New((int32)allocSize, TyUint32, m_func, true));
  4273. IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(allocHelper, m_func), m_func);
  4274. insertionPointInstr->InsertBefore(newObjCall);
  4275. this->m_lowererMD.LowerCall(newObjCall, 0);
  4276. if (allocDoneLabel != nullptr)
  4277. {
  4278. // $allocDone:
  4279. insertionPointInstr->InsertBefore(allocDoneLabel);
  4280. }
  4281. }
  4282. IR::Instr *
  4283. Lowerer::LowerGetNewScObject(IR::Instr *instr)
  4284. {
  4285. Assert(instr);
  4286. Assert(instr->m_opcode == Js::OpCode::GetNewScObject);
  4287. Assert(instr->GetDst());
  4288. Assert(instr->GetSrc1());
  4289. Assert(instr->GetSrc2());
  4290. const auto instrPrev = instr->m_prev;
  4291. Assert(instrPrev);
  4292. LowerGetNewScObjectCommon(
  4293. instr->GetDst()->AsRegOpnd(),
  4294. instr->GetSrc1()->AsRegOpnd(),
  4295. instr->GetSrc2()->AsRegOpnd(),
  4296. instr);
  4297. instr->Remove();
  4298. return instrPrev;
  4299. }
// Emit the "which object does `new` yield" selection into the IR stream before
// insertBeforeInstr: if the constructor's return value is a JS object it becomes the
// result, otherwise the originally allocated object (the 'this' passed to the ctor) does.
// resultObjOpnd may alias either input; redundant moves are elided.
void
Lowerer::LowerGetNewScObjectCommon(
    IR::RegOpnd *const resultObjOpnd,
    IR::RegOpnd *const constructorReturnOpnd,
    IR::RegOpnd *const newObjOpnd,
    IR::Instr *insertBeforeInstr)
{
    Assert(resultObjOpnd);
    Assert(constructorReturnOpnd);
    Assert(newObjOpnd);
    Assert(insertBeforeInstr);

    // (newObjOpnd == 'this' value passed to constructor)
    //
    // if (!IsJsObject(constructorReturnOpnd))
    //     goto notObjectLabel
    // newObjOpnd = constructorReturnOpnd
    // notObjectLabel:
    // resultObjOpnd = newObjOpnd
    if(!constructorReturnOpnd->IsEqual(newObjOpnd))
    {
        // Need to check whether the constructor returned an object

        IR::LabelInstr *notObjectLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        Assert(insertBeforeInstr->m_prev);
        IR::LabelInstr *const doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertBeforeInstr->InsertBefore(doneLabel);
        // Subsequent emission targets the done label so everything lands before it.
        insertBeforeInstr = doneLabel;

#if defined(_M_ARM32_OR_ARM64)
        // ARM: no inline object-test sequence here; call the Op_IsObject helper and
        // branch on its boolean result.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, constructorReturnOpnd);

        IR::Opnd * targetOpnd = IR::RegOpnd::New(StackSym::New(TyInt32,m_func), TyInt32, m_func);
        IR::Instr * callIsObjectInstr = IR::Instr::New(Js::OpCode::Call, targetOpnd, m_func);
        insertBeforeInstr->InsertBefore(callIsObjectInstr);
        this->m_lowererMD.ChangeToHelperCall(callIsObjectInstr, IR::HelperOp_IsObject);

        InsertTestBranch( targetOpnd, targetOpnd, Js::OpCode::BrEq_A, notObjectLabel,insertBeforeInstr);
#else
        // x86/x64: inline test; falls through when the value is a JS object.
        m_lowererMD.GenerateIsJsObjectTest(constructorReturnOpnd, insertBeforeInstr, notObjectLabel);
#endif

        // Value returned by constructor is an object (use constructorReturnOpnd)
        if(!resultObjOpnd->IsEqual(constructorReturnOpnd))
        {
            this->m_lowererMD.CreateAssign(resultObjOpnd, constructorReturnOpnd, insertBeforeInstr);
        }
        insertBeforeInstr->InsertBefore(
            m_lowererMD.LowerUncondBranch(IR::BranchInstr::New(Js::OpCode::Br, doneLabel, m_func)));

        // Value returned by constructor is not an object (use newObjOpnd)
        insertBeforeInstr->InsertBefore(notObjectLabel);
    }

    if(!resultObjOpnd->IsEqual(newObjOpnd))
    {
        this->m_lowererMD.CreateAssign(resultObjOpnd, newObjOpnd, insertBeforeInstr);
    }

    // fall through to insertBeforeInstr or doneLabel
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerUpdateNewScObjectCache
///
/// Emit the post-constructor update of the constructor cache: if the ctor (src1)
/// is a JavascriptFunction whose cache is flagged updateAfterCtor, call the
/// HelperUpdateNewScObjectCache runtime helper with (ctor, result, scriptContext).
/// isCtorFunction == true skips the "is it a function?" type check.
/// Everything is inserted before insertInstr, which is returned.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR::Opnd *src1, const bool isCtorFunction)
{
    // Emitted sequence:
    //    if (!isCtorFunction)
    //    {
    //        MOV r1, [src1 + offset(type)]       -- check base TypeIds_Function
    //        CMP [r1 + offset(typeId)], TypeIds_Function
    //    }
    //    JNE $fallThru
    //    MOV r2, [src1 + offset(constructorCache)]
    //    MOV r3, [r2 + offset(updateAfterCtor)]
    //    TEST r3, r3                             -- check if updateAfterCtor is 0
    //    JEQ $fallThru
    //    CALL UpdateNewScObjectCache(src1, dst, scriptContext)
    // $fallThru:
    IR::LabelInstr *labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // The indirect loads below need src1 in a register.
    if (!src1->IsRegOpnd())
    {
        IR::RegOpnd *srcRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(srcRegOpnd, src1, insertInstr);
        src1 = srcRegOpnd;
    }

    // Check if constructor is a function if we don't already know it.
    if (!isCtorFunction)
    {
        // MOV r1, [src1 + offset(type)]       -- check base TypeIds_Function
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        LowererMD::CreateAssign(r1, indirOpnd, insertInstr);

        // CMP [r1 + offset(typeId)], TypeIds_Function
        // JNE $fallThru
        indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
        IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
        InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
    }

    // Every function has a constructor cache, even if only the default blank one.
    // r2 = MOV JavascriptFunction->constructorCache
    IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
    IR::IndirOpnd *opndIndir = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfConstructorCache(), TyMachReg, this->m_func);
    IR::Instr *instr = LowererMD::CreateAssign(r2, opndIndir, insertInstr);

    // r3 = constructorCache->updateAfterCtor
    IR::RegOpnd *r3 = IR::RegOpnd::New(TyInt8, this->m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(r2, Js::ConstructorCache::GetOffsetOfUpdateAfterCtor(), TyUint8, this->m_func);
    instr = LowererMD::CreateAssign(r3, indirOpnd, insertInstr);

    // TEST r3, r3                             -- check if updateAfterCtor is 0
    // JEQ $fallThru
    InsertTestBranch(r3, r3, Js::OpCode::BrEq_A, labelFallThru, insertInstr);

    // r2 = UpdateNewScObjectCache(src1, dst, scriptContext)
    insertInstr->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true)); // helper label for uncommon path
    IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperUpdateNewScObjectCache, m_func);
    LoadScriptContext(insertInstr);
    m_lowererMD.LoadHelperArgument(insertInstr, dst);
    m_lowererMD.LoadHelperArgument(insertInstr, src1);
    instr = IR::Instr::New(Js::OpCode::Call, m_func);
    instr->SetSrc1(opndHelper);
    insertInstr->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);

    // $fallThru:
    insertInstr->InsertBefore(labelFallThru);

    return insertInstr;
}
IR::Instr *
Lowerer::LowerNewScObjArray(IR::Instr *newObjInstr)
{
    // Lowers a profiled `new Array(...)` with at least one argument. Emits, when
    // profitable, an inline fast path for the single-argument case, then falls back
    // to the ScrArr_ProfiledNewInstance helper; afterwards it patches the result
    // with the call-site index and weak function ref if the helper produced a
    // native array. Returns the first instruction of the lowered region so the
    // caller can resume lowering above it.
    if (newObjInstr->HasEmptyArgOutChain())
    {
        // No actual arguments: src2 (the arg chain link) is unused; lower as no-arg.
        newObjInstr->FreeSrc2();
        return LowerNewScObjArrayNoArg(newObjInstr);
    }

    IR::Instr* startMarkerInstr = nullptr;
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    if (!targetOpnd->IsAddrOpnd())
    {
        // Call target is not a known fixed function address.
        if (!newObjInstr->HasBailOutInfo())
        {
            // No bailout available either: lower as a generic object construction.
            return this->LowerNewScObject(newObjInstr, true, true);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        // If the target is the library's Array constructor, skip the bailout.
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Clone the profiled instruction after the bailout point; the original
        // instruction becomes the BailOut itself, and lowering continues on the clone.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), newObjInstr->UnlinkSrc2(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    intptr_t weakFuncRef = 0;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    intptr_t arrayInfoAddr = 0;
    Assert(newObjInstr->IsProfiledInstr());

    IR::RegOpnd *resultObjOpnd = newObjInstr->GetDst()->AsRegOpnd();
    IR::Instr * insertInstr = newObjInstr->m_next;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);

    // We may not have profileId if we converted a NewScObject to NewScObjArray
    if (profileId != Js::Constants::NoProfileId)
    {
        arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);

    // Walk the arg chain: src2 links to the ArgOut instruction for the ctor argument.
    IR::Opnd *linkOpnd = newObjInstr->GetSrc2();
    Assert(linkOpnd->IsSymOpnd());
    StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
    Assert(linkSym->IsSingleDef());
    IR::Instr* argInstr = linkSym->GetInstrDef();
    IR::Opnd *opndOfArrayCtor = argInstr->GetSrc1();
    const uint16 upperBoundValue = 8;

    // Generate fast path only if it meets all the conditions:
    // 1. It is the only parameter
    // 2a. If 1st paramter is a variable, emit fast path with checks
    // 2b. If 1st paramter is a constant, it is in range 0 and upperBoundValue (inclusive)
    if (opndOfArrayCtor->IsAddrOpnd() || opndOfArrayCtor->IsRegOpnd()) // #1
    {
        if ((linkSym->GetArgSlotNum() == 2)) // 1. It is the only parameter
        {
            AssertMsg(linkSym->IsArgSlotSym(), "Not an argSlot symbol...");
            linkOpnd = argInstr->GetSrc2();
            bool emittedFastPath = true;

            // 2a. If 1st paramter is a variable, emit fast path with checks
            if (opndOfArrayCtor->IsRegOpnd())
            {
                // 3. GenerateFastPath -- specialized on the profiled element kind.
                if (arrayInfo && arrayInfo->IsNativeIntArray())
                {
                    GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeIntArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
                        Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(),
                        Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef());
                }
                else if (arrayInfo && arrayInfo->IsNativeFloatArray())
                {
                    GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeFloatArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
                        Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(),
                        Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef());
                }
                else
                {
                    GenerateProfiledNewScObjArrayFastPath<Js::JavascriptArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor, 0, 0);
                }
            }
            // 2b. If 1st paramter is a constant, it is in range 0 and upperBoundValue (inclusive)
            else
            {
                int32 length = linkSym->GetIntConstValue();
                if (length >= 0 && length <= upperBoundValue)
                {
                    GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, (uint32)length, labelDone, false);
                }
                else
                {
                    emittedFastPath = false;
                }
            }

            // Since we emitted fast path above, move the startCall/argOut instruction right before helper
            if (emittedFastPath)
            {
                // linkOpnd now points at the StartCall; it must feed directly into
                // this arg chain (single def, not itself an arg slot).
                linkSym = linkOpnd->AsRegOpnd()->m_sym->AsStackSym();
                AssertMsg(!linkSym->IsArgSlotSym() && linkSym->m_isSingleDef, "Arg tree not single def...");
                IR::Instr* startCallInstr = linkSym->m_instrDef;
                AssertMsg(startCallInstr->GetArgOutCount(false) == 2, "Generating ArrayFastPath for more than 1 parameter not allowed.");
                // Since we emitted fast path above, move the startCall/argOut instruction right before helper
                startCallInstr->Move(newObjInstr);
                argInstr->Move(newObjInstr);
            }
        }
    }

    // Slow path: call ScrArr_ProfiledNewInstance with the array-call-site info as
    // the implicit first argument.
    newObjInstr->UnlinkSrc1();
    IR::Opnd *profileOpnd = IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func);
    this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, profileOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperScrArr_ProfiledNewInstance;
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    newObjInstr = GenerateDirectCall(newObjInstr, targetOpnd, Js::CallFlags_New);

    // If the result's vtable is the (var) JavascriptArray vtable, it is not a native
    // array and there is nothing to patch; skip to done.
    InsertCompareBranch(
        IR::IndirOpnd::New(resultObjOpnd, 0, TyMachPtr, func),
        LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptArray),
        Js::OpCode::BrEq_A,
        true,
        labelDone,
        insertInstr);

    // We know we have a native array, so store the weak ref and call site index.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfArrayCallSiteIndex(), TyUint16, func),
        IR::Opnd::CreateProfileIdOpnd(profileId, func),
        insertInstr);
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfWeakFuncRef(), TyMachReg, func),
        IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func),
        insertInstr);

    insertInstr->InsertBefore(labelDone);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
IR::Instr *
Lowerer::LowerNewScObjArrayNoArg(IR::Instr *newObjInstr)
{
    // Lowers a profiled `new Array()` with no arguments: emits a zero-length
    // fast-path allocation followed by a call to the
    // ScrArr_ProfiledNewInstanceNoArg helper as the slow path.
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    IR::Instr* startMarkerInstr = nullptr;

    if (!targetOpnd->IsAddrOpnd())
    {
        // Call target is not a known fixed function address.
        if (!newObjInstr->HasBailOutInfo())
        {
            // No bailout available either: lower as a generic object construction.
            return this->LowerNewScObject(newObjInstr, true, false);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        // If the target is the library's Array constructor, skip the bailout.
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Clone the profiled instruction after the bailout point; the original
        // instruction becomes the BailOut itself, and lowering continues on the clone.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    Assert(newObjInstr->IsProfiledInstr());

    intptr_t weakFuncRef = 0;
    intptr_t arrayInfoAddr = 0;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    // Profile data may be absent (NoProfileId); then the helper gets null info.
    if (profileId != Js::Constants::NoProfileId)
    {
        arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    // Fast path: inline-allocate a zero-length array, jumping to labelDone on success.
    GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, 0, labelDone, true);
    newObjInstr->InsertAfter(labelDone);

    // Slow path helper call:
    //   ScrArr_ProfiledNewInstanceNoArg(target, scriptContext, arrayInfo, weakFuncRef)
    // Arguments are pushed in reverse order below.
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func));
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func));
    LoadScriptContext(newObjInstr);
    m_lowererMD.LoadHelperArgument(newObjInstr, targetOpnd);

    newObjInstr->UnlinkSrc1();
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrArr_ProfiledNewInstanceNoArg, func));
    m_lowererMD.LowerCall(newObjInstr, 0);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
  4640. ///----------------------------------------------------------------------------
  4641. ///
  4642. /// Lowerer::LowerPrologEpilog
  4643. ///
  4644. ///----------------------------------------------------------------------------
  4645. void
  4646. Lowerer::LowerPrologEpilog()
  4647. {
  4648. if (m_func->GetJITFunctionBody()->IsCoroutine())
  4649. {
  4650. LowerGeneratorResumeJumpTable();
  4651. }
  4652. IR::Instr * instr;
  4653. instr = m_func->m_headInstr;
  4654. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4655. m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
  4656. instr = m_func->m_exitInstr;
  4657. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4658. m_lowererMD.LowerExitInstr(instr->AsExitInstr());
  4659. }
  4660. void
  4661. Lowerer::LowerPrologEpilogAsmJs()
  4662. {
  4663. IR::Instr * instr;
  4664. instr = m_func->m_headInstr;
  4665. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4666. m_lowererMD.LowerEntryInstrAsmJs(instr->AsEntryInstr());
  4667. instr = m_func->m_exitInstr;
  4668. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4669. m_lowererMD.LowerExitInstrAsmJs(instr->AsExitInstr());
  4670. }
  4671. void
  4672. Lowerer::LowerGeneratorResumeJumpTable()
  4673. {
  4674. Assert(m_func->GetJITFunctionBody()->IsCoroutine());
  4675. IR::Instr * jumpTableInstr = m_func->m_headInstr;
  4676. AssertMsg(jumpTableInstr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4677. // Hope to do away with this linked list scan by moving this lowering to a post-prolog-epilog/pre-encoder phase that is common to all architectures (currently such phase is only available on amd64/arm)
  4678. while (jumpTableInstr->m_opcode != Js::OpCode::GeneratorResumeJumpTable)
  4679. {
  4680. jumpTableInstr = jumpTableInstr->m_next;
  4681. }
  4682. IR::Opnd * srcOpnd = jumpTableInstr->UnlinkSrc1();
  4683. m_func->MapYieldOffsetResumeLabels([&](int i, const YieldOffsetResumeLabel& yorl)
  4684. {
  4685. uint32 offset = yorl.First();
  4686. IR::LabelInstr * label = yorl.Second();
  4687. if (label != nullptr && label->m_hasNonBranchRef)
  4688. {
  4689. // Also fix up the bailout at the label with the jump to epilog that was not emitted in GenerateBailOut()
  4690. Assert(label->m_prev->HasBailOutInfo());
  4691. GenerateJumpToEpilogForBailOut(label->m_prev->GetBailOutInfo(), label->m_prev);
  4692. }
  4693. else if (label == nullptr)
  4694. {
  4695. label = m_func->m_bailOutNoSaveLabel;
  4696. }
  4697. // For each offset label pair, insert a compare of the offset and branch if equal to the label
  4698. InsertCompareBranch(srcOpnd, IR::IntConstOpnd::New(offset, TyUint32, m_func), Js::OpCode::BrSrEq_A, label, jumpTableInstr);
  4699. });
  4700. jumpTableInstr->Remove();
  4701. }
  4702. void
  4703. Lowerer::DoInterruptProbes()
  4704. {
  4705. this->m_func->SetHasInstrNumber(true);
  4706. uint instrCount = 1;
  4707. FOREACH_INSTR_IN_FUNC(instr, this->m_func)
  4708. {
  4709. instr->SetNumber(instrCount++);
  4710. if (instr->IsLabelInstr())
  4711. {
  4712. IR::LabelInstr *labelInstr = instr->AsLabelInstr();
  4713. if (labelInstr->m_isLoopTop)
  4714. {
  4715. // For every loop top label, insert the following:
  4716. // cmp sp, ThreadContext::stackLimitForCurrentThread
  4717. // bgt $continue
  4718. // $helper:
  4719. // call JavascriptOperators::ScriptAbort
  4720. // b $exit
  4721. // $continue:
  4722. IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4723. labelInstr->InsertAfter(newLabel);
  4724. this->InsertOneLoopProbe(newLabel, newLabel);
  4725. }
  4726. }
  4727. }
  4728. NEXT_INSTR_IN_FUNC;
  4729. }
  4730. // Insert an interrupt probe at each loop back branch. (Currently uncalled, since we're inserting
  4731. // probes at loop tops instead of back edges, but kept around because it may prove useful.)
  4732. uint
  4733. Lowerer::DoLoopProbeAndNumber(IR::BranchInstr *branchInstr)
  4734. {
  4735. IR::LabelInstr *labelInstr = branchInstr->GetTarget();
  4736. if (labelInstr == nullptr || labelInstr->GetNumber() == 0)
  4737. {
  4738. // Forward branch (possibly an indirect jump after try-catch-finally); nothing to do.
  4739. return branchInstr->GetNumber() + 1;
  4740. }
  4741. Assert(labelInstr->m_isLoopTop);
  4742. // Insert a stack probe at this branch. Number all the instructions we insert
  4743. // and return the next instruction number.
  4744. uint number = branchInstr->GetNumber();
  4745. IR::Instr *instrPrev = branchInstr->m_prev;
  4746. IR::Instr *instrNext = branchInstr->m_next;
  4747. if (branchInstr->IsUnconditional())
  4748. {
  4749. // B $loop ==>
  4750. // cmp [], 0
  4751. // beq $loop
  4752. // $helper:
  4753. // call abort
  4754. // b $exit
  4755. this->InsertOneLoopProbe(branchInstr, labelInstr);
  4756. branchInstr->Remove();
  4757. }
  4758. else
  4759. {
  4760. // Bcc $loop ==>
  4761. // Binv $notloop
  4762. // cmp [], 0
  4763. // beq $loop
  4764. // $helper:
  4765. // call abort
  4766. // b $exit
  4767. // $notloop:
  4768. IR::LabelInstr *loopExitLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4769. branchInstr->SetTarget(loopExitLabel);
  4770. LowererMD::InvertBranch(branchInstr);
  4771. branchInstr->InsertAfter(loopExitLabel);
  4772. this->InsertOneLoopProbe(loopExitLabel, labelInstr);
  4773. }
  4774. FOREACH_INSTR_IN_RANGE(instr, instrPrev->m_next, instrNext->m_prev)
  4775. {
  4776. instr->SetNumber(number++);
  4777. }
  4778. NEXT_INSTR_IN_RANGE;
  4779. return number;
  4780. }
  4781. void
  4782. Lowerer::InsertOneLoopProbe(IR::Instr *insertInstr, IR::LabelInstr *loopLabel)
  4783. {
  4784. // Insert one interrupt probe at the given instruction. Probe the stack and call the abort helper
  4785. // directly if the probe fails.
  4786. IR::Opnd *memRefOpnd = IR::MemRefOpnd::New(
  4787. m_func->GetThreadContextInfo()->GetThreadStackLimitAddr(),
  4788. TyMachReg, this->m_func);
  4789. IR::RegOpnd *regStackPointer = IR::RegOpnd::New(
  4790. NULL, this->m_lowererMD.GetRegStackPointer(), TyMachReg, this->m_func);
  4791. InsertCompareBranch(regStackPointer, memRefOpnd, Js::OpCode::BrGt_A, loopLabel, insertInstr);
  4792. IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4793. insertInstr->InsertBefore(helperLabel);
  4794. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScriptAbort, this->m_func);
  4795. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  4796. instr->SetSrc1(helperOpnd);
  4797. insertInstr->InsertBefore(instr);
  4798. this->m_lowererMD.LowerCall(instr, 0);
  4799. // Jump to the exit after the helper call. This instruction will never be reached, but the jump
  4800. // indicates that nothing is live after the call (to avoid useless spills in code that will
  4801. // be executed).
  4802. instr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
  4803. if (instr->IsLabelInstr())
  4804. {
  4805. helperLabel = instr->AsLabelInstr();
  4806. }
  4807. else
  4808. {
  4809. helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4810. this->m_func->m_exitInstr->InsertBefore(helperLabel);
  4811. }
  4812. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, helperLabel, this->m_func);
  4813. insertInstr->InsertBefore(instr);
  4814. }
  4815. ///----------------------------------------------------------------------------
  4816. ///
  4817. /// Lowerer::LoadPropertySymAsArgument
  4818. ///
  4819. /// Generate code to pass a fieldSym as argument to a helper.
  4820. ///----------------------------------------------------------------------------
  4821. IR::Instr *
  4822. Lowerer::LoadPropertySymAsArgument(IR::Instr *instr, IR::Opnd *fieldSrc)
  4823. {
  4824. IR::Instr * instrPrev;
  4825. AssertMsg(fieldSrc->IsSymOpnd() && fieldSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as src of LdFld");
  4826. IR::SymOpnd *symOpnd = fieldSrc->AsSymOpnd();
  4827. PropertySym * fieldSym = symOpnd->m_sym->AsPropertySym();
  4828. IR::IntConstOpnd * indexOpnd = IR::IntConstOpnd::New(fieldSym->m_propertyId, TyInt32, m_func, /*dontEncode*/true);
  4829. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4830. IR::RegOpnd * instanceOpnd = symOpnd->CreatePropertyOwnerOpnd(m_func);
  4831. m_lowererMD.LoadHelperArgument(instr, instanceOpnd);
  4832. return instrPrev;
  4833. }
  4834. ///----------------------------------------------------------------------------
  4835. ///
  4836. /// Lowerer::LoadFunctionBodyAsArgument
  4837. ///
  4838. /// Special case: the "property ID" is a key into the ScriptContext's FunctionBody map
  4839. ///----------------------------------------------------------------------------
  4840. IR::Instr *
  4841. Lowerer::LoadFunctionBodyAsArgument(IR::Instr *instr, IR::IntConstOpnd * functionBodySlotOpnd, IR::RegOpnd * envOpnd)
  4842. {
  4843. IR::Instr * instrPrev;
  4844. // We need to pass in the function reference, we can't embed the pointer to the function proxy here.
  4845. // The function proxy may be deferred parsed/serialize, and may 'progress' to a real function body after it is undeferred
  4846. // At which point the deferred function proxy may be collect.
  4847. // Just pass it the address where we will find the function proxy/body
  4848. Js::FunctionInfoPtrPtr infoRef = instr->m_func->GetJITFunctionBody()->GetNestedFuncRef((uint)functionBodySlotOpnd->GetValue());
  4849. AssertMsg(infoRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
  4850. IR::AddrOpnd * indexOpnd = IR::AddrOpnd::New((Js::Var)infoRef, IR::AddrOpndKindDynamicMisc, m_func);
  4851. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4852. m_lowererMD.LoadHelperArgument(instr, envOpnd);
  4853. return instrPrev;
  4854. }
IR::Instr *
Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
{
    // Lowers a field load under dynamic-profile collection: pushes the arguments
    // for the matching ProfilingHelpers entry point and converts the instruction
    // into a direct helper call. Returns the instruction preceding the lowered
    // region so the caller can resume lowering there.
    const auto instrPrev = ldFldInstr->m_prev;
    auto src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    IR::JnHelperMethod helper = IR::HelperInvalid;
    // The LdFld-family opcodes differ only in the helper they call; they funnel
    // through the shared ldFldCommon argument setup below.
    switch (ldFldInstr->m_opcode)
    {
    case Js::OpCode::LdFld:
        helper = IR::HelperProfiledLdFld;
        goto ldFldCommon;
    case Js::OpCode::LdRootFld:
        helper = IR::HelperProfiledLdRootFld;
        goto ldFldCommon;
    case Js::OpCode::LdMethodFld:
        helper = IR::HelperProfiledLdMethodFld;
        goto ldFldCommon;
    case Js::OpCode::LdRootMethodFld:
        helper = IR::HelperProfiledLdRootMethodFld;
        goto ldFldCommon;
    case Js::OpCode::LdFldForCallApplyTarget:
        helper = IR::HelperProfiledLdFld_CallApplyTarget;
        goto ldFldCommon;
    case Js::OpCode::LdFldForTypeOf:
        helper = IR::HelperProfiledLdFldForTypeOf;
        goto ldFldCommon;
    case Js::OpCode::LdRootFldForTypeOf:
        helper = IR::HelperProfiledLdRootFldForTypeOf;
        goto ldFldCommon;

ldFldCommon:
    {
        Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
        /*
            Var ProfilingHelpers::ProfiledLdFld_Jit(
                const Var instance,
                const PropertyId propertyId,
                const InlineCacheIndex inlineCacheIndex,
                void *const framePointer)
        */
        // Arguments are pushed in reverse order of the helper signature.
        m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        LoadPropertySymAsArgument(ldFldInstr, src);
        break;
    }

    case Js::OpCode::LdSuperFld:
    {
        Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
        IR::Opnd * src2 = nullptr;
        /*
            Var ProfilingHelpers::ProfiledLdSuperFld_Jit(
                const Var instance,
                const PropertyId propertyId,
                const InlineCacheIndex inlineCacheIndex,
                void *const framePointer,
                const Var thisInstance)
        */
        // src2 carries the 'this' instance, which LdSuperFld needs in addition
        // to the home-object instance in src1.
        src2 = ldFldInstr->UnlinkSrc2();
        m_lowererMD.LoadHelperArgument(ldFldInstr, src2 );
        m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        LoadPropertySymAsArgument(ldFldInstr, src);
        helper = IR::HelperProfiledLdSuperFld;
        break;
    }

    case Js::OpCode::LdLen_A:
        // If we want to profile this call, then push some extra args and call the profiling version
        m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateProfileIdOpnd(ldFldInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(ldFldInstr, src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(ldFldInstr, CreateFunctionBodyOpnd(ldFldInstr->m_func));
        helper = IR::HelperSimpleProfiledLdLen;
        break;

    default:
        Assert(false);
    }

    // Convert the instruction itself into the helper call.
    ldFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(ldFldInstr, 0);

    return instrPrev;
}
  4938. ///----------------------------------------------------------------------------
  4939. ///
  4940. /// Lowerer::LowerLdFld
  4941. ///
  4942. /// Lower an instruction (LdFld, ScopedLdFld) that takes a property
  4943. /// reference as a source and puts a result in a register.
  4944. ///
  4945. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdFld(
    IR::Instr * ldFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool useInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper)
{
    // Lowers an LdFld-style instruction into a helper call, optionally passing the
    // (monomorphic or polymorphic) inline cache. `labelBailOut`/`isHelper` are
    // forwarded to ChangeToHelperCall for bailout wiring.
    if (ldFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledLdFld(ldFldInstr->AsJitProfilingInstr());
    }

    IR::Opnd *src;
    IR::Instr *instrPrev = ldFldInstr->m_prev;

    src = ldFldInstr->UnlinkSrc1();
    if (ldFldInstr->m_opcode == Js::OpCode::LdSuperFld)
    {
        // LdSuperFld passes the 'this' instance (src2) as an extra helper argument.
        IR::Opnd * src2 = nullptr;
        src2 = ldFldInstr->UnlinkSrc2();
        m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
    }
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    if (useInlineCache)
    {
        IR::Opnd * inlineCacheOpnd;
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // A polymorphic cache exists and a distinct polymorphic helper was
            // supplied: switch to it and pass the polymorphic cache.
            JITTimePolymorphicInlineCache * polymorphicInlineCache = src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper,
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd(), isHelper);
        }
        // Helper signature (pushed in reverse): functionBody, inlineCache,
        // inlineCacheIndex, instance, propertyId.
        this->LoadPropertySymAsArgument(ldFldInstr, src);
        this->m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }
    else
    {
        // No inline cache: the helper takes (instance, propertyId, scriptContext).
        LoadScriptContext(ldFldInstr);
        this->LoadPropertySymAsArgument(ldFldInstr, src);
    }

    // Do we need to reload the type and slot array after the helper returns?
    // (We do if there's a propertySymOpnd downstream that needs it, i.e., the type is not dead.)
    IR::RegOpnd *opndBase = src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod, labelBailOut, opndBase, src->AsSymOpnd()->IsPropertySymOpnd() ? src->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
  5005. bool
  5006. Lowerer::GenerateLdFldWithCachedType(IR::Instr * instrLdFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
  5007. {
  5008. IR::Instr *instr;
  5009. IR::Opnd *opnd;
  5010. IR::LabelInstr *labelObjCheckFailed = nullptr;
  5011. IR::LabelInstr *labelTypeCheckFailed = nullptr;
  5012. IR::LabelInstr *labelDone = nullptr;
  5013. Assert(continueAsHelperOut != nullptr);
  5014. *continueAsHelperOut = false;
  5015. Assert(labelHelperOut != nullptr);
  5016. *labelHelperOut = nullptr;
  5017. Assert(typeOpndOut != nullptr);
  5018. *typeOpndOut = nullptr;
  5019. Assert(instrLdFld->GetSrc1()->IsSymOpnd());
  5020. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  5021. {
  5022. return false;
  5023. }
  5024. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  5025. if (!propertySymOpnd->IsTypeCheckSeqCandidate())
  5026. {
  5027. return false;
  5028. }
  5029. AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
  5030. if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
  5031. {
  5032. return false;
  5033. }
  5034. Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind())));
  5035. // In the backwards pass we only add guarded property operations to instructions that are not already
  5036. // protected by an upstream type check.
  5037. Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
  5038. PHASE_PRINT_TESTTRACE(
  5039. Js::ObjTypeSpecPhase,
  5040. this->m_func,
  5041. _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
  5042. Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
  5043. propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
  5044. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  5045. propertySymOpnd->m_inlineCacheIndex,
  5046. propertySymOpnd->GetCacheLayoutString(),
  5047. propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));
  5048. if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
  5049. {
  5050. propertySymOpnd->UpdateSlotForFinalType();
  5051. }
  5052. // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
  5053. // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
  5054. bool hasTypeCheckBailout = instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind());
  5055. // If the hard-coded type is not available here, do a type check, and branch to the helper if the check fails.
  5056. // In the prototype case, we have to check the type even if it was checked upstream, to cover the case where
  5057. // the property has been added locally. Note that this is not necessary if the proto chain has been checked,
  5058. // because then we know there's been no store of the property since the type was checked.
  5059. bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
  5060. bool emitLocalTypeCheck = propertySymOpnd->NeedsLocalTypeCheck();
  5061. bool emitLoadFromProtoTypeCheck = propertySymOpnd->NeedsLoadFromProtoTypeCheck();
  5062. if (emitPrimaryTypeCheck || emitLocalTypeCheck || emitLoadFromProtoTypeCheck)
  5063. {
  5064. if (emitLoadFromProtoTypeCheck)
  5065. {
  5066. propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
  5067. propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
  5068. }
  5069. labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5070. labelObjCheckFailed = hasTypeCheckBailout ? labelTypeCheckFailed : IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5071. *typeOpndOut = this->GenerateCachedTypeCheck(instrLdFld, propertySymOpnd, labelObjCheckFailed, labelTypeCheckFailed);
  5072. }
  5073. IR::Opnd *opndSlotArray;
  5074. if (propertySymOpnd->IsLoadedFromProto())
  5075. {
  5076. opndSlotArray = this->LoadSlotArrayWithCachedProtoType(instrLdFld, propertySymOpnd);
  5077. }
  5078. else
  5079. {
  5080. opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrLdFld, propertySymOpnd);
  5081. }
  5082. // Load the value from the slot, getting the slot ID from the cache.
  5083. uint16 index = propertySymOpnd->GetSlotIndex();
  5084. Assert(index != -1);
  5085. if (opndSlotArray->IsRegOpnd())
  5086. {
  5087. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, this->m_func);
  5088. }
  5089. else
  5090. {
  5091. Assert(opndSlotArray->IsMemRefOpnd());
  5092. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, this->m_func, IR::AddrOpndKindDynamicPropertySlotRef);
  5093. }
  5094. Lowerer::InsertMove(instrLdFld->GetDst(), opnd, instrLdFld);
  5095. // We eliminate the helper, or the type check succeeds, or we bail out before the operation.
  5096. // Either delete the original instruction or replace it with a bailout.
  5097. if (!emitPrimaryTypeCheck && !emitLocalTypeCheck && !emitLoadFromProtoTypeCheck)
  5098. {
  5099. Assert(labelTypeCheckFailed == nullptr);
  5100. AssertMsg(!instrLdFld->HasBailOutInfo(), "Why does a direct field load have bailout?");
  5101. instrLdFld->Remove();
  5102. return true;
  5103. }
  5104. // Otherwise, branch around the bailout or helper.
  5105. labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5106. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
  5107. instrLdFld->InsertBefore(instr);
  5108. // Insert the bailout or helper label here.
  5109. instrLdFld->InsertBefore(labelTypeCheckFailed);
  5110. instrLdFld->InsertAfter(labelDone);
  5111. if (hasTypeCheckBailout)
  5112. {
  5113. AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
  5114. "Why does a field load have a type check bailout, if its type is dead?");
  5115. // Convert the original instruction to a bailout.
  5116. if (instrLdFld->GetBailOutInfo()->bailOutInstr != instrLdFld)
  5117. {
  5118. // Set the cache index in the bailout info so that the bailout code will write it into the
  5119. // bailout record at runtime.
  5120. instrLdFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
  5121. }
  5122. instrLdFld->FreeDst();
  5123. instrLdFld->FreeSrc1();
  5124. instrLdFld->m_opcode = Js::OpCode::BailOut;
  5125. this->GenerateBailOut(instrLdFld);
  5126. return true;
  5127. }
  5128. else
  5129. {
  5130. *continueAsHelperOut = true;
  5131. Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelTypeCheckFailed);
  5132. *labelHelperOut = labelObjCheckFailed;
  5133. return false;
  5134. }
  5135. }
// Fully lowers a field-load instruction (LdFld / LdRootFld and friends).
// First attempts the object-type-specialized path based on the cached type; if that
// does not fully handle the load, optionally emits the inline-cache fast path, and
// finally lowers whatever remains to the given helper calls.
//
// isRoot (template parameter): true when lowering a root-object field load, which
//     additionally enables the non-configurable root-field fast path.
// emitFastPath: whether the inline-cache fast path may be emitted.
// mono/polyHelperAfterFastPath: helpers used when the fast path was emitted.
// mono/polyHelperWithoutFastPath: helpers used when no fast path was emitted.
// Returns the instruction from which lowering should resume (the one that preceded
// the original instruction).
template<bool isRoot>
IR::Instr* Lowerer::GenerateCompleteLdFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath)
{
    // A load that calls an accessor must not carry an implicit-call-only bailout:
    // the accessor call is itself an implicit call.
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }
    IR::Instr* prevInstr = instr->m_prev;
    IR::LabelInstr* labelHelper = nullptr;
    IR::LabelInstr* labelBailOut = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;
    if (isRoot)
    {
        // Don't do the fast path here if emitFastPath is false, even if we can.
        if (emitFastPath && (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd) || this->GenerateNonConfigurableLdRootFld(instr)))
        {
            // The load was fully lowered (or eliminated); nothing left to emit.
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    else
    {
        if (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
        {
            // Fully handled by the cached-type path.
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    if (emitFastPath)
    {
        // Try the inline-cache fast path; if it doesn't fully handle the load,
        // emit the pending helper label (if any) and lower the helper call.
        if (!GenerateFastLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper))
        {
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = LowerLdFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper);
        }
    }
    else
    {
        // No fast path: go straight to the helper call.
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = LowerLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper);
    }
    return prevInstr;
}
// Lowers CheckFixedFld: verifies that the assumptions behind a fixed field (fixed
// method / fixed data property) still hold — via a cached type check and/or a
// property guard check — and converts the original instruction into the bailout
// taken when a check fails. Returns true (the instruction is always fully lowered
// or removed here).
bool
Lowerer::GenerateCheckFixedFld(IR::Instr * instrChkFld)
{
    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instrChkFld->m_func) ||
        !PHASE_OFF(Js::UseFixedDataPropsPhase, instrChkFld->m_func), "Lowering a check fixed field with fixed data/method phase disabled?");
    Assert(instrChkFld->GetSrc1()->IsSymOpnd() && instrChkFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkFld->GetSrc1()->AsPropertySymOpnd();
    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());
    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
    // For the non-configurable properties on the global object we do not need a type check. Otherwise,
    // we need a type check and bailout here unless this operation is part of the type check sequence and
    // is protected by a type check upstream.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
    // In addition, we may also need a local type check in case the property comes from the prototype and
    // it may have been overwritten on the instance after the primary type check upstream. If the property
    // comes from the instance, we must still protect against its value changing after the type check, but
    // for this a cheaper guard check is sufficient (see below).
    bool emitFixedFieldTypeCheck = propertySymOpnd->NeedsCheckFixedFieldTypeCheck() &&
        (!propertySymOpnd->IsTypeChecked() || propertySymOpnd->IsLoadedFromProto());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    OUTPUT_TRACE_FUNC(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Fixed field check: %s, property ID: %d, cache ID: %u, cloned cache: true, layout: %s, redundant check: %s count of props: %u \n"),
        Js::OpCodeUtil::GetOpCodeName(instrChkFld->m_opcode),
        propertySym->m_propertyId,
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"),
        propertySymOpnd->GetGuardedPropOps() ? propertySymOpnd->GetGuardedPropOps()->Count() : 0);
    if (emitPrimaryTypeCheck || emitFixedFieldTypeCheck)
    {
        labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(emitFixedFieldTypeCheck && propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
        {
            AssertMsg(!propertySymOpnd->GetGuardedPropOps() || propertySymOpnd->GetGuardedPropOps()->IsEmpty(), "This property Guard is used only for one property");
            //We need only cheaper Guard check, if the property belongs to the GlobalObject.
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        else
        {
            if (emitFixedFieldTypeCheck)
            {
                // Record this operation as guarded by the type check we are about to emit.
                propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
                propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
            }
            this->GenerateCachedTypeCheck(instrChkFld, propertySymOpnd, labelBailOut, labelBailOut);
        }
    }
    // We may still need this guard if we didn't emit the write protect type check above. This situation arises if we have
    // a fixed field from the instance (not proto) and a property of the same name has been written somewhere between the
    // primary type check and here. Note that we don't need a type check, because we know the fixed field exists on the
    // object even if it has been written since primary type check, but we need to verify the fixed value didn't get overwritten.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && !propertySymOpnd->IsWriteGuardChecked())
    {
        if (!PHASE_OFF(Js::FixedFieldGuardCheckPhase, this->m_func))
        {
            Assert(labelBailOut == nullptr);
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
    }
    // Note that a type handler holds only a weak reference to the singleton instance it represents, so
    // it is possible that the instance gets collected before the type and handler do. Hence, the upstream
    // type check may succeed, even as the original instance no longer exists. However, this would happen
    // only if another instance reached the same type (otherwise we wouldn't ever pass the type check
    // upstream). In that case we would have invalidated all fixed fields on that type, and so the type
    // check (or property guard check, if necessary) above would fail. All in all, we would never attempt
    // to access a fixed field from an instance that has been collected.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && propertySymOpnd->IsWriteGuardChecked())
    {
        // Fully protected upstream: no checks emitted here, so the instruction can simply go away.
        Assert(labelBailOut == nullptr);
        AssertMsg(!instrChkFld->HasBailOutInfo(), "Why does a direct fixed field check have bailout?");
        instrChkFld->Remove();
        return true;
    }
    // On success, branch around the bailout sequence.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkFld->InsertBefore(instr);
    // Insert the helper label here.
    instrChkFld->InsertBefore(labelBailOut);
    instrChkFld->InsertAfter(labelDone);
    // Convert the original instruction to a bailout.
    Assert(instrChkFld->HasBailOutInfo());
    if (instrChkFld->GetBailOutInfo()->bailOutInstr != instrChkFld)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkFld->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkFld->FreeSrc1();
    instrChkFld->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkFld);
    return true;
}
  5290. void
  5291. Lowerer::GenerateCheckObjType(IR::Instr * instrChkObjType)
  5292. {
  5293. Assert(instrChkObjType->GetSrc1()->IsSymOpnd() && instrChkObjType->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
  5294. IR::PropertySymOpnd *propertySymOpnd = instrChkObjType->GetSrc1()->AsPropertySymOpnd();
  5295. // Why do we have an explicit type check if the cached type has been checked upstream? The dead store pass should have
  5296. // removed this instruction.
  5297. Assert(propertySymOpnd->IsTypeCheckSeqCandidate() && !propertySymOpnd->IsTypeChecked());
  5298. // Why do we have an explicit type check on a non-configurable root field load?
  5299. Assert(!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad());
  5300. PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
  5301. uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
  5302. PHASE_PRINT_TESTTRACE(
  5303. Js::ObjTypeSpecPhase,
  5304. this->m_func,
  5305. _u("Object type check: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
  5306. Js::OpCodeUtil::GetOpCodeName(instrChkObjType->m_opcode),
  5307. propertySym->m_propertyId,
  5308. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  5309. inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), _u("false"));
  5310. IR::LabelInstr* labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5311. this->GenerateCachedTypeCheck(instrChkObjType, propertySymOpnd, labelBailOut, labelBailOut);
  5312. IR::LabelInstr* labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5313. IR::Instr* instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
  5314. instrChkObjType->InsertBefore(instr);
  5315. // Insert the bailout label here.
  5316. instrChkObjType->InsertBefore(labelBailOut);
  5317. instrChkObjType->InsertAfter(labelDone);
  5318. // Convert the original instruction to a bailout.
  5319. Assert(instrChkObjType->HasBailOutInfo());
  5320. if (instrChkObjType->GetBailOutInfo()->bailOutInstr != instrChkObjType)
  5321. {
  5322. // Set the cache index in the bailout info so that the bailout code will write it into the
  5323. // bailout record at runtime.
  5324. instrChkObjType->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
  5325. }
  5326. instrChkObjType->FreeSrc1();
  5327. instrChkObjType->m_opcode = Js::OpCode::BailOut;
  5328. this->GenerateBailOut(instrChkObjType);
  5329. }
  5330. void
  5331. Lowerer::LowerAdjustObjType(IR::Instr * instrAdjustObjType)
  5332. {
  5333. IR::AddrOpnd *finalTypeOpnd = instrAdjustObjType->UnlinkDst()->AsAddrOpnd();
  5334. IR::AddrOpnd *initialTypeOpnd = instrAdjustObjType->UnlinkSrc2()->AsAddrOpnd();
  5335. IR::RegOpnd *baseOpnd = instrAdjustObjType->UnlinkSrc1()->AsRegOpnd();
  5336. this->GenerateAdjustBaseSlots(
  5337. instrAdjustObjType, baseOpnd, JITTypeHolder((JITType*)initialTypeOpnd->m_metadata), JITTypeHolder((JITType*)finalTypeOpnd->m_metadata));
  5338. this->m_func->PinTypeRef((JITType*)finalTypeOpnd->m_metadata);
  5339. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrAdjustObjType->m_func);
  5340. this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrAdjustObjType);
  5341. initialTypeOpnd->Free(instrAdjustObjType->m_func);
  5342. instrAdjustObjType->Remove();
  5343. }
  5344. bool
  5345. Lowerer::GenerateNonConfigurableLdRootFld(IR::Instr * instrLdFld)
  5346. {
  5347. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  5348. {
  5349. return false;
  5350. }
  5351. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  5352. if (!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
  5353. {
  5354. return false;
  5355. }
  5356. Assert(!PHASE_OFF(Js::RootObjectFldFastPathPhase, this->m_func));
  5357. Assert(!instrLdFld->HasBailOutInfo());
  5358. IR::Opnd * srcOpnd;
  5359. intptr_t rootObject = this->m_func->GetJITFunctionBody()->GetRootObject();
  5360. if (propertySymOpnd->UsesAuxSlot())
  5361. {
  5362. IR::RegOpnd * auxSlotOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  5363. this->InsertMove(auxSlotOpnd, IR::MemRefOpnd::New((byte *)rootObject + Js::DynamicObject::GetOffsetOfAuxSlots(),
  5364. TyMachPtr, this->m_func), instrLdFld);
  5365. srcOpnd = IR::IndirOpnd::New(auxSlotOpnd, propertySymOpnd->GetSlotIndex() * sizeof(Js::Var *),
  5366. TyVar, this->m_func);
  5367. }
  5368. else
  5369. {
  5370. srcOpnd = IR::MemRefOpnd::New((Js::Var *)rootObject + propertySymOpnd->GetSlotIndex(),
  5371. TyVar, this->m_func);
  5372. }
  5373. instrLdFld->ReplaceSrc1(srcOpnd);
  5374. instrLdFld->m_opcode = Js::OpCode::Ld_A;
  5375. LowererMD::ChangeToAssign(instrLdFld);
  5376. return true;
  5377. }
  5378. IR::Instr *
  5379. Lowerer::LowerDelFld(IR::Instr *delFldInstr, IR::JnHelperMethod helperMethod, bool useInlineCache, bool strictMode)
  5380. {
  5381. IR::Instr *instrPrev;
  5382. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5383. if (strictMode)
  5384. {
  5385. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5386. }
  5387. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5388. LowerLdFld(delFldInstr, helperMethod, helperMethod, useInlineCache);
  5389. return instrPrev;
  5390. }
  5391. IR::Instr *
  5392. Lowerer::LowerIsInst(IR::Instr * isInstInstr, IR::JnHelperMethod helperMethod)
  5393. {
  5394. IR::Instr * instrPrev;
  5395. IR::Instr * instrArg;
  5396. IR::RegOpnd * argOpnd;
  5397. // inlineCache
  5398. instrPrev = m_lowererMD.LoadHelperArgument(isInstInstr, LoadIsInstInlineCacheOpnd(isInstInstr, isInstInstr->GetSrc1()->AsIntConstOpnd()->AsUint32()));
  5399. isInstInstr->FreeSrc1();
  5400. argOpnd = isInstInstr->UnlinkSrc2()->AsRegOpnd();
  5401. Assert(argOpnd->m_sym->m_isSingleDef);
  5402. instrArg = argOpnd->m_sym->m_instrDef;
  5403. argOpnd->Free(m_func);
  5404. // scriptContext
  5405. LoadScriptContext(isInstInstr);
  5406. // instance goes last, so remember it now
  5407. IR::Opnd * instanceOpnd = instrArg->UnlinkSrc1();
  5408. argOpnd = instrArg->UnlinkSrc2()->AsRegOpnd();
  5409. Assert(argOpnd->m_sym->m_isSingleDef);
  5410. instrArg->Remove();
  5411. instrArg = argOpnd->m_sym->m_instrDef;
  5412. argOpnd->Free(m_func);
  5413. // function
  5414. IR::Opnd *opnd = instrArg->UnlinkSrc1();
  5415. m_lowererMD.LoadHelperArgument(isInstInstr, opnd);
  5416. Assert(instrArg->GetSrc2() == NULL);
  5417. instrArg->Remove();
  5418. // instance
  5419. m_lowererMD.LoadHelperArgument(isInstInstr, instanceOpnd);
  5420. m_lowererMD.ChangeToHelperCall(isInstInstr, helperMethod);
  5421. return instrPrev;
  5422. }
// Emits, at the function entry, code that initializes the stack-allocated
// ScriptFunction located at stackSym and links it into the on-stack list of
// stack functions (headed by nextStackFunctionOpnd).
void
Lowerer::GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionInfoPtrPtr nestedInfo)
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
    Assert(nextStackFunctionOpnd);
    IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
    // Take the address of the stack slot that holds the function object.
    IR::RegOpnd * addressOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseAddressOpnd(addressOpnd, func);
    InsertLea(addressOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertBeforeInstr);
    // The environment is not set up until the function is actually created, but the
    // list of stack functions is walked when they need to be boxed, so initialize
    // the environment to the null frame display here.
    GenerateStackScriptFunctionInit(addressOpnd, nestedInfo,
        IR::AddrOpnd::New(func->GetThreadContextInfo()->GetNullFrameDisplayAddr(), IR::AddrOpndKindDynamicMisc, func), insertBeforeInstr);
    // Establish the next link: the previous list tail now points at this function.
    InsertMove(nextStackFunctionOpnd, addressOpnd, insertBeforeInstr);
    // The next-pointer slot lives immediately after the StackScriptFunction.
    this->nextStackFunctionOpnd = IR::SymOpnd::New(stackSym, sizeof(Js::StackScriptFunction), TyMachPtr, func);
}
// Emits code that initializes a ScriptFunction object at regOpnd:
// resolves the FunctionInfo/FunctionProxy from nestedInfo, ensures the deferred
// prototype type exists (calling a helper when it is null), then initializes all
// ScriptFunction fields. isZeroed indicates the backing memory is already
// zero-filled (presumably letting the GenerateMemInit* calls elide zero/null
// stores — see those helpers).
void
Lowerer::GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
    Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    Func * func = this->m_func;
    // functionInfo = *nestedInfo
    IR::Opnd * functionInfoOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(functionInfoOpnd, IR::MemRefOpnd::New(nestedInfo, TyMachPtr, func), insertBeforeInstr);
    // functionProxy = functionInfo->functionProxy
    IR::Opnd * functionProxyOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(functionProxyOpnd, IR::IndirOpnd::New(functionInfoOpnd->AsRegOpnd(), Js::FunctionInfo::GetOffsetOfFunctionProxy(), TyMachPtr, func), insertBeforeInstr);
    // type = functionProxy->deferredPrototypeType
    IR::Opnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeOpnd, IR::IndirOpnd::New(functionProxyOpnd->AsRegOpnd(), Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(),
        TyMachPtr, func), insertBeforeInstr);
    // If the deferred prototype type is null, call the helper to create it.
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    InsertTestBranch(typeOpnd, typeOpnd, Js::OpCode::BrEq_A, labelHelper, insertBeforeInstr);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    InsertBranch(Js::OpCode::Br, labelDone, insertBeforeInstr);
    insertBeforeInstr->InsertBefore(labelHelper);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, functionProxyOpnd);
    IR::Instr * callHelperInstr = IR::Instr::New(Js::OpCode::Call, typeOpnd,
        IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperEnsureFunctionProxyDeferredPrototypeType, func), func);
    insertBeforeInstr->InsertBefore(callHelperInstr);
    m_lowererMD.LowerCall(callHelperInstr, 0);
    insertBeforeInstr->InsertBefore(labelDone);
    // Initialize the ScriptFunction fields: vtable (offset 0), type, aux slots,
    // object array, constructor cache, function info, environment, cached scope
    // object, and the has-inline-caches flag.
    GenerateMemInit(regOpnd, 0, vtableAddressOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfType(), typeOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfAuxSlots(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfObjectArray(), insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfConstructorCache(),
        LoadLibraryValueOpnd(insertBeforeInstr, LibraryValue::ValueConstructorCacheDefaultInstance),
        insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfFunctionInfo(), functionInfoOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfEnvironment(), envOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), insertBeforeInstr, isZeroed);
}
  5476. void
  5477. Lowerer::GenerateStackScriptFunctionInit(IR::RegOpnd * regOpnd, Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr)
  5478. {
  5479. Func * func = this->m_func;
  5480. GenerateScriptFunctionInit(regOpnd,
  5481. LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction),
  5482. nestedInfo, envOpnd, insertBeforeInstr);
  5483. InsertMove(IR::IndirOpnd::New(regOpnd, Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func),
  5484. IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5485. }
// Sets up nextStackFunctionOpnd, the operand that addresses the head of the
// on-stack list of stack-allocated script functions.
void
Lowerer::EnsureStackFunctionListStackSym()
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
#if defined(_M_IX86) || defined(_M_X64)
    // On x86/x64 the list head lives in a dedicated stack slot allocated here;
    // this must be the first local allocation (only the argument slot may precede it).
    Assert(func->m_localStackHeight == (func->HasArgumentSlot()? MachArgsSlotOffset : 0));
    StackSym * stackFunctionListStackSym = StackSym::New(TyMachPtr, func);
    func->StackAllocate(stackFunctionListStackSym, sizeof(Js::ScriptFunction *));
    nextStackFunctionOpnd = IR::SymOpnd::New(stackFunctionListStackSym, TyMachPtr, func);
#else
    // On other architectures the list head is addressed at a fixed offset from
    // the frame register.
    Assert(func->m_localStackHeight == 0);
    nextStackFunctionOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(NULL, FRAME_REG, TyMachReg, func),
        -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachPtr, func);
#endif
}
  5502. void
  5503. Lowerer::AllocStackClosure()
  5504. {
  5505. m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
  5506. m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
  5507. }
  5508. void
  5509. Lowerer::EnsureZeroLastStackFunctionNext()
  5510. {
  5511. Assert(nextStackFunctionOpnd != nullptr);
  5512. Func * func = this->m_func;
  5513. IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
  5514. InsertMove(nextStackFunctionOpnd, IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5515. }
// Emits the stack-allocation fast path for NewScFunc: if the function body's
// Flags_StackNestedFunc flag is set, the nested function object is initialized
// on the stack instead of calling the helper. The helper call that follows this
// code (emitted by the caller) becomes the slow path, branched to via
// labelNoStackFunc. *ppEnvOpnd may be replaced (loop-body case). Returns the
// instruction that assigns the instruction's dst on the fast path.
IR::Instr *
Lowerer::GenerateNewStackScFunc(IR::Instr * newScFuncInstr, IR::RegOpnd ** ppEnvOpnd)
{
    Assert(newScFuncInstr->m_func->DoStackNestedFunc());
    Func * func = newScFuncInstr->m_func;
    uint index = newScFuncInstr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    Assert(index < func->GetJITFunctionBody()->GetNestedCount());
    IR::LabelInstr * labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    // If the body is not flagged for stack nested functions, take the slow path.
    InsertTestBranch(IR::MemRefOpnd::New(func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, func),
        IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, func, true),
        Js::OpCode::BrEq_A, labelNoStackFunc, newScFuncInstr);
    Js::FunctionInfoPtrPtr nestedInfo = func->GetJITFunctionBody()->GetNestedFuncRef(index);
    IR::Instr * instrAssignDst;
    IR::RegOpnd * envOpnd = *ppEnvOpnd;
    if (!func->IsLoopBody())
    {
        // the stackAllocate Call below for this sym is passing a size that is not represented by any IRType and hence passing TyMisc for the constructor
        StackSym * stackSym = StackSym::New(TyMisc, func);
        // ScriptFunction and its next pointer
        this->m_func->StackAllocate(stackSym, sizeof(Js::StackScriptFunction) + sizeof(Js::StackScriptFunction *));
        GenerateStackScriptFunctionInit(stackSym, nestedInfo);
        // Store the environment into the stack function and return its address.
        InsertMove(IR::SymOpnd::New(stackSym, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd,
            newScFuncInstr);
        instrAssignDst =
            InsertLea(newScFuncInstr->GetDst()->AsRegOpnd(), IR::SymOpnd::New(stackSym, TyMachPtr, func), newScFuncInstr);
    }
    else
    {
        // Loop body: the stack functions live in the enclosing function's frame.
        // Recover the environment and the function array pointer from the
        // NewScFuncData pseudo-instruction that defines envOpnd.
        Assert(func->IsTopFunc());
        Assert(func->m_loopParamSym);
        IR::Instr * envDefInstr = envOpnd->AsRegOpnd()->m_sym->m_instrDef;
        Assert(envDefInstr && envDefInstr->m_opcode == Js::OpCode::NewScFuncData);
        IR::RegOpnd * opndFuncPtr = envDefInstr->UnlinkSrc2()->AsRegOpnd();
        Assert(opndFuncPtr);
        envOpnd = envDefInstr->UnlinkSrc1()->AsRegOpnd();
        Assert(envOpnd);
        *ppEnvOpnd = envOpnd;
        envDefInstr->Remove();
        if (index != 0)
        {
            // Index into the array of StackScriptFunctions by nested-function index.
            IR::RegOpnd * opnd = IR::RegOpnd::New(TyVar, func);
            InsertAdd(false, opnd, opndFuncPtr, IR::IntConstOpnd::New(index * sizeof(Js::StackScriptFunction), TyMachPtr, func), newScFuncInstr);
            opndFuncPtr = opnd;
        }
        InsertMove(IR::IndirOpnd::New(opndFuncPtr, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd, newScFuncInstr);
        instrAssignDst = InsertMove(newScFuncInstr->GetDst(), opndFuncPtr, newScFuncInstr);
    }
    // Skip the slow path (the helper call emitted by the caller after labelNoStackFunc).
    InsertBranch(Js::OpCode::Br, labelDone, newScFuncInstr);
    newScFuncInstr->InsertBefore(labelNoStackFunc);
    newScFuncInstr->InsertAfter(labelDone);
    return instrAssignDst;
}
  5571. IR::Instr *
  5572. Lowerer::LowerNewScFunc(IR::Instr * newScFuncInstr)
  5573. {
  5574. IR::Instr *stackNewScFuncInstr = nullptr;
  5575. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5576. if (newScFuncInstr->m_func->DoStackNestedFunc())
  5577. {
  5578. stackNewScFuncInstr = GenerateNewStackScFunc(newScFuncInstr, &envOpnd);
  5579. }
  5580. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5581. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5582. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScFunc );
  5583. return stackNewScFuncInstr == nullptr? instrPrev : stackNewScFuncInstr;
  5584. }
  5585. IR::Instr *
  5586. Lowerer::LowerNewScGenFunc(IR::Instr * newScFuncInstr)
  5587. {
  5588. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5589. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5590. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5591. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScGenFunc );
  5592. return instrPrev;
  5593. }
  5594. ///----------------------------------------------------------------------------
  5595. ///
  5596. /// Lowerer::LowerScopedLdFld
  5597. ///
/// Lower a load instruction that takes an additional instance to use as a
/// default if the scope chain provided doesn't contain the property.
  5600. ///
  5601. ///----------------------------------------------------------------------------
  5602. IR::Instr *
  5603. Lowerer::LowerScopedLdFld(IR::Instr * ldFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache)
  5604. {
  5605. IR::Opnd *src;
  5606. IR::Instr *instrPrev = ldFldInstr->m_prev;
  5607. if(!withInlineCache)
  5608. {
  5609. LoadScriptContext(ldFldInstr);
  5610. }
  5611. src = ldFldInstr->UnlinkSrc2();
  5612. AssertMsg(src->IsRegOpnd(), "Expected reg opnd as src2");
  5613. instrPrev = m_lowererMD.LoadHelperArgument(ldFldInstr, src);
  5614. src = ldFldInstr->UnlinkSrc1();
  5615. AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
  5616. this->LoadPropertySymAsArgument(ldFldInstr, src);
  5617. if (withInlineCache)
  5618. {
  5619. AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
  5620. m_lowererMD.LoadHelperArgument(
  5621. ldFldInstr,
  5622. IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
  5623. // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
  5624. this->m_lowererMD.LoadHelperArgument(ldFldInstr, this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd()));
  5625. m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
  5626. }
  5627. m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod);
  5628. return instrPrev;
  5629. }
  5630. ///----------------------------------------------------------------------------
  5631. ///
  5632. /// Lowerer::LowerScopedLdInst
  5633. ///
/// Lower a load instruction that takes an additional instance to use as a
/// default if the scope chain provided doesn't contain the property.
  5636. ///
  5637. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedLdInst(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    IR::Opnd *src;
    IR::Instr *instrPrev;

    // last argument is the scriptContext
    instrPrev = LoadScriptContext(instr);

    src = instr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected Reg opnd as src2");

    // __out Var*. The StackSym is allocated in irbuilder, and here we need to insert a lea
    // so the helper can write its result through the stack slot's address.
    StackSym* dstSym = src->GetStackSym();
    IR::Instr *load = m_lowererMD.LoadStackAddress(dstSym);
    instr->InsertBefore(load);
    IR::Opnd* tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instr, tempOpnd);

    // now 3rd last argument is the rootObject of the function. Need to add addrOpnd to
    // pass in the address of the rootObject.
    IR::Opnd * srcOpnd;
    intptr_t rootObject = m_func->GetJITFunctionBody()->GetRootObject();
    srcOpnd = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, instr->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, srcOpnd);

    // no change, the property field built from irbuilder.
    src = instr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(instr, src);

    instrPrev = m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    // After the call, copy the value the helper stored through the out-param
    // stack slot into the destination register.
    IR::RegOpnd* regOpnd = IR::RegOpnd::New(dstSym, TyVar, m_func);
    IR::SymOpnd*symOpnd = IR::SymOpnd::New(dstSym, TyVar, m_func);
    this->m_lowererMD.CreateAssign(regOpnd, symOpnd, instrPrev);

    return instrPrev;
}
  5669. IR::Instr *
  5670. Lowerer::LowerScopedDelFld(IR::Instr * delFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache, bool strictMode)
  5671. {
  5672. IR::Instr *instrPrev;
  5673. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5674. if (strictMode)
  5675. {
  5676. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5677. }
  5678. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5679. LowerScopedLdFld(delFldInstr, helperMethod, withInlineCache);
  5680. return instrPrev;
  5681. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerProfiledStFld
///
/// Lower a field store that must go through the profiling helpers so that the
/// interpreter profile data is updated at runtime. Chooses between the
/// InitFld / StFld / StRootFld / StSuperFld profiled helpers (with strict-mode
/// variants) based on the opcode and property-operation flags.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerProfiledStFld(IR::JitProfilingInstr *stFldInstr, Js::PropertyOperationFlags flags)
{
    Assert(stFldInstr->profileId == Js::Constants::NoProfileId);

    IR::Instr *const instrPrev = stFldInstr->m_prev;

    /*
        void ProfilingHelpers::ProfiledInitFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStSuperFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer,
            const Var thisInstance)
        {
    */

    // Arguments are loaded last-to-first relative to the signatures above:
    // frame pointer first, then (for StSuperFld) the 'this' instance, the
    // stored value, the cache index, and finally the property/instance pair.
    m_lowererMD.LoadHelperArgument(stFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));

    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }

    m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc1());

    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    m_lowererMD.LoadHelperArgument(
        stFldInstr,
        IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

    LoadPropertySymAsArgument(stFldInstr, dst);

    // Pick the profiled helper matching the opcode and flags.
    IR::JnHelperMethod helper;
    switch (stFldInstr->m_opcode)
    {
        case Js::OpCode::InitFld:
        case Js::OpCode::InitRootFld:
            helper = IR::HelperProfiledInitFld;
            break;

        case Js::OpCode::StSuperFld:
            helper = IR::HelperProfiledStSuperFld;
            break;

        default:
            helper =
                flags & Js::PropertyOperation_Root
                    ? flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStRootFld_Strict : IR::HelperProfiledStRootFld
                    : flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStFld_Strict : IR::HelperProfiledStFld;
            break;
    }

    stFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(stFldInstr, 0);

    return instrPrev;
}
  5742. ///----------------------------------------------------------------------------
  5743. ///
  5744. /// Lowerer::LowerStFld
  5745. ///
  5746. ///----------------------------------------------------------------------------
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStFld
///
/// Lower a field store to a helper call. Depending on the presence of a
/// runtime polymorphic inline cache, either the monomorphic helper
/// (helperMethod) or the polymorphic helper (polymorphicHelperMethod) is
/// called; withInlineCache controls whether the cache-related arguments
/// (function body, inline cache, cache index) are passed at all.
/// JIT-profiling instructions are diverted to LowerProfiledStFld instead.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStFld(
    IR::Instr * stFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool withInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper,
    bool withPutFlags,
    Js::PropertyOperationFlags flags)
{
    if (stFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledStFld(stFldInstr->AsJitProfilingInstr(), flags);
    }

    IR::Instr *instrPrev = stFldInstr->m_prev;
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");

    IR::Opnd * inlineCacheOpnd = nullptr;
    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // A polymorphic cache is available and the caller supplied a distinct
            // polymorphic helper; switch to it and pass that cache.
            JITTimePolymorphicInlineCache * polymorphicInlineCache = dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd(), isHelper);
        }
    }

    if (withPutFlags)
    {
        // Flags (e.g. strict mode, root object) are the helper's last argument.
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    IR::Opnd *src = stFldInstr->UnlinkSrc1();
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // StSuperFld additionally passes the 'this' instance (src2).
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }

    m_lowererMD.LoadHelperArgument(stFldInstr, src);
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        Assert(inlineCacheOpnd != nullptr);

        this->m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        this->m_lowererMD.LoadHelperArgument(stFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    // Keep the property owner alive across the call and convert the store
    // into the helper call, wiring up bailout if a bailout label was given.
    IR::RegOpnd *opndBase = dst->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod, labelBailOut, opndBase, dst->AsSymOpnd()->IsPropertySymOpnd() ? dst->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
// Lower a field store end-to-end: try the special-cased custom-property fast
// path, then the cached-type (object-type-spec) path, then the generic inline
// cache fast path, and finally fall back to the appropriate helper call.
// The mono/poly helper pairs select which runtime helpers back the slow path
// with and without a preceding fast path.
IR::Instr* Lowerer::GenerateCompleteStFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        // Accessor-calling stores must not carry implicit-call bailouts.
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }

    IR::Instr* prevInstr = instr->m_prev;

    IR::LabelInstr* labelBailOut = nullptr;
    IR::LabelInstr* labelHelper = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;

    if(emitFastPath && GenerateFastStFldForCustomProperty(instr, &labelHelper))
    {
        if(labelHelper)
        {
            // Fast path emitted but still needs a helper fallback.
            Assert(labelHelper->isOpHelper);
            instr->InsertBefore(labelHelper);
            prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
        else
        {
            // Fast path fully covered the store; the original instr is gone.
            instr->Remove();
            return prevInstr;
        }
    }
    else if (this->GenerateStFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
    {
        // Store was eliminated entirely by the cached-type path.
        Assert(labelHelper == nullptr);
        return prevInstr;
    }
    else if (emitFastPath)
    {
        if (!GenerateFastStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper, withPutFlags, flags))
        {
            // Fast path couldn't be emitted (or needs a fallback); lower the
            // helper call, reusing any helper label produced so far.
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = this->LowerStFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
    }
    else
    {
        // No fast path requested. NOTE(review): labelHelper can only be
        // non-null here if GenerateStFldWithCachedType set it on failure.
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        // Intentionally passes the mono helper for both slots so the
        // polymorphic-cache branch in LowerStFld is not taken.
        prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, monoHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
    }

    return prevInstr;
}
// Emit the raw slot write for a field store whose type (and hence slot index)
// is known from the cached type: load the slot array, then store src1 into
// slot[index]. On x86/x64 with the JIT write barrier enabled, the store goes
// through the recycler write barrier.
void
Lowerer::GenerateDirectFieldStore(IR::Instr* instrStFld, IR::PropertySymOpnd* propertySymOpnd)
{
    Func* func = instrStFld->m_func;

    IR::Opnd *opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrStFld, propertySymOpnd);

    // Store the value to the slot, getting the slot index from the cache.
    uint16 index = propertySymOpnd->GetSlotIndex();
    Assert(index != -1);

#if defined(RECYCLER_WRITE_BARRIER_JIT) && (defined(_M_IX86) || defined(_M_AMD64))
    if (opndSlotArray->IsRegOpnd())
    {
        IR::IndirOpnd * opndDst = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
        this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
    }
    else
    {
        // The slot array is at a fixed address; store via a memory reference.
        Assert(opndSlotArray->IsMemRefOpnd());
        IR::MemRefOpnd * opndDst = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
        this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
    }
#else
    IR::Opnd *opnd;

    if (opndSlotArray->IsRegOpnd())
    {
        opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
    }
    else
    {
        opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
    }

    this->m_lowererMD.CreateAssign(opnd, instrStFld->GetSrc1(), instrStFld);
#endif
}
// Try to lower a field store using object-type-spec cached type information.
// Returns true when the store was fully lowered here (and instrStFld consumed,
// either as direct stores or as a bailout); returns false when the caller must
// still emit the fast path / helper call. On a false return with
// *continueAsHelperOut set, *labelHelperOut is the label at which the caller
// should continue lowering, and *typeOpndOut (if non-null) holds the object's
// type already loaded into a register.
bool
Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
{
    IR::Instr *instr;
    IR::RegOpnd *typeOpnd = nullptr;
    IR::LabelInstr* labelObjCheckFailed  = nullptr;
    IR::LabelInstr *labelTypeCheckFailed = nullptr;
    IR::LabelInstr *labelBothTypeChecksFailed = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    Assert(continueAsHelperOut != nullptr);
    *continueAsHelperOut = false;

    Assert(labelHelperOut != nullptr);
    *labelHelperOut = nullptr;

    Assert(typeOpndOut != nullptr);
    *typeOpndOut = nullptr;

    Assert(instrStFld->GetDst()->IsSymOpnd());
    if (!instrStFld->GetDst()->AsSymOpnd()->IsPropertySymOpnd() || !instrStFld->GetDst()->AsPropertySymOpnd()->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    IR::PropertySymOpnd *propertySymOpnd = instrStFld->GetDst()->AsPropertySymOpnd();

    // If we have any object type spec info, we better not believe this is a load from prototype, since this is a store
    // and we never share inline caches between loads and stores.
    Assert(!propertySymOpnd->HasObjTypeSpecFldInfo() || !propertySymOpnd->IsLoadedFromProto());

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");

    if (!propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
    {
        return false;
    }

    Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind())));

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field store: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(),
        propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));

    if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
    {
        // Make the slot index reflect the layout after the final type transition.
        propertySymOpnd->UpdateSlotForFinalType();
    }

    Func* func = instrStFld->m_func;

    // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
    // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.

    bool hasTypeCheckBailout = instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind());

    // If the type hasn't been checked upstream, see if it makes sense to check it here.
    bool isTypeChecked = propertySymOpnd->IsTypeChecked();
    if (!isTypeChecked)
    {
        // If the initial type has been checked, we can do a hard coded type transition without any type checks
        // (see GenerateStFldWithCachedFinalType), which is always worth doing, even if the type is not needed
        // downstream. We're not introducing any additional bailouts.
        if (propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType() && !propertySymOpnd->IsTypeDead())
        {
            // We have a final type in hand, so we can JIT (most of) the type transition work.
            return this->GenerateStFldWithCachedFinalType(instrStFld, propertySymOpnd);
        }

        if (propertySymOpnd->HasTypeMismatch())
        {
            // So we have a type mismatch, which happens when the type (and the type without property if ObjTypeSpecStore
            // is on) on this instruction didn't match the live type value according to the flow. We must have hit some
            // stale inline cache (perhaps inlined from a different function, or on a code path not taken for a while).
            // Either way, we know exactly what type the object must have at this point (fully determined by flow), but
            // we don't know whether that type already has the property we're storing here. All in all, we know exactly
            // what shape the object will have after this operation, but we're not sure what label (type) to give this
            // shape. Thus we can simply let the fast path do its thing based on the live inline cache. The downstream
            // instructions relying only on this shape (loads and stores) are safe, and those that need the next type
            // (i.e. adds) will do the same thing as this instruction.
            return false;
        }

        // If we're still here then we must need a primary type check on this instruction to protect
        // a sequence of field operations downstream, or a local type check for an isolated field store.
        Assert(propertySymOpnd->NeedsPrimaryTypeCheck() || propertySymOpnd->NeedsLocalTypeCheck());

        labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelBothTypeChecksFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        // With a type check bailout, a failed object (tagged value) check goes
        // straight to the bailout path; otherwise it gets its own helper label.
        labelObjCheckFailed = hasTypeCheckBailout ? labelBothTypeChecksFailed : IR::LabelInstr::New(Js::OpCode::Label, func, true);
        typeOpnd = this->GenerateCachedTypeCheck(instrStFld, propertySymOpnd, labelObjCheckFailed, labelBothTypeChecksFailed, labelTypeCheckFailed);
        *typeOpndOut = typeOpnd;
    }

    // Either we are protected by a type check upstream or we just emitted a type check above,
    // now it's time to store the field value.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);

    // If we are protected by a type check upstream, we don't need a bailout or helper here, delete the instruction
    // and return "true" to indicate that we succeeded in eliminating it.
    if (isTypeChecked)
    {
        Assert(labelTypeCheckFailed == nullptr && labelBothTypeChecksFailed == nullptr);
        AssertMsg(!instrStFld->HasBailOutInfo(), "Why does a direct field store have bailout?");
        instrStFld->Remove();
        return true;
    }

    // Otherwise, branch around the helper on successful type check.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
    instrStFld->InsertBefore(instr);

    // On failed type check, try the type without property if we've got one.
    instrStFld->InsertBefore(labelTypeCheckFailed);

    // Caution, this is one of the dusty corners of the JIT. We only get here if this is an isolated StFld which adds a property, or
    // ObjTypeSpecStore is off. In the former case no downstream operations depend on the final type produced here, and we can fall
    // back on live cache and helper if the type doesn't match. In the latter we may have a cache with type transition, which must
    // produce a value for the type after transition, because that type is consumed downstream. Thus, if the object's type doesn't
    // match either the type with or the type without the property we're storing, we must bail out here.
    bool emitAddProperty = propertySymOpnd->IsMono() && propertySymOpnd->HasInitialType();

    if (emitAddProperty)
    {
        GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, typeOpnd, labelBothTypeChecksFailed);
        GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetType());
        instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
        instrStFld->InsertBefore(instr);
    }

    instrStFld->InsertBefore(labelBothTypeChecksFailed);
    instrStFld->InsertAfter(labelDone);

    if (hasTypeCheckBailout)
    {
        AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
            "Why does a field store have a type check bailout, if its type is dead?");

        if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld)
        {
            // Set the cache index in the bailout info so that the generated code will write it into the
            // bailout record at runtime.
            instrStFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
        }
        else
        {
            Assert(instrStFld->GetBailOutInfo()->polymorphicCacheIndex == propertySymOpnd->m_inlineCacheIndex);
        }

        // Convert the original store into the bailout itself.
        instrStFld->m_opcode = Js::OpCode::BailOut;
        instrStFld->FreeSrc1();
        instrStFld->FreeDst();

        this->GenerateBailOut(instrStFld);
        return true;
    }
    else
    {
        // No bailout available: hand the failed-check path back to the caller
        // to finish as a helper call.
        *continueAsHelperOut = true;
        Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelBothTypeChecksFailed);
        *labelHelperOut = labelObjCheckFailed;
        return false;
    }
}
// Emit the (primary) cached type check for an object-type-spec sequence:
// test that the property owner is an object, load its type, and compare it
// against the expected type (directly, via a property guard, or via the
// equivalent-type-check helper). Returns the register holding the loaded
// type so downstream checks can reuse it. Branch targets:
//   labelObjCheckFailed  - owner is a tagged value (not an object)
//   labelTypeCheckFailed - type (or equivalence) check failed
//   labelSecondChance    - if non-null, taken instead of labelTypeCheckFailed
//                          on a failed direct compare (e.g. to try the
//                          type-without-property next)
IR::RegOpnd *
Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr* labelObjCheckFailed, IR::LabelInstr *labelTypeCheckFailed, IR::LabelInstr *labelSecondChance)
{
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());

    Func* func = instrChk->m_func;
    IR::RegOpnd *regOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(func);
    regOpnd->SetValueType(propertySymOpnd->GetPropertyOwnerValueType());

    if (!regOpnd->IsNotTaggedValue())
    {
        // Owner may still be a tagged int/float; verify it's an object first.
        m_lowererMD.GenerateObjectTest(regOpnd, instrChk, labelObjCheckFailed);
    }

    IR::Opnd *expectedTypeOpnd;
    bool emitDirectCheck = true;

    // Note: don't attempt equivalent type check if we're doing a final type optimization or if we have a monomorphic
    // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks
    // repeatedly and never rejit.
    bool doEquivTypeCheck =
        propertySymOpnd->HasEquivalentTypeSet() &&
        !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) &&
        !propertySymOpnd->MustDoMonoCheck() &&
        (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut());
    Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut());

    JITTypeHolder type = propertySymOpnd->MustDoMonoCheck() ? propertySymOpnd->GetMonoGuardType() :
        doEquivTypeCheck ? propertySymOpnd->GetFirstEquivalentType() : propertySymOpnd->GetType();

    // A property guard lets the runtime invalidate this check; without one we
    // compare against the type address directly.
    Js::PropertyGuard* typeCheckGuard = doEquivTypeCheck ?
        (Js::PropertyGuard*)CreateEquivalentTypeGuardAndLinkToGuardedProperties(type, propertySymOpnd) :
        (Js::PropertyGuard*)CreateTypePropertyGuardForGuardedProperties(type, propertySymOpnd);

    if (typeCheckGuard == nullptr)
    {
        Assert(type != nullptr);
        expectedTypeOpnd = IR::AddrOpnd::New(type->GetAddr(), IR::AddrOpndKindDynamicType, func, true);
    }
    else
    {
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        if (this->m_func->IsOOPJIT())
        {
            // OOP JIT: the guard lives in the native code data block; address
            // it relative to the native-code-data base register.
            int typeCheckGuardOffset = NativeCodeData::GetDataTotalOffset(typeCheckGuard);
            expectedTypeOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), typeCheckGuardOffset, TyMachPtr,
#if DBG
                NativeCodeData::GetDataDescription(typeCheckGuard, func->m_alloc),
#endif
                func, true);
            this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typeCheckGuard->GetAddressOfValue()), TyMachPtr, func, IR::AddrOpndKindDynamicGuardValueRef);
        }
        emitDirectCheck = false;
    }

    if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func))
    {
        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check for type 0x%p"),
            emitDirectCheck ? _u("direct") : propertySymOpnd->IsPoly() ? _u("equivalent") : _u("indirect"), type);
#if DBG
        if (propertySymOpnd->GetGuardedPropOps() != nullptr)
        {
            Output::Print(_u(" guarding operations:\n    "));
            propertySymOpnd->GetGuardedPropOps()->Dump();
        }
        else
        {
            Output::Print(_u("\n"));
        }
#else
        Output::Print(_u("\n"));
#endif
        Output::Flush();
    }

    // Load the owner's type, from the constant object's type field when the
    // owner itself is a compile-time constant.
    IR::RegOpnd* typeOpnd = IR::RegOpnd::New(TyMachReg, func);
    IR::Opnd *sourceType;
    if (regOpnd->m_sym->IsConst() && !regOpnd->m_sym->IsIntConst() && !regOpnd->m_sym->IsFloatConst())
    {
        sourceType = IR::MemRefOpnd::New((BYTE*)regOpnd->m_sym->GetConstAddress() +
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func, IR::AddrOpndKindDynamicObjectTypeRef);
    }
    else
    {
        sourceType = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
    }
    m_lowererMD.CreateAssign(typeOpnd, sourceType, instrChk);

    if (doEquivTypeCheck)
    {
        // TODO (ObjTypeSpec): For isolated equivalent type checks it would be good to emit a check if the cache is still valid, and
        // if not go straight to live polymorphic cache. This way we wouldn't have to bail out and re-JIT, and also wouldn't continue
        // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
        // type in the equivalent type cache.

        // Fast path: compare against the guard value first; only on mismatch
        // call the equivalence-check helper.
        IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);

        IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
        InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);

        instrChk->InsertBefore(labelCheckEquivalentType);

        IR::Opnd* typeCheckGuardOpnd = nullptr;
        if (this->m_func->IsOOPJIT())
        {
            typeCheckGuardOpnd = IR::RegOpnd::New(TyMachPtr, func);
            int typeCheckGuardOffset = NativeCodeData::GetDataTotalOffset(typeCheckGuard);
            Lowerer::InsertLea(
                typeCheckGuardOpnd->AsRegOpnd(),
                IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), typeCheckGuardOffset, TyMachPtr,
#if DBG
                    NativeCodeData::GetDataDescription(typeCheckGuard, func->m_alloc),
#endif
                    func, true),
                instrChk);
            this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            typeCheckGuardOpnd = IR::AddrOpnd::New((Js::Var)typeCheckGuard, IR::AddrOpndKindDynamicTypeCheckGuard, func, true);
        }

        // Call CheckIfTypeIsEquivalent(type, guard); non-zero result means equivalent.
        this->m_lowererMD.LoadHelperArgument(instrChk, typeCheckGuardOpnd);
        this->m_lowererMD.LoadHelperArgument(instrChk, typeOpnd);

        IR::RegOpnd* equivalentTypeCheckResultOpnd = IR::RegOpnd::New(TyUint8, func);
        IR::HelperCallOpnd* equivalentTypeCheckHelperCallOpnd = IR::HelperCallOpnd::New(
            propertySymOpnd->HasFixedValue() ? IR::HelperCheckIfTypeIsEquivalentForFixedField : IR::HelperCheckIfTypeIsEquivalent, func);
        IR::Instr* equivalentTypeCheckCallInstr = IR::Instr::New(Js::OpCode::Call, equivalentTypeCheckResultOpnd, equivalentTypeCheckHelperCallOpnd, func);
        instrChk->InsertBefore(equivalentTypeCheckCallInstr);
        this->m_lowererMD.LowerCall(equivalentTypeCheckCallInstr, 0);

        InsertTestBranch(equivalentTypeCheckResultOpnd, equivalentTypeCheckResultOpnd, Js::OpCode::BrEq_A, labelTypeCheckFailed, instrChk);

        // TODO (ObjTypeSpec): Consider emitting a shared bailout to which a specific bailout kind is written at runtime. This would allow us to distinguish
        // between non-equivalent type and other cases, such as invalidated guard (due to fixed field overwrite, perhaps) or too much thrashing on the
        // equivalent type cache. We could determine bailout kind based on the value returned by the helper. In the case of cache thrashing we could just
        // turn off the whole optimization for a given function.

        instrChk->InsertBefore(labelTypeCheckSucceeded);
    }
    else
    {
        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
    }

    // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
    // as long as there are other objects with types equivalent on the properties referenced by this code. The type is kept alive until entry point
    // installation by the JIT transfer data, and after that by the equivalent type cache, so it will stay alive unless or until it gets evicted
    // from the cache.
    if (!doEquivTypeCheck)
    {
        PinTypeRef(type, type.t, instrChk, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
    }

    return typeOpnd;
}
  6187. void
  6188. Lowerer::PinTypeRef(JITTypeHolder type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
  6189. {
  6190. this->m_func->PinTypeRef(typeRef);
  6191. if (PHASE_TRACE(Js::TracePinnedTypesPhase, this->m_func))
  6192. {
  6193. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6194. Output::Print(_u("PinnedTypes: function %s(%s) instr %s property ID %u pinned %s reference 0x%p to type 0x%p.\n"),
  6195. this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
  6196. Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), propertyId,
  6197. typeRef == type.t ? _u("strong") : _u("weak"), typeRef, type.t);
  6198. Output::Flush();
  6199. }
  6200. }
// Emit a check that the object currently has the cached "type without the
// property" (the initial type before the add-property transition). If typeOpnd
// already holds the object's type it is reused; otherwise the type is loaded
// here (including an object test on the owner). Branches to
// labelTypeCheckFailed on mismatch. Compares against a type property guard
// when one can be created (so the runtime can invalidate it), otherwise
// against the type address directly.
void
Lowerer::GenerateCachedTypeWithoutPropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::Opnd *typeOpnd, IR::LabelInstr *labelTypeCheckFailed)
{
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasInitialType());

    JITTypeHolder typeWithoutProperty = propertySymOpnd->GetInitialType();

    // We should never add properties to objects of static types.
    Assert(Js::DynamicType::Is(typeWithoutProperty->GetTypeId()));

    if (typeOpnd == nullptr)
    {
        // No opnd holding the type was passed in, so we have to load the type here.
        IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
        if (!baseOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelTypeCheckFailed);
        }

        IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);
    }

    Js::JitTypePropertyGuard* typePropertyGuard = CreateTypePropertyGuardForGuardedProperties(typeWithoutProperty, propertySymOpnd);

    IR::Opnd *expectedTypeOpnd;

    if (typePropertyGuard)
    {
        bool emitDirectCheck = true;

        Assert(typePropertyGuard != nullptr);
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));

        if (this->m_func->IsOOPJIT())
        {
            // OOP JIT: the guard lives in the native code data block; address
            // it relative to the native-code-data base register.
            int typeCheckGuardOffset = NativeCodeData::GetDataTotalOffset(typePropertyGuard);
            expectedTypeOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), typeCheckGuardOffset, TyMachPtr,
#if DBG
                NativeCodeData::GetDataDescription(typePropertyGuard, this->m_func->m_alloc),
#endif
                this->m_func, true);
            this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typePropertyGuard->GetAddressOfValue()), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
        }
        emitDirectCheck = false;

        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check for type 0x%p.\n"),
            emitDirectCheck ? _u("direct") : _u("indirect"), typeWithoutProperty->GetAddr());
    }
    else
    {
        expectedTypeOpnd = IR::AddrOpnd::New(typeWithoutProperty->GetAddr(), IR::AddrOpndKindDynamicType, m_func, true);
    }

    InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelTypeCheckFailed, instrInsert);

    // Technically, it should be enough to pin the final type, because it should keep all of its predecessors alive, but
    // just to be extra cautious, let's pin the initial type as well.
    PinTypeRef(typeWithoutProperty, typeWithoutProperty.t, instrInsert, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
}
void
Lowerer::GenerateFixedFieldGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    // A fixed-field guard check is just the generic property guard check: verify the guard's
    // value (or register the property for lazy bailout) before insertPointInstr, branching to
    // labelBailOut if the guard has been invalidated.
    GeneratePropertyGuardCheck(insertPointInstr, propertySymOpnd, labelBailOut);
}
Js::JitTypePropertyGuard*
Lowerer::CreateTypePropertyGuardForGuardedProperties(JITTypeHolder type, IR::PropertySymOpnd* propertySymOpnd)
{
    // Creates (at most one) single-type property guard for `type` and links it to every guarded
    // property of this operation. Returns nullptr when no guard was created — either because the
    // function has no shared property guards, or because lazy fixed-type bailout records the
    // properties instead of emitting guards.

    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->GetGuardedPropOps() != nullptr);

    Js::JitTypePropertyGuard* guard = nullptr;

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        // Consider (ObjTypeSpec): Because we allocate these guards from the JIT thread we can't share guards for the same type across multiple functions.
        // This leads to proliferation of property guards on the thread context. The alternative would be to pre-allocate shared (by value) guards
        // from the thread context during work item creation. We would create too many of them (because some types aren't actually used as guards),
        // but we could share a guard for a given type between functions. This may ultimately be better.
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [this, type, &guard](Js::PropertyId propertyId)
        {
            if (DoLazyFixedTypeBailout(this->m_func))
            {
                // Lazy bailout mode: record the property for invalidation-driven bailout instead
                // of creating an inline guard.
                this->m_func->lazyBailoutProperties.Item(propertyId);
            }
            else
            {
                if (guard == nullptr)
                {
                    // One guard (per type) is shared across all guarded properties of this operation;
                    // create it lazily on the first property that needs it.
                    guard = this->m_func->GetOrCreateSingleTypeGuard(type->GetAddr());
                }

                if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("ObjTypeSpec: function %s(%s) registered guard 0x%p with value 0x%p for property ID %u.\n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                        guard, guard->GetValue(), propertyId);
                    Output::Flush();
                }

                this->m_func->EnsurePropertyGuardsByPropertyId();
                this->m_func->LinkGuardToPropertyId(propertyId, guard);
            }
        });
    }

    return guard;
}
Js::JitEquivalentTypeGuard*
Lowerer::CreateEquivalentTypeGuardAndLinkToGuardedProperties(JITTypeHolder type, IR::PropertySymOpnd* propertySymOpnd)
{
    // Creates an equivalent-type guard for this operation, links it to all guarded properties
    // (when shared property guards exist), and populates the guard's equivalent-type cache with
    // the operation's type set plus the per-property slot info used by the slow equivalence check.

    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->HasObjTypeSpecFldInfo() && propertySymOpnd->HasEquivalentTypeSet() && propertySymOpnd->GetGuardedPropOps());

    Js::JitEquivalentTypeGuard* guard = this->m_func->CreateEquivalentTypeGuard(type, propertySymOpnd->GetObjTypeSpecFldId());

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
        {
            if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("ObjTypeSpec: function %s(%s) registered equivalent type spec guard 0x%p with value 0x%p for property ID %u.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    guard, guard->GetValue(), propertyId);
                Output::Flush();
            }

            this->m_func->EnsurePropertyGuardsByPropertyId();
            this->m_func->LinkGuardToPropertyId(propertyId, guard);
        });
    }

    Assert(guard->GetCache() != nullptr);
    Js::EquivalentTypeCache* cache = guard->GetCache();

    // TODO (ObjTypeSpec): If we delayed populating the types until encoder, we could bulk allocate all equivalent type caches
    // in one block from the heap. This would allow us to not allocate them from the native code data allocator and free them
    // when no longer needed. However, we would need to store the global property operation ID in the guard, so we can look up
    // the info in the encoder. Perhaps we could overload the cache pointer to be the ID until encoder.

    // Copy types from the type set to the guard's cache, clamped to the cache's fixed capacity.
    Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
    uint16 cachedTypeCount = typeSet->GetCount() < EQUIVALENT_TYPE_CACHE_SIZE ? typeSet->GetCount() : EQUIVALENT_TYPE_CACHE_SIZE;
    for (uint16 ti = 0; ti < cachedTypeCount; ti++)
    {
        cache->types[ti] = (Js::Type*)typeSet->GetType(ti)->GetAddr();
    }

    // Populate property ID and slot index arrays on the guard's cache. We iterate over the
    // bit vector of property operations protected by this guard, but some property operations
    // may be referring to the same property ID (but not share the same cache). We skip
    // redundant entries by maintaining a hash set of property IDs we've already encountered.
    auto propOps = propertySymOpnd->GetGuardedPropOps();
    uint propOpCount = propOps->Count();

    bool isTypeStatic = Js::StaticType::Is(type->GetTypeId());
    // `propIds` maps each property ID to its already-emitted entry (for dedupe); `properties`
    // is the (over-allocated to propOpCount) scratch array the deduped entries are written into.
    JsUtil::BaseDictionary<Js::PropertyId, Js::EquivalentPropertyEntry*, JitArenaAllocator> propIds(this->m_alloc, propOpCount);
    Js::EquivalentPropertyEntry* properties = AnewArray(this->m_alloc, Js::EquivalentPropertyEntry, propOpCount);
    uint propIdCount = 0;

    FOREACH_BITSET_IN_SPARSEBV(propOpId, propOps)
    {
        ObjTypeSpecFldInfo* propOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propOpId);
        Js::PropertyId propertyId = propOpInfo->GetPropertyId();
        Js::PropertyIndex propOpIndex = Js::Constants::NoSlot;
        bool hasFixedValue = propOpInfo->HasFixedValue();
        if (hasFixedValue)
        {
            cache->SetHasFixedValue();
        }
        bool isLoadedFromProto = propOpInfo->IsLoadedFromProto();
        if (isLoadedFromProto)
        {
            cache->SetIsLoadedFromProto();
        }
        else
        {
            // Only non-proto accesses carry a meaningful local slot index.
            propOpIndex = propOpInfo->GetSlotIndex();
        }
        bool propOpUsesAuxSlot = propOpInfo->UsesAuxSlot();

        AssertMsg(!isTypeStatic || !propOpInfo->IsBeingStored(), "Why are we storing a field to an object of static type?");

        Js::EquivalentPropertyEntry* entry = nullptr;
        if (propIds.TryGetValue(propertyId, &entry))
        {
            if (propOpIndex == entry->slotIndex && propOpUsesAuxSlot == entry->isAuxSlot)
            {
                // Same property with matching slot info: just merge the writability requirement.
                entry->mustBeWritable |= propOpInfo->IsBeingStored();
            }
            else
            {
                // Due to inline cache sharing we have the same property accessed using different caches
                // with inconsistent info. This means a guaranteed bailout on the equivalent type check.
                // We'll just let it happen and turn off the optimization for this function. We could avoid
                // this problem by tracking property information on the value type in glob opt.
                if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("EquivObjTypeSpec: top function %s (%s): duplicate property clash on %d \n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), m_func->GetDebugNumberSet(debugStringBuffer), propertyId);
                    Output::Flush();
                }

                Assert(propIdCount < propOpCount);
                __analysis_assume(propIdCount < propOpCount);
                entry = &properties[propIdCount++];
                entry->propertyId = propertyId;
                entry->slotIndex = propOpIndex;
                entry->isAuxSlot = propOpUsesAuxSlot;
                entry->mustBeWritable = propOpInfo->IsBeingStored();
            }
        }
        else
        {
            // First occurrence of this property ID: append a new entry and remember it for dedupe.
            Assert(propIdCount < propOpCount);
            __analysis_assume(propIdCount < propOpCount);
            entry = &properties[propIdCount++];
            entry->propertyId = propertyId;
            entry->slotIndex = propOpIndex;
            entry->isAuxSlot = propOpUsesAuxSlot;
            entry->mustBeWritable = propOpInfo->IsBeingStored();
            propIds.AddNew(propertyId, entry);
        }
    }
    NEXT_BITSET_IN_SPARSEBV;

    cache->record.propertyCount = propIdCount;

    // Js::EquivalentPropertyEntry does not contain pointer, no need to fixup
    cache->record.properties = NativeCodeDataNewArrayNoFixup(this->m_func->GetNativeCodeDataAllocator(), Js::EquivalentPropertyEntry, propIdCount);
    memcpy(cache->record.properties, properties, propIdCount * sizeof(Js::EquivalentPropertyEntry));

    return guard;
}
  6413. bool
  6414. Lowerer::LinkCtorCacheToGuardedProperties(JITTimeConstructorCache* ctorCache)
  6415. {
  6416. // We do not always have guarded properties. If the constructor is empty and the subsequent code doesn't load or store any of
  6417. // the constructed object's properties, or if all inline caches are empty then this ctor cache doesn't guard any properties.
  6418. if (ctorCache->GetGuardedPropOps() == nullptr)
  6419. {
  6420. return false;
  6421. }
  6422. bool linked = false;
  6423. if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
  6424. {
  6425. linked = LinkGuardToGuardedProperties(ctorCache->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
  6426. {
  6427. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
  6428. {
  6429. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6430. Output::Print(_u("ObjTypeSpec: function %s(%s) registered ctor cache 0x%p with value 0x%p for property %u.\n"),
  6431. this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
  6432. ctorCache->GetRuntimeCacheAddr(), ctorCache->GetType()->GetAddr(), propertyId);
  6433. Output::Flush();
  6434. }
  6435. this->m_func->EnsureCtorCachesByPropertyId();
  6436. this->m_func->LinkCtorCacheToPropertyId(propertyId, ctorCache);
  6437. });
  6438. }
  6439. return linked;
  6440. }
template<typename LinkFunc>
bool
Lowerer::LinkGuardToGuardedProperties(const BVSparse<JitArenaAllocator>* guardedPropOps, LinkFunc link)
{
    // Invokes `link(propertyId)` for each guarded property operation that needs runtime
    // invalidation — one being added, loaded from a prototype, or carrying a fixed value —
    // provided a shared property guard exists for that property ID. Returns true if `link`
    // was invoked at least once.
    Assert(this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards());
    Assert(guardedPropOps != nullptr);
    bool linked = false;

    // For every entry in the bit vector, register the guard for the corresponding property ID.
    FOREACH_BITSET_IN_SPARSEBV(propertyOpId, guardedPropOps)
    {
        ObjTypeSpecFldInfo* propertyOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propertyOpId);
        Js::PropertyId propertyId = propertyOpInfo->GetPropertyId();

        // It's okay for an equivalent type check to be registered as a guard against a property becoming read-only. This transpires if, there is
        // a different monomorphic type check upstream, which guarantees the actual type of the object needed for the hard-coded type transition,
        // but it is later followed by a sequence of polymorphic inline caches, which do not have that type in the type set. At the beginning of
        // that sequence we'll emit an equivalent type check to verify that the actual type has relevant properties on appropriate slots. Then in
        // the dead store pass we'll walk upwards and encounter this check first, thus we'll drop the guarded properties accumulated thus far
        // (including the one being added) on that check.
        // AssertMsg(!propertyOpInfo->IsBeingAdded() || !isEquivalentTypeGuard, "Why do we have an equivalent type check protecting a property add?");
        if (propertyOpInfo->IsBeingAdded() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->HasFixedValue())
        {
            // Equivalent object type spec only supports fixed fields on prototypes. This is to simplify the slow type equivalence check.
            // See JavascriptOperators::CheckIfTypeIsEquivalent.
            Assert(!propertyOpInfo->IsPoly() || (!propertyOpInfo->HasFixedValue() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->UsesAccessor()));

            if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuard(propertyId))
            {
                link(propertyId);
                linked = true;
            }
            else
            {
                AssertMsg(false, "Did we fail to create a shared property guard for a guarded property?");
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV;

    return linked;
}
  6479. void
  6480. Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
  6481. {
  6482. intptr_t guard = propertySymOpnd->GetPropertyGuardValueAddr();
  6483. Assert(guard != 0);
  6484. if (!DoLazyFixedDataBailout(this->m_func))
  6485. {
  6486. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  6487. IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
  6488. IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(guard, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
  6489. InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
  6490. }
  6491. else
  6492. {
  6493. this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId());
  6494. }
  6495. }
IR::Instr*
Lowerer::GeneratePropertyGuardCheckBailoutAndLoadType(IR::Instr *insertInstr)
{
    // Lowers an instruction that must (1) verify a property guard, bailing out if it has been
    // invalidated, and (2) load the property owner's type into the dst. If the base fails the
    // object test (e.g. it is a tagged number), the static number type is loaded instead.
    // Returns the instruction preceding the original, for the caller to resume lowering from.
    IR::Instr* instrPrev = insertInstr->m_prev;

    IR::Opnd* numberTypeOpnd = IR::AddrOpnd::New(insertInstr->m_func->GetScriptContextInfo()->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, insertInstr->m_func);
    IR::PropertySymOpnd* propertySymOpnd = insertInstr->GetSrc1()->AsPropertySymOpnd();

    IR::LabelInstr* labelBailout = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);
    IR::LabelInstr* labelContinue = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func);
    IR::LabelInstr* loadNumberTypeLabel = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);

    GeneratePropertyGuardCheck(insertInstr, propertySymOpnd, labelBailout);

    // Fast path: object test + load of the object's type into dst, then jump past the slow paths.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    GenerateObjectTestAndTypeLoad(insertInstr, baseOpnd, insertInstr->GetDst()->AsRegOpnd(), loadNumberTypeLabel);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Non-object path: dst = the static number type.
    insertInstr->InsertBefore(loadNumberTypeLabel);
    this->m_lowererMD.CreateAssign(insertInstr->GetDst(), numberTypeOpnd, insertInstr);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Bailout path: the original instruction itself becomes the BailOut instruction.
    insertInstr->InsertBefore(labelBailout);
    insertInstr->InsertAfter(labelContinue);
    insertInstr->FreeSrc1();
    insertInstr->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(insertInstr);

    return instrPrev;
}
void
Lowerer::GenerateNonWritablePropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    IR::Opnd *opnd;
    IR::Instr *instr;

    // Generate a check for non-writable properties, on the model of the work done by PatchPutValue, etc.
    // Inline the check on the bit in the prototype object's type. If that check fails, call the helper.
    // If the helper finds a non-writable property, bail out, as we're counting on being able to add the property.
    JITTypeHolder typeWithoutProperty(propertySymOpnd->GetInitialType());
    Assert(typeWithoutProperty != nullptr);
    intptr_t protoAddr = typeWithoutProperty->GetPrototypeAddr();
    Assert(protoAddr != 0);

    // s1 = MOV [proto->type].ptr
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    opnd = IR::MemRefOpnd::New((char*)protoAddr + Js::RecyclableObject::GetOffsetOfType(), TyMachReg,
        this->m_func, IR::AddrOpndKindDynamicObjectTypeRef);
    m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);

    // TEST [s1->areThisAndPrototypesEnsuredToHaveOnlyWritableDataProperties].u8, 1
    // JNE $continue
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    opnd = IR::IndirOpnd::New(typeOpnd, (int32)Js::Type::OffsetOfWritablePropertiesFlag(), TyUint8, this->m_func);
    InsertTestBranch(opnd, IR::IntConstOpnd::New(1, TyUint8, this->m_func), Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // $Lhelper:
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instrInsert->InsertBefore(labelHelper);

    // s2 = CALL DoProtoCheck, prototype
    opnd = IR::AddrOpnd::New(protoAddr, IR::AddrOpndKindDynamicVar, this->m_func, true);
    m_lowererMD.LoadHelperArgument(instrInsert, opnd);

    opnd = IR::HelperCallOpnd::New(IR::HelperCheckProtoHasNonWritable, this->m_func);
    instr = IR::Instr::New(Js::OpCode::Call, IR::RegOpnd::New(TyUint8, this->m_func), opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    opnd = instr->GetDst();
    m_lowererMD.LowerCall(instr, 0);

    // A zero return from the helper routes to the bailout label.
    InsertTestBranch(opnd, opnd, Js::OpCode::BrEq_A, labelBailOut, instrInsert);

    // $Lcontinue:
    instrInsert->InsertBefore(labelContinue);
}
  6556. void
  6557. Lowerer::GenerateAdjustSlots(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
  6558. {
  6559. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  6560. bool adjusted = this->GenerateAdjustBaseSlots(instrInsert, baseOpnd, initialType, finalType);
  6561. if (!adjusted)
  6562. {
  6563. baseOpnd->Free(m_func);
  6564. }
  6565. }
bool
Lowerer::GenerateAdjustBaseSlots(IR::Instr *instrInsert, IR::RegOpnd *baseOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
{
    // Possibly allocate new slot capacity to accommodate a type transition.
    // Returns true if an AdjustSlots helper call was emitted; false if existing capacity suffices.
    AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(initialType->GetTypeHandler(), finalType->GetTypeHandler()),
        "Incompatible typeHandler transition?");
    int oldCount = initialType->GetTypeHandler()->GetSlotCapacity();
    int newCount = finalType->GetTypeHandler()->GetSlotCapacity();
    Js::PropertyIndex inlineSlotCapacity = initialType->GetTypeHandler()->GetInlineSlotCapacity();
    Js::PropertyIndex newInlineSlotCapacity = finalType->GetTypeHandler()->GetInlineSlotCapacity();

    if (oldCount >= newCount || newCount <= inlineSlotCapacity)
    {
        // Already have enough slot capacity. Do nothing.
        return false;
    }

    // Call AdjustSlots using the new counts. Because AdjustSlots uses the "no dispose" flavor of alloc,
    // no implicit calls are possible, and we don't need an implicit call check and bailout.
    // CALL AdjustSlots, instance, newInlineSlotCapacity, newAuxSlotCapacity
    // (Helper arguments are pushed in reverse order below.)

    //3rd Param: aux slot capacity = total new slots minus those stored inline
    Assert(newCount > newInlineSlotCapacity);
    const int newAuxSlotCapacity = newCount - newInlineSlotCapacity;
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newAuxSlotCapacity, TyInt32, this->m_func));

    //2nd Param
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newInlineSlotCapacity, TyUint16, this->m_func));

    //1st Param (instance)
    m_lowererMD.LoadHelperArgument(instrInsert, baseOpnd);

    //CALL HelperAdjustSlots
    IR::Opnd *opnd = IR::HelperCallOpnd::New(IR::HelperAdjustSlots, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(opnd);
    instrInsert->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);
    return true;
}
void
Lowerer::GenerateFieldStoreWithTypeChange(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
{
    // Emits an add-property store as a single type transition: grow slot capacity if needed,
    // write the final type pointer into the object, then store the field value into its slot.

    // Adjust instance slots, if necessary.
    this->GenerateAdjustSlots(instrStFld, propertySymOpnd, initialType, finalType);

    // We should never add properties to objects of static types.
    Assert(Js::DynamicType::Is(finalType->GetTypeId()));

    // Let's pin the final type to be sure its alive when we try to do the type transition.
    PinTypeRef(finalType, finalType.t, instrStFld, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
    IR::Opnd *finalTypeOpnd = IR::AddrOpnd::New(finalType->GetAddr(), IR::AddrOpndKindDynamicType, instrStFld->m_func, true);

    // Set the new type.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(instrStFld->m_func);
    IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrStFld->m_func);
    this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrStFld);

    // Now do the store.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);
}
bool
Lowerer::GenerateStFldWithCachedFinalType(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd)
{
    // This function tries to treat a sequence of add-property stores as a single type transition.
    Assert(propertySymOpnd == instrStFld->GetDst()->AsPropertySymOpnd());
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasFinalType());
    Assert(propertySymOpnd->HasInitialType());

    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;

    AssertMsg(!propertySymOpnd->IsTypeChecked(), "Why are we doing a type transition when we have the type we want?");

    // If the initial type must be checked here, do it.
    Assert(instrStFld->HasBailOutInfo());
    labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, nullptr/*typeOpnd*/, labelBailOut);

    // Do the type transition.
    GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetFinalType());
    instrStFld->FreeSrc1();
    instrStFld->FreeDst();

    // Insert the bailout and let the main path branch around it.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrStFld->InsertBefore(instr);

    if (instrStFld->HasBailOutInfo())
    {
        // The original store instruction is repurposed as the BailOut instruction on the slow path.
        Assert(labelBailOut != nullptr);
        instrStFld->InsertBefore(labelBailOut);
        instrStFld->InsertAfter(labelDone);
        instrStFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrStFld);
    }
    else
    {
        // No bailout info (unreachable given the Assert above, kept defensively): drop the
        // original instruction entirely.
        instrStFld->InsertAfter(labelDone);
        instrStFld->Remove();
    }
    return true;
}
  6655. ///----------------------------------------------------------------------------
  6656. ///
  6657. /// Lowerer::LowerScopedStFld
  6658. ///
  6659. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedStFld(IR::Instr * stFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache,
    bool withPropertyOperationFlags, Js::PropertyOperationFlags flags)
{
    // Lowers a scoped field store into a call to helperMethod. Helper arguments are pushed in
    // reverse order; the exact argument set depends on whether an inline cache and/or property
    // operation flags are passed. Returns the instruction preceding the original store.
    IR::Instr *instrPrev = stFldInstr->m_prev;

    if (withPropertyOperationFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    if(!withInlineCache)
    {
        // Without an inline cache the helper needs the script context instead.
        LoadScriptContext(stFldInstr);
    }

    // Pass the default instance
    IR::Opnd *src = stFldInstr->UnlinkSrc2();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the value to store
    src = stFldInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the property sym to store to
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(stFldInstr, this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd()));

        m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod);

    return instrPrev;
}
  6697. ///----------------------------------------------------------------------------
  6698. ///
  6699. /// Lowerer::LowerLoadVar
  6700. ///
  6701. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLoadVar(IR::Instr *instr, IR::Opnd *opnd)
{
    // Simple var load: attach opnd as the source and turn the instruction into a plain assign.
    instr->SetSrc1(opnd);
    return m_lowererMD.ChangeToAssign(instr);
}
IR::Instr *
Lowerer::LoadHelperTemp(IR::Instr * instr, IR::Instr * instrInsert)
{
    // Passes the address of a stack temp (for a temp-number dst) as a helper argument before
    // instrInsert. Returns the instruction that produced the stack address.
    IR::Opnd *tempOpnd;
    IR::Opnd *dst = instr->GetDst();
    AssertMsg(dst != nullptr, "Always expect a dst for these.");
    AssertMsg(instr->dstIsTempNumber, "Should only be loading temps here");
    Assert(dst->IsRegOpnd());
    StackSym * tempNumberSym = this->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
    IR::Instr *load = this->m_lowererMD.LoadStackAddress(tempNumberSym);
    instrInsert->InsertBefore(load);
    tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instrInsert, tempOpnd);
    return load;
}
void
Lowerer::LoadArgumentCount(IR::Instr *const instr)
{
    // Lowers a load of the actual argument count into the instruction's dst.
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Argument count including 'this'
        // For inlinees the count is a compile-time constant.
        instr->SetSrc1(IR::IntConstOpnd::New(instr->m_func->actualCount, TyUint32, instr->m_func, true));
        LowererMD::ChangeToAssign(instr);
    }
    else if (instr->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        // Coroutines: load the count from the call info sym.
        IR::SymOpnd* symOpnd = LoadCallInfo(instr);
        instr->SetSrc1(symOpnd);
        LowererMD::ChangeToAssign(instr);
    }
    else
    {
        // Regular function: defer to the machine-dependent lowerer.
        m_lowererMD.LoadArgumentCount(instr);
    }
}
void
Lowerer::LoadStackArgPtr(IR::Instr *const instr)
{
    // Lowers a load of the pointer to the first stack argument (the one after 'this').
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Address of argument after 'this'
        // Note: bumps the argv slot sym's offset by one machine pointer to skip 'this',
        // then takes the address of that slot with an LEA.
        const auto firstRealArgStackSym = instr->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
        this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr);
        instr->SetSrc1(IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, instr->m_func));
        LowererMD::ChangeToLea(instr);
    }
    else
    {
        // Non-inlined function: defer to the machine-dependent lowerer.
        m_lowererMD.LoadStackArgPtr(instr);
    }
}
  6767. void
  6768. Lowerer::LoadArgumentsFromFrame(IR::Instr *const instr)
  6769. {
  6770. Assert(instr);
  6771. Assert(instr->GetDst());
  6772. Assert(!instr->GetSrc1());
  6773. Assert(!instr->GetSrc2());
  6774. if(instr->m_func->IsInlinee())
  6775. {
  6776. // Use the inline object meta arg slot for the arguments object
  6777. instr->SetSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
  6778. LowererMD::ChangeToAssign(instr);
  6779. }
  6780. else
  6781. {
  6782. m_lowererMD.LoadArgumentsFromFrame(instr);
  6783. }
  6784. }
  6785. #ifdef ENABLE_WASM
IR::Instr *
Lowerer::LowerCheckWasmSignature(IR::Instr * instr)
{
    // Lowers a wasm indirect-call signature check. If the expected signature has no short
    // (inline-comparable) form, call a helper to do the full comparison; otherwise compare the
    // callee's short signature inline and throw a signature-mismatch trap on inequality.
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2()->IsIntConstOpnd());

    int sigId = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();

    IR::Instr *instrPrev = instr->m_prev;

    // The callee function object carries its signature at a fixed offset.
    IR::IndirOpnd * actualSig = IR::IndirOpnd::New(instr->UnlinkSrc1()->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfSignature(), TyMachReg, m_func);

    Wasm::WasmSignature * expectedSig = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignature(sigId);
    if (expectedSig->GetShortSig() == Js::Constants::InvalidSignature)
    {
        // Helper path: pass (scriptContext, actualSig, expectedSigAddr) and let the runtime compare.
        intptr_t sigAddr = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignatureAddr(sigId);
        IR::AddrOpnd * expectedOpnd = IR::AddrOpnd::New(sigAddr, IR::AddrOpndKindConstantAddress, m_func);
        m_lowererMD.LoadHelperArgument(instr, expectedOpnd);
        m_lowererMD.LoadHelperArgument(instr, actualSig);
        LoadScriptContext(instr);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CheckWasmSignature);
    }
    else
    {
        // Inline path: load the actual signature pointer, compare its short form against the
        // expected constant, and throw WASMERR_SignatureMismatch on the trap path.
        IR::LabelInstr * trapLabel = InsertLabel(true, instr);
        IR::LabelInstr * labelFallThrough = InsertLabel(false, instr->m_next);
        IR::RegOpnd * actualRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        InsertMove(actualRegOpnd, actualSig, trapLabel);
        IR::IndirOpnd * shortSigIndir = IR::IndirOpnd::New(actualRegOpnd, Wasm::WasmSignature::GetOffsetOfShortSig(), TyMachReg, m_func);
        InsertCompareBranch(shortSigIndir, IR::IntConstOpnd::New(expectedSig->GetShortSig(), TyMachReg, m_func), Js::OpCode::BrNeq_A, trapLabel, trapLabel);
        InsertBranch(Js::OpCode::Br, labelFallThrough, trapLabel);
        GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_SignatureMismatch), TyInt32, m_func), instr);
        instr->Remove();
    }
    return instrPrev;
}
IR::Instr *
Lowerer::LowerLdWasmFunc(IR::Instr* instr)
{
    // Lowers LdWasmFunc: dst = table->values[index], with bounds and null
    // checks.
    //   src1 = WebAssemblyTable object
    //   src2 = element index (int const or reg)
    // Traps with WASMERR_TableIndexOutOfRange when index >= table length, and
    // with WASMERR_NeedWebAssemblyFunc when the loaded slot is null.
    IR::Instr * prev = instr->m_prev;
    IR::RegOpnd * tableReg = instr->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd * indexOpnd = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();
    IR::IndirOpnd * lengthOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfCurrentLength(), TyUint32, m_func);
    IR::IndirOpnd * valuesIndirOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfValues(), TyMachPtr, m_func);
    IR::RegOpnd * valuesRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    // Elements are pointer-sized, so scale the index by the machine pointer size.
    byte scale = m_lowererMD.GetDefaultIndirScale();
    IR::IndirOpnd * funcIndirOpnd;
    if (indexOpnd->IsIntConstOpnd())
    {
        // Constant index: fold the scaled index into the indir offset.
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsIntConstOpnd()->AsInt32() << scale, TyMachPtr, m_func);
    }
    else
    {
        Assert(indexOpnd->IsRegOpnd());
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsRegOpnd(), TyMachPtr, m_func);
        funcIndirOpnd->SetScale(scale);
    }
    // IR layout built here:
    //   [checks + loads] trapLabel: [throw NeedWebAssemblyFunc]
    //   trapOutOfBoundsLabel: [throw TableIndexOutOfRange] doneLabel:
    IR::LabelInstr * trapOutOfBoundsLabel = InsertLabel(true, instr);
    IR::LabelInstr * trapLabel = InsertLabel(true, trapOutOfBoundsLabel);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    // index >= currentLength -> out-of-range trap (the 'true' flag presumably
    // selects an unsigned compare -- TODO confirm against InsertCompareBranch).
    InsertCompareBranch(indexOpnd, lengthOpnd, Js::OpCode::BrGe_A, true, trapOutOfBoundsLabel, trapLabel);
    InsertMove(valuesRegOpnd, valuesIndirOpnd, trapLabel);
    InsertMove(dst, funcIndirOpnd, trapLabel);
    // Empty slot (null) -> NeedWebAssemblyFunc trap.
    InsertCompareBranch(dst, IR::IntConstOpnd::New(0, TyMachPtr, m_func), Js::OpCode::BrEq_A, trapLabel, trapLabel);
    InsertBranch(Js::OpCode::Br, doneLabel, trapLabel);
    // Throw bodies: first one lands under trapLabel, second under
    // trapOutOfBoundsLabel (each is inserted before the given instruction).
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_NeedWebAssemblyFunc), TyInt32, m_func), trapOutOfBoundsLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_TableIndexOutOfRange), TyInt32, m_func), instr);
    instr->Remove();
    return prev;
}
  6854. IR::Instr *
  6855. Lowerer::LowerGrowWasmMemory(IR::Instr* instr)
  6856. {
  6857. IR::Instr * instrPrev = m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  6858. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  6859. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_GrowWasmMemory);
  6860. return instrPrev;
  6861. }
  6862. #endif
  6863. IR::Instr *
  6864. Lowerer::LowerUnaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  6865. {
  6866. IR::Instr *instrPrev;
  6867. IR::Opnd *src1 = instr->UnlinkSrc1();
  6868. instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
  6869. m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, opndBailoutArg);
  6870. return instrPrev;
  6871. }
  6872. // helper takes memory context as second argument
  6873. IR::Instr *
  6874. Lowerer::LowerUnaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  6875. {
  6876. IR::Instr *instrPrev;
  6877. instrPrev = LoadScriptContext(instr);
  6878. return this->LowerUnaryHelper(instr, helperMethod, opndBailoutArg);
  6879. }
  6880. IR::Instr *
  6881. Lowerer::LowerUnaryHelperMemWithFunctionInfo(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6882. {
  6883. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionInfoOpnd(instr));
  6884. return this->LowerUnaryHelperMem(instr, helperMethod);
  6885. }
  6886. IR::Instr *
  6887. Lowerer::LowerUnaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6888. {
  6889. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  6890. return this->LowerUnaryHelperMem(instr, helperMethod);
  6891. }
  6892. IR::Instr *
  6893. Lowerer::LowerBinaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6894. {
  6895. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2, "Expected a binary instruction...");
  6896. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  6897. return this->LowerBinaryHelperMem(instr, helperMethod);
  6898. }
  6899. IR::Instr *
  6900. Lowerer::LowerUnaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6901. {
  6902. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  6903. IR::Instr * instrFirst;
  6904. IR::Opnd * tempOpnd;
  6905. if (instr->dstIsTempNumber)
  6906. {
  6907. instrFirst = this->LoadHelperTemp(instr, instr);
  6908. }
  6909. else
  6910. {
  6911. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  6912. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  6913. }
  6914. this->LowerUnaryHelperMem(instr, helperMethod);
  6915. return instrFirst;
  6916. }
  6917. IR::Instr *
  6918. Lowerer::LowerUnaryHelperMemWithTemp2(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp)
  6919. {
  6920. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  6921. if (instr->dstIsTempNumber)
  6922. {
  6923. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  6924. this->LowerUnaryHelperMem(instr, helperMethodWithTemp);
  6925. return instrFirst;
  6926. }
  6927. return this->LowerUnaryHelperMem(instr, helperMethod);
  6928. }
  6929. IR::Instr *
  6930. Lowerer::LowerUnaryHelperMemWithBoolReference(IR::Instr *instr, IR::JnHelperMethod helperMethod, bool useBoolForBailout)
  6931. {
  6932. if (!this->m_func->tempSymBool)
  6933. {
  6934. this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
  6935. this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
  6936. }
  6937. IR::SymOpnd * boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
  6938. IR::RegOpnd * boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  6939. InsertLea(boolRefOpnd, boolOpnd, instr);
  6940. m_lowererMD.LoadHelperArgument(instr, boolRefOpnd);
  6941. return this->LowerUnaryHelperMem(instr, helperMethod, useBoolForBailout ? boolOpnd : nullptr);
  6942. }
  6943. IR::Instr *
  6944. Lowerer::LowerInitCachedScope(IR::Instr* instr)
  6945. {
  6946. instr->m_opcode = Js::OpCode::CallHelper;
  6947. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperOP_InitCachedScope, this->m_func);
  6948. IR::Opnd * src1 = instr->UnlinkSrc1();
  6949. instr->SetSrc1(helperOpnd);
  6950. instr->SetSrc2(src1);
  6951. return instr;
  6952. }
  6953. ///----------------------------------------------------------------------------
  6954. ///
  6955. /// Lowerer::LowerBinaryHelper
  6956. ///
  6957. ///----------------------------------------------------------------------------
  6958. IR::Instr *
  6959. Lowerer::LowerBinaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6960. {
  6961. // The only case where this would still be null when we return is when
  6962. // helperMethod == HelperOP_CmSrEq_EmptyString; in which case we ignore
  6963. // instrPrev.
  6964. IR::Instr *instrPrev = nullptr;
  6965. AssertMsg((Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1 && !instr->GetDst()) ||
  6966. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  6967. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  6968. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2 ||
  6969. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementU ||
  6970. instr->m_opcode == Js::OpCode::InvalCachedScope, "Expected a binary instruction...");
  6971. IR::Opnd *src2 = instr->UnlinkSrc2();
  6972. if (helperMethod != IR::HelperOP_CmSrEq_EmptyString)
  6973. instrPrev = m_lowererMD.LoadHelperArgument(instr, src2);
  6974. IR::Opnd *src1 = instr->UnlinkSrc1();
  6975. m_lowererMD.LoadHelperArgument(instr, src1);
  6976. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  6977. return instrPrev;
  6978. }
  6979. // helper takes memory context as third argument
  6980. IR::Instr *
  6981. Lowerer::LowerBinaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6982. {
  6983. IR::Instr *instrPrev;
  6984. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  6985. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  6986. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2 ||
  6987. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1, "Expected a binary instruction...");
  6988. instrPrev = LoadScriptContext(instr);
  6989. return this->LowerBinaryHelper(instr, helperMethod);
  6990. }
  6991. IR::Instr *
  6992. Lowerer::LowerBinaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6993. {
  6994. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  6995. IR::Instr * instrFirst;
  6996. IR::Opnd * tempOpnd;
  6997. if (instr->dstIsTempNumber)
  6998. {
  6999. instrFirst = this->LoadHelperTemp(instr, instr);
  7000. }
  7001. else
  7002. {
  7003. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  7004. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  7005. }
  7006. this->LowerBinaryHelperMem(instr, helperMethod);
  7007. return instrFirst;
  7008. }
  7009. IR::Instr *
  7010. Lowerer::LowerBinaryHelperMemWithTemp2(
  7011. IR::Instr *instr,
  7012. IR::JnHelperMethod helperMethod,
  7013. IR::JnHelperMethod helperMethodWithTemp
  7014. )
  7015. {
  7016. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  7017. if (instr->dstIsTempNumber && instr->GetDst() && instr->GetDst()->GetValueType().HasBeenNumber())
  7018. {
  7019. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  7020. this->LowerBinaryHelperMem(instr, helperMethodWithTemp);
  7021. return instrFirst;
  7022. }
  7023. return this->LowerBinaryHelperMem(instr, helperMethod);
  7024. }
IR::Instr *
Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
{
    // Fast path for "s1 = s1 + s2" where the left operand is a dead temp:
    // when left is a CompoundString with room in its last block and right is a
    // finalized single-character string, append the character in place instead
    // of calling the Op_AddLeftDead helper. All guard failures fall back to
    // the helper at labelHelper; the fast path jumps to the fall-through.
    IR::Opnd * opndLeft;
    IR::Opnd * opndRight;
    opndLeft = instr->GetSrc1();
    opndRight = instr->GetSrc2();
    Assert(opndLeft && opndRight);
    bool generateFastPath = this->m_func->DoFastPaths();
    // Fast path only when: fast paths enabled, all operands are registers,
    // both sides are likely strings, dst aliases left, and left != right.
    if (!generateFastPath
        || !opndLeft->IsRegOpnd()
        || !opndRight->IsRegOpnd()
        || !instr->GetDst()->IsRegOpnd()
        || !opndLeft->GetValueType().IsLikelyString()
        || !opndRight->GetValueType().IsLikelyString()
        || !opndLeft->IsEqual(instr->GetDst()->AsRegOpnd())
        || opndLeft->IsEqual(opndRight))
    {
        return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
    }
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel(false);
    // All fast-path code is inserted before labelHelper, which itself sits
    // just before the original instr (the helper call path).
    IR::LabelInstr *insertBeforeInstr = labelHelper;
    instr->InsertBefore(labelHelper);
    // left must be a heap object (not a tagged int/float).
    if (!opndLeft->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
    }
    // left's vtable must be exactly CompoundString.
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // right must be some JavascriptString.
    GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);
    // left->m_charLength <= JavascriptArray::MaxCharLength
    IR::IndirOpnd *indirLeftCharLengthOpnd = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func);
    IR::RegOpnd *regLeftCharLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(regLeftCharLengthOpnd, indirLeftCharLengthOpnd, insertBeforeInstr);
    InsertCompareBranch(
        regLeftCharLengthOpnd,
        IR::IntConstOpnd::New(Js::JavascriptString::MaxCharLength, TyUint32, m_func),
        Js::OpCode::BrGt_A,
        labelHelper,
        insertBeforeInstr);
    // left->m_pszValue == NULL (!left->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // right->m_pszValue != NULL (right->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if ownsLastBlock != 0 (left may write into its last block)
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfOwnsLastBlock(), TyUint8, m_func),
        IR::IntConstOpnd::New(0, TyUint8, m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if right->m_charLength == 1 (only single-char appends are handled inline)
    InsertCompareBranch(IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
        IR::IntConstOpnd::New(1, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if left->m_directCharLength == -1
    InsertCompareBranch(IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfDirectCharLength(), TyUint32, m_func),
        IR::IntConstOpnd::New(UINT32_MAX, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if lastBlockInfo.charLength < lastBlockInfo.charCapacity (room to append)
    IR::IndirOpnd *indirCharLength = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo()+ (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharLength(), TyMachPtr, m_func);
    IR::RegOpnd *charLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charLengthOpnd, indirCharLength, insertBeforeInstr);
    InsertCompareBranch(charLengthOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharCapacity(), TyMachPtr, m_func), Js::OpCode::BrGe_A, labelHelper, insertBeforeInstr);
    // load c = right->m_pszValue[0]
    IR::RegOpnd *pszValue0Opnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd *indirRightPszOpnd = IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(pszValue0Opnd, indirRightPszOpnd, insertBeforeInstr);
    IR::RegOpnd *charResultOpnd = IR::RegOpnd::New(TyUint16, this->m_func);
    InsertMove(charResultOpnd, IR::IndirOpnd::New(pszValue0Opnd, 0, TyUint16, this->m_func), insertBeforeInstr);
    // lastBlockInfo.buffer[blockCharLength] = c;
    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(baseOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoBuffer(), TyMachPtr, m_func), insertBeforeInstr);
    IR::IndirOpnd *indirBufferToStore = IR::IndirOpnd::New(baseOpnd, charLengthOpnd, (byte)Math::Log2(sizeof(char16)), TyUint16, m_func);
    InsertMove(indirBufferToStore, charResultOpnd, insertBeforeInstr);
    // left->m_charLength++
    InsertAdd(false, indirLeftCharLengthOpnd, regLeftCharLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // lastBlockInfo.charLength++
    InsertAdd(false, indirCharLength, indirCharLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // Fast path done: skip over the helper-call path.
    InsertBranch(Js::OpCode::Br, labelFallThrough, insertBeforeInstr);
    // Slow path: the original instr (after labelHelper) becomes the helper call.
    return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
}
  7122. IR::Instr *
  7123. Lowerer::LowerBinaryHelperMemWithTemp3(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp, IR::JnHelperMethod helperMethodLeftDead)
  7124. {
  7125. IR::Opnd *src1 = instr->GetSrc1();
  7126. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_isTempLastUse && !src1->GetValueType().IsNotString())
  7127. {
  7128. Assert(helperMethodLeftDead == IR::HelperOp_AddLeftDead);
  7129. return LowerAddLeftDeadForString(instr);
  7130. }
  7131. else
  7132. {
  7133. return this->LowerBinaryHelperMemWithTemp2(instr, helperMethod, helperMethodWithTemp);
  7134. }
  7135. }
  7136. StackSym *
  7137. Lowerer::GetTempNumberSym(IR::Opnd * opnd, bool isTempTransferred)
  7138. {
  7139. AssertMsg(opnd->IsRegOpnd(), "Expected regOpnd");
  7140. if (isTempTransferred)
  7141. {
  7142. StackSym * tempNumberSym = StackSym::New(TyMisc, m_func);
  7143. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  7144. return tempNumberSym;
  7145. }
  7146. StackSym * stackSym = opnd->AsRegOpnd()->m_sym;
  7147. StackSym * tempNumberSym = stackSym->m_tempNumberSym;
  7148. if (tempNumberSym == nullptr)
  7149. {
  7150. tempNumberSym = StackSym::New(TyMisc, m_func);
  7151. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  7152. stackSym->m_tempNumberSym = tempNumberSym;
  7153. }
  7154. return tempNumberSym;
  7155. }
void Lowerer::LowerProfiledLdElemI(IR::JitProfilingInstr *const instr)
{
    // Lowers a profiling LdElemI to a ProfilingHelpers::ProfiledLdElem call.
    // Arguments are pushed in reverse of the C signature shown below, so the
    // last parameter is loaded first.
    Assert(instr);
    /*
    Var ProfilingHelpers::ProfiledLdElem(
    const Var base,
    const Var varIndex,
    FunctionBody *const functionBody,
    const ProfileId profileId,
    bool didArrayAccessHelperCall)
    */
    Func *const func = instr->m_func;
    // didArrayAccessHelperCall = false
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    IR::IndirOpnd *const indir = instr->UnlinkSrc1()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: either the indir's index register, or its constant offset
    // boxed as a tagged int.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
        ? static_cast<IR::Opnd *>(indexOpnd)
        : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledLdElem, func));
    m_lowererMD.LowerCall(instr, 0);
}
void Lowerer::LowerProfiledStElemI(IR::JitProfilingInstr *const instr, const Js::PropertyOperationFlags flags)
{
    // Lowers a profiling StElemI to a ProfilingHelpers::ProfiledStElem call.
    // With default flags, a specialized helper is used that omits the last
    // two parameters (flags, didArrayAccessHelperCall). Arguments are pushed
    // in reverse of the C signature shown below.
    Assert(instr);
    /*
    void ProfilingHelpers::ProfiledStElem(
    const Var base,
    const Var varIndex,
    const Var value,
    FunctionBody *const functionBody,
    const ProfileId profileId,
    const PropertyOperationFlags flags,
    bool didArrayAccessHelperCall)
    */
    Func *const func = instr->m_func;
    IR::JnHelperMethod helper;
    if(flags == Js::PropertyOperation_None)
    {
        helper = IR::HelperProfiledStElem_DefaultFlags;
    }
    else
    {
        helper = IR::HelperProfiledStElem;
        // didArrayAccessHelperCall = false
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(flags, TyInt32, func, true));
    }
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    // value: the stored operand.
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    IR::IndirOpnd *const indir = instr->UnlinkDst()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: either the indir's index register, or its constant offset
    // boxed as a tagged int.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
        ? static_cast<IR::Opnd *>(indexOpnd)
        : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(helper, func));
    m_lowererMD.LowerCall(instr, 0);
}
  7225. ///----------------------------------------------------------------------------
  7226. ///
  7227. /// Lowerer::LowerStElemI
  7228. ///
  7229. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStElemI(IR::Instr * instr, Js::PropertyOperationFlags flags, bool isHelper, IR::JnHelperMethod helperMethod)
{
    // Lowers a StElemI-style instruction (dst is an IndirOpnd) to a helper
    // call, after peeling off any bailout kinds that need dedicated checks.
    // The helper is re-selected based on the index operand's type and the
    // source value's type (var / native int / native float variants).
    IR::Instr *instrPrev = instr->m_prev;
    if (instr->IsJitProfilingInstr())
    {
        // Profiling build path: delegate entirely to the profiled lowering.
        Assert(!isHelper);
        LowerProfiledStElemI(instr->AsJitProfilingInstr(), flags);
        return instrPrev;
    }
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *newDst = nullptr;
    IRType srcType = src1->GetType();
    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
#if !FLOATVAR
    if (dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray() && src1->IsRegOpnd())
    {
        // We allow the source of typedArray StElem to be marked as temp, since we just need the value,
        // however if the array turns out to be a non-typed array, or the index isn't valid (the value is then stored as a property)
        // the temp needs to be boxed if it is a float. The BoxStackNumber helper will box JavascriptNumbers
        // which are on the stack.
        // regVar = BoxStackNumber(src1, scriptContext)
        IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
        newInstr->SetDst(regVar);
        newInstr->SetSrc1(src1);
        instr->InsertBefore(newInstr);
        LowerUnaryHelperMem(newInstr, IR::HelperBoxStackNumber);
        // MOV src1, regVar
        newInstr = IR::Instr::New(Js::OpCode::Ld_A, src1, regVar, this->m_func);
        instr->InsertBefore(m_lowererMD.ChangeToAssign(newInstr));
    }
#endif
    if(instr->HasBailOutInfo())
    {
        // Peel off the array-related bailout kinds one at a time; each
        // Lower* call emits its check and clears its bit from the instr.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            // Head-segment and missing-value bailouts are mutually exclusive.
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if(bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutConvertedNativeArray)
        {
            // The helper's return value (captured in newDst below) reports
            // whether a native-array conversion happened; a zero result
            // branches over the bailout.
            IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instr->InsertAfter(labelSkipBailOut);
            LowerOneBailOutKind(instr, IR::BailOutConvertedNativeArray, isHelper);
            newDst = IR::RegOpnd::New(TyMachReg, m_func);
            InsertTestBranch(newDst, newDst, Js::OpCode::BrEq_A, labelSkipBailOut, instr->m_next);
        }
    }
    instr->UnlinkDst();
    instr->UnlinkSrc1();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    Assert(
        helperMethod == IR::HelperOP_InitElemGetter ||
        helperMethod == IR::HelperOP_InitElemSetter ||
        helperMethod == IR::HelperOP_InitComputedProperty ||
        helperMethod == IR::HelperOp_SetElementI ||
        helperMethod == IR::HelperOp_InitClassMemberComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberGetComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberSetComputedName
        );
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        // Typed (int32/uint32) index: pick the specialized SetElementI helper
        // matching (index type x source type).
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_Int32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_Int32 :
                IR::HelperOp_SetNativeFloatElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_UInt32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_UInt32 :
                IR::HelperOp_SetNativeFloatElementI_UInt32;
        }
        else
        {
            Assert(FALSE);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)dst->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (srcType != TyVar)
        {
            helperMethod =
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI : IR::HelperOp_SetNativeFloatElementI;
        }
    }
    // Push helper arguments in reverse order; float sources go through the
    // double-argument path instead of the regular slot.
    if (srcType == TyFloat64)
    {
        m_lowererMD.LoadDoubleHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr,
        IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    LoadScriptContext(instr);
    if (srcType != TyFloat64)
    {
        m_lowererMD.LoadHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    dst->Free(this->m_func);
    if (newDst)
    {
        // Capture the helper's return for the converted-native-array check.
        instr->SetDst(newDst);
    }
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
  7364. ///----------------------------------------------------------------------------
  7365. ///
  7366. /// Lowerer::LowerLdElemI
  7367. ///
  7368. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdElemI(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    // Lowers an LdElemI-style instruction (src1 is an IndirOpnd) to a helper
    // call. The helper is re-selected based on the index operand's type, the
    // dst type (var / native int / native float), and — for likely
    // non-int arrays — an "expecting" variant that feeds array profiling.
    IR::Instr *instrPrev = instr->m_prev;
    if(instr->IsJitProfilingInstr())
    {
        // Profiling build path: delegate entirely to the profiled lowering.
        Assert(helperMethod == IR::HelperOp_GetElementI);
        Assert(!isHelper);
        LowerProfiledLdElemI(instr->AsJitProfilingInstr());
        return instrPrev;
    }
    if (!isHelper && instr->DoStackArgsOpt(this->m_func))
    {
        // Stack-args optimization: emit only the fast path over the in-memory
        // arguments object, and bail out when the index is outside the actuals.
        IR::LabelInstr * labelLdElem = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
        // Pass in null for labelFallThru to only generate the LdHeapArgument call
        GenerateFastArgumentsLdElemI(instr, nullptr);
        instr->InsertBefore(labelLdElem);
        instr->UnlinkSrc1();
        instr->UnlinkDst();
        Assert(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutKind::BailOnStackArgsOutOfActualsRange);
        instr = GenerateBailOut(instr, nullptr, nullptr);
        return instrPrev;
    }
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd");
    IR::IndirOpnd *indirOpnd = src1->AsIndirOpnd();
    bool loadScriptContext = true;
    IRType dstType = instr->GetDst()->GetType();
    IR::Opnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        // Typed (int32/uint32) index: pick the specialized helper matching
        // (base helper x index type x dst type).
        Assert(indexOpnd->GetType() == TyUint32 || indexOpnd->GetType() == TyInt32);
        switch (helperMethod)
        {
        case IR::HelperOp_GetElementI:
            if (indexOpnd->GetType() == TyUint32)
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_UInt32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_UInt32 :
                    IR::HelperOp_GetNativeFloatElementI_UInt32;
            }
            else
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_Int32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_Int32 :
                    IR::HelperOp_GetNativeFloatElementI_Int32;
            }
            break;
        case IR::HelperOp_GetMethodElement:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_GetMethodElement_UInt32 : IR::HelperOp_GetMethodElement_Int32;
            break;
        case IR::HelperOp_TypeofElem:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_TypeofElem_UInt32 : IR::HelperOp_TypeofElem_Int32;
            break;
        default:
            Assert(false);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (dstType != TyVar)
        {
            // Native-typed dst helpers don't take a script context argument.
            loadScriptContext = false;
            helperMethod =
                dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI : IR::HelperOp_GetNativeFloatElementI;
        }
    }
    // Jitted loop bodies have volatile information about values created outside the loop, so don't update array creation site
    // profile data from jitted loop bodies
    if(!m_func->IsLoopBody())
    {
        const ValueType baseValueType(indirOpnd->GetBaseOpnd()->GetValueType());
        if( baseValueType.IsLikelyObject() &&
            baseValueType.GetObjectType() == ObjectType::Array &&
            !baseValueType.HasIntElements())
        {
            // Swap in the "expecting native-float/var array" helper variants
            // for the GetElementI family.
            switch(helperMethod)
            {
            case IR::HelperOp_GetElementI:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_UInt32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_UInt32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_UInt32_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_Int32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_Int32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_Int32_ExpectingVarArray;
                break;
            }
        }
    }
    // Push helper arguments in reverse order: [scriptContext,] index, base.
    if (loadScriptContext)
    {
        LoadScriptContext(instr);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = indirOpnd->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    src1->Free(this->m_func);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
  7491. void Lowerer::LowerLdLen(IR::Instr *const instr, const bool isHelper)
  7492. {
  7493. Assert(instr);
  7494. Assert(instr->m_opcode == Js::OpCode::LdLen_A);
  7495. // LdLen has persisted to this point for the sake of pre-lower opts.
  7496. // Turn it into a LdFld of the "length" property.
  7497. // This is normally a load of the internal "length" of an Array, so it probably doesn't benefit
  7498. // from inline caching.
  7499. // Changing the opcode to LdFld is done in LowerLdFld and needs to remain that way to take into
  7500. // account ProfiledLdLen_A
  7501. IR::RegOpnd * baseOpnd = instr->UnlinkSrc1()->AsRegOpnd();
  7502. PropertySym* fieldSym = PropertySym::FindOrCreate(baseOpnd->m_sym->m_id, Js::PropertyIds::length, (uint32)-1, (uint)-1, PropertyKindData, m_func);
  7503. baseOpnd->Free(this->m_func);
  7504. instr->SetSrc1(IR::SymOpnd::New(fieldSym, TyVar, m_func));
  7505. LowerLdFld(instr, IR::HelperOp_GetProperty, IR::HelperOp_GetProperty, false, nullptr, isHelper);
  7506. }
// Lowers LdArrViewElem (asm.js typed-array-view load).
// A negative constant offset can never be in bounds, so it loads the
// canonical out-of-bounds value instead; otherwise a bounds check is emitted
// unless the constant access is statically known to be safe.
IR::Instr *
Lowerer::LowerLdArrViewElem(IR::Instr * instr)
{
#ifdef ASMJS_PLAT
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::RegOpnd * indexOpnd = instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = instr->GetSrc1()->AsIndirOpnd()->GetOffset();
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    IR::Instr * done;

    if (offset < 0)
    {
        // Statically out of bounds: replace the load with the out-of-bounds
        // result (NaN for float views, 0 for integer views) and reduce the
        // instruction to a plain assign.
        IR::Opnd * oobValue = nullptr;
        if(dst->IsFloat32())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), TyFloat32, m_func);
        }
        else if(dst->IsFloat64())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), TyFloat64, m_func);
        }
        else
        {
            oobValue = IR::IntConstOpnd::New(0, dst->GetType(), m_func);
        }
        instr->ReplaceSrc1(oobValue);
        if (src2)
        {
            instr->FreeSrc2();
        }
        return m_lowererMD.ChangeToAssign(instr);
    }

    if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // Dynamic index, or a constant offset that still needs a runtime check:
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $load
        // $helper:
        // MOV dst, 0
        // JMP $done
        // $load:
        // MOV dst, src1([arrayBuffer + indexOpnd])
        // $done:
        Assert(!dst->IsFloat32() || src1->IsFloat32());
        Assert(!dst->IsFloat64() || src1->IsFloat64());
        done = m_lowererMD.LowerAsmJsLdElemHelper(instr);
    }
    else
    {
        // any access below 0x10000 is safe
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        if (src2)
        {
            instr->FreeSrc2();
        }
        done = instr;
    }

    // Emit the actual load before the "done" point; int64 destinations need
    // the dedicated int64 assign lowering.
    if (dst->IsInt64())
    {
        IR::Instr* movInt64 = IR::Instr::New(Js::OpCode::Ld_I4, dst, src1, m_func);
        done->InsertBefore(movInt64);
        m_lowererMD.LowerInt64Assign(movInt64);
    }
    else
    {
        InsertMove(dst, src1, done);
    }

    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
  7586. IR::Instr *
  7587. Lowerer::LowerWasmMemOp(IR::Instr * instr, IR::Opnd *addrOpnd)
  7588. {
  7589. uint32 offset = addrOpnd->AsIndirOpnd()->GetOffset();
  7590. // don't encode offset for wasm memory reads/writes
  7591. addrOpnd->AsIndirOpnd()->m_dontEncode = true;
  7592. // if offset/size overflow the max length, throw (this also saves us from having to do int64 math)
  7593. int64 constOffset = (int64)addrOpnd->GetSize() + (int64)offset;
  7594. if (constOffset >= Js::ArrayBuffer::MaxArrayBufferLength)
  7595. {
  7596. GenerateRuntimeError(instr, WASMERR_ArrayIndexOutOfRange, IR::HelperOp_WebAssemblyRuntimeError);
  7597. return instr;
  7598. }
  7599. else
  7600. {
  7601. return m_lowererMD.LowerWasmMemOp(instr, addrOpnd);
  7602. }
  7603. }
// Lowers LdArrViewElemWasm (WebAssembly linear-memory load) by emitting the
// bounds handling through LowerWasmMemOp and then the actual move.
IR::Instr *
Lowerer::LowerLdArrViewElemWasm(IR::Instr * instr)
{
#ifdef ENABLE_WASM
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdArrViewElemWasm);

    IR::Instr * instrPrev = instr->m_prev;

    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    // Source and destination must agree on float width.
    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());

    IR::Instr * done = LowerWasmMemOp(instr, src1);
    IR::Instr* newMove = nullptr;
    if (dst->IsInt64())
    {
        // Int64 destinations need the dedicated int64 assign lowering.
        IR::Instr* movInt64 = IR::Instr::New(Js::OpCode::Ld_I4, dst, src1, m_func);
        done->InsertBefore(movInt64);
        newMove = m_lowererMD.LowerInt64Assign(movInt64);
    }
    else
    {
        newMove = InsertMove(dst, src1, done);
    }

#if ENABLE_FAST_ARRAYBUFFER
    // We need to have an AV when accessing out of bounds memory even if the dst is not used
    // Make sure LinearScan doesn't dead store this instruction
    newMove->hasSideEffects = true;
#endif
    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
// Lowers Memset to a call to the Op_Memset runtime helper.
// helperRet receives the helper's result; LowerMemOp tests it to decide
// whether to bail out. May insert a ToVar before the instruction, in which
// case that new instruction is returned (otherwise nullptr).
IR::Instr *
Lowerer::LowerMemset(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src1 = instr->UnlinkSrc1();

    Assert(dst->IsIndirOpnd());
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();

    Assert(baseOpnd);
    Assert(sizeOpnd);
    Assert(indexOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperOp_Memset;
    IR::Instr *instrPrev = nullptr;
    if (src1->IsRegOpnd() && !src1->IsVar())
    {
        // The fill value is natively typed; box it to a Var for the helper.
        IR::RegOpnd* varOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        instrPrev = IR::Instr::New(Js::OpCode::ToVar, varOpnd, src1, instr->m_func);
        instr->InsertBefore(instrPrev);
        src1 = varOpnd;
    }
    instr->SetDst(helperRet);

    // Arguments are pushed in reverse order: scriptContext, size, value,
    // index, base.
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    // The indir shell is no longer needed (base/index were unlinked above).
    dst->Free(m_func);
    return instrPrev;
}
// Lowers Memcopy to a call to the Op_Memcopy runtime helper.
// helperRet receives the helper's result; LowerMemOp tests it to decide
// whether to bail out. Unlike LowerMemset, nothing is ever inserted before
// the instruction, so this always returns nullptr.
IR::Instr *
Lowerer::LowerMemcopy(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src = instr->UnlinkSrc1();

    Assert(dst->IsIndirOpnd());
    Assert(src->IsIndirOpnd());

    IR::Opnd *dstBaseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *dstIndexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    IR::Opnd *srcBaseOpnd = src->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *srcIndexOpnd = src->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();

    Assert(sizeOpnd);
    Assert(dstBaseOpnd);
    Assert(dstIndexOpnd);
    Assert(srcBaseOpnd);
    Assert(srcIndexOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperOp_Memcopy;
    instr->SetDst(helperRet);

    // Arguments are pushed in reverse order: scriptContext, size, srcIndex,
    // srcBase, dstIndex, dstBase.
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcBaseOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstBaseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    // The indir shells are no longer needed (bases/indexes were unlinked).
    dst->Free(m_func);
    src->Free(m_func);
    return nullptr;
}
// Lowers Memset/Memcopy: peels the attached bailout kinds off the
// instruction one at a time, wires the BailOutOnMemOpError bailout to the
// helper's return value, then delegates to LowerMemset/LowerMemcopy for the
// actual helper call.
IR::Instr *
Lowerer::LowerMemOp(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    IR::Instr *instrPrev = instr->m_prev;

    // The helper reports its outcome in this byte-sized register.
    IR::RegOpnd* helperRet = IR::RegOpnd::New(TyInt8, instr->m_func);
    const bool isHelper = false;
    AssertMsg(instr->HasBailOutInfo(), "Expected bailOut on MemOp instruction");
    if (instr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        // Head-segment invalidation and missing-value are mutually exclusive;
        // lower whichever is present, then any array-length invalidation.
        if (bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if (bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }

        if (bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }

        AssertMsg(bailOutKind & IR::BailOutOnMemOpError, "Expected BailOutOnMemOpError on MemOp instruction");
        if (bailOutKind & IR::BailOutOnMemOpError)
        {
            // Insert or get continue label
            IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isHelper);
            Func *const func = instr->m_func;
            LowerOneBailOutKind(instr, IR::BailOutOnMemOpError, isHelper);
            IR::Instr *const insertBeforeInstr = instr->m_next;

            // test helperRet, helperRet
            // jz $skipBailOut
            // NOTE(review): the "jz" in the comment above disagrees with
            // BrNeq_A below — the branch to $skipBailOut is taken when
            // helperRet is NON-zero (helper succeeded); confirm which is
            // intended before relying on the comment.
            InsertCompareBranch(
                helperRet,
                IR::IntConstOpnd::New(0, TyInt8, func),
                Js::OpCode::BrNeq_A,
                skipBailOutLabel,
                insertBeforeInstr);
            // (Bail out with IR::BailOutOnMemOpError)
            // $skipBailOut:

            bailOutKind ^= IR::BailOutOnMemOpError;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        instr->ClearBailOutInfo();
    }

    IR::Instr* newInstrPrev = nullptr;
    if (instr->m_opcode == Js::OpCode::Memset)
    {
        newInstrPrev = LowerMemset(instr, helperRet);
    }
    else if (instr->m_opcode == Js::OpCode::Memcopy)
    {
        newInstrPrev = LowerMemcopy(instr, helperRet);
    }

    // LowerMemset may insert a ToVar ahead of the mem op; if so, resume from
    // that earlier instruction.
    if (newInstrPrev != nullptr)
    {
        instrPrev = newInstrPrev;
    }
    return instrPrev;
}
// Lowers StArrViewElem (asm.js/wasm typed-array-view store).
// Wasm stores go through LowerWasmMemOp; asm.js stores with a negative
// constant offset are dropped entirely, otherwise a bounds check is emitted
// unless the constant access is statically known to be safe.
IR::Instr *
Lowerer::LowerStArrViewElem(IR::Instr * instr)
{
#ifdef ASMJS_PLAT
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    // type of dst is the type of array
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = dst->AsIndirOpnd()->GetOffset();

    // Value and view element types must agree.
    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());
    Assert(!dst->IsInt64() || src1->IsInt64());

    IR::Instr * done;
    if (m_func->GetJITFunctionBody()->IsWasmFunction())
    {
        done = LowerWasmMemOp(instr, dst);
    }
    else if (offset < 0)
    {
        // Statically out of bounds in asm.js: the store is simply dropped.
        instr->Remove();
        return instrPrev;
    }
    else if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // Dynamic index, or a constant offset that still needs a runtime check:
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $store
        // $helper:
        // JMP $done
        // $store:
        // MOV dst([arrayBuffer + indexOpnd]), src1
        // $done:
        done = m_lowererMD.LowerAsmJsStElemHelper(instr);
    }
    else
    {
        // any constant access below 0x10000 is safe, as that is the min heap size
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        done = instr;
        if (src2)
        {
            instr->FreeSrc2();
        }
    }

    // Emit the actual store before the "done" point; int64 values need the
    // dedicated int64 assign lowering.
    if (src1->IsInt64())
    {
        IR::Instr* movInt64 = IR::Instr::New(Js::OpCode::Ld_I4, dst, src1, m_func);
        done->InsertBefore(movInt64);
        m_lowererMD.LowerInt64Assign(movInt64);
    }
    else
    {
        InsertMove(dst, src1, done);
    }

    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
  7836. IR::Instr *
  7837. Lowerer::LowerArrayDetachedCheck(IR::Instr * instr)
  7838. {
  7839. // TEST isDetached, isDetached
  7840. // JE Done
  7841. // Helper:
  7842. // CALL Js::Throw::OutOfMemory
  7843. // Done:
  7844. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  7845. IR::Instr * instrPrev = instr->m_prev;
  7846. IR::Opnd * isDetachedOpnd = instr->UnlinkSrc1();
  7847. Assert(isDetachedOpnd->IsIndirOpnd() || isDetachedOpnd->IsMemRefOpnd());
  7848. IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
  7849. IR::LabelInstr * helperLabel = InsertLabel(true, instr);
  7850. InsertTestBranch(isDetachedOpnd, isDetachedOpnd, Js::OpCode::BrNotNeq_A, doneLabel, helperLabel);
  7851. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_OutOfMemoryError);
  7852. return instrPrev;
  7853. }
  7854. ///----------------------------------------------------------------------------
  7855. ///
  7856. /// Lowerer::LowerDeleteElemI
  7857. ///
  7858. ///----------------------------------------------------------------------------
  7859. IR::Instr *
  7860. Lowerer::LowerDeleteElemI(IR::Instr * instr, bool strictMode)
  7861. {
  7862. IR::Instr *instrPrev;
  7863. IR::Opnd *src1 = instr->UnlinkSrc1();
  7864. AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on DeleteElementI");
  7865. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  7866. if (strictMode)
  7867. {
  7868. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  7869. }
  7870. instrPrev = instr->m_prev;
  7871. IR::JnHelperMethod helperMethod = IR::HelperOp_DeleteElementI;
  7872. IR::Opnd *indexOpnd = src1->AsIndirOpnd()->UnlinkIndexOpnd();
  7873. if (indexOpnd)
  7874. {
  7875. if (indexOpnd->GetType() == TyInt32)
  7876. {
  7877. helperMethod = IR::HelperOp_DeleteElementI_Int32;
  7878. }
  7879. else if (indexOpnd->GetType() == TyUint32)
  7880. {
  7881. helperMethod = IR::HelperOp_DeleteElementI_UInt32;
  7882. }
  7883. else
  7884. {
  7885. Assert(indexOpnd->GetType() == TyVar);
  7886. }
  7887. }
  7888. else
  7889. {
  7890. // No index; the offset identifies the element.
  7891. IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
  7892. indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
  7893. }
  7894. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  7895. LoadScriptContext(instr);
  7896. m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  7897. IR::Opnd *baseOpnd = src1->AsIndirOpnd()->UnlinkBaseOpnd();
  7898. m_lowererMD.LoadHelperArgument(instr, baseOpnd);
  7899. src1->Free(this->m_func);
  7900. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  7901. return instrPrev;
  7902. }
  7903. IR::Opnd *
  7904. Lowerer::GetForInEnumeratorFieldOpnd(IR::Opnd * forInEnumeratorOpnd, uint fieldOffset, IRType type)
  7905. {
  7906. if (forInEnumeratorOpnd->IsSymOpnd())
  7907. {
  7908. IR::SymOpnd * symOpnd = forInEnumeratorOpnd->AsSymOpnd();
  7909. return IR::SymOpnd::New(symOpnd->GetStackSym(), symOpnd->m_offset + fieldOffset, type, this->m_func);
  7910. }
  7911. Assert(forInEnumeratorOpnd->IsIndirOpnd());
  7912. IR::IndirOpnd * indirOpnd = forInEnumeratorOpnd->AsIndirOpnd();
  7913. return IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), indirOpnd->GetOffset() + fieldOffset, type, this->m_func);
  7914. }
// Emits the fast path for BrOnEmpty/BrOnNotEmpty (for-in iteration): when the
// ForInObjectEnumerator's cached data is still valid for the object's current
// type, the next property string and object index are fetched straight from
// the cache without calling the helper. Falls through to $helper (left just
// before the original instruction) when the fast path does not apply.
void
Lowerer::GenerateFastBrBReturn(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");

    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc1();
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * loopBody = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Bail to the helper unless the enumerator opted into the JIT fast path.
    // CMP forInEnumerator->canUseJitFastPath, 0
    // JEQ $helper
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, this->m_func), Js::OpCode::BrEq_A, labelHelper, instr);

    // The cached data is only valid while the object's type is unchanged.
    // MOV objectOpnd, forInEnumerator->enumerator.object
    // MOV cachedDataTypeOpnd, forInEnumerator->enumerator.cachedDataType
    // CMP cachedDataTypeOpnd, objectOpnd->type
    // JNE $helper
    IR::RegOpnd * objectOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(objectOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr), instr);
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(cachedDataTypeOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), instr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, this->m_func),
        Js::OpCode::BrNeq_A, labelHelper, instr);

    // Still entries left in the cache?
    // MOV cachedDataOpnd, forInEnumeratorOpnd->enumerator.cachedData
    // MOV enumeratedCountOpnd, forInEnumeratorOpnd->enumerator.enumeratedCount
    // CMP enumeratedCountOpnd, cachedDataOpnd->cachedCount
    // JLT $loopBody
    IR::RegOpnd * cachedDataOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr), instr);
    IR::RegOpnd * enumeratedCountOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(enumeratedCountOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32), instr);
    InsertCompareBranch(enumeratedCountOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCachedCount(), TyUint32, this->m_func),
        Js::OpCode::BrLt_A, loopBody, instr);

    // Cache exhausted: if enumeration completed, the "empty" outcome is known
    // without the helper; otherwise fall back to the helper.
    // CMP cacheData.completed, 0
    // JNE $loopEnd
    // JMP $helper
    IR::LabelInstr * labelAfter = instr->GetOrCreateContinueLabel();
    InsertCompareBranch(
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCompleted(), TyInt8, this->m_func),
        IR::IntConstOpnd::New(0, TyInt8, this->m_func),
        Js::OpCode::BrNeq_A, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? labelAfter : instr->AsBranchInstr()->GetTarget(), instr);
    InsertBranch(Js::OpCode::Br, labelHelper, instr);

    // $loopBody:
    instr->InsertBefore(loopBody);

    IR::Opnd * opndDst = instr->GetDst(); // ForIn result propertyString
    Assert(opndDst->IsRegOpnd());

    // Fetch the next cached property string into the result.
    // MOV stringsOpnd, cachedData->strings
    // MOV opndDst, stringsOpnd[enumeratedCount]
    IR::RegOpnd * stringsOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(stringsOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataStrings(), TyMachPtr, this->m_func), instr);
    InsertMove(opndDst,
        IR::IndirOpnd::New(stringsOpnd, enumeratedCountOpnd, m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), instr);

    // Record the matching object index back into the enumerator.
    // MOV indexesOpnd, cachedData->indexes
    // MOV objectIndexOpnd, indexesOpnd[enumeratedCount]
    // MOV forInEnumeratorOpnd->enumerator.objectIndex, objectIndexOpnd
    IR::RegOpnd * indexesOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(indexesOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataIndexes(), TyMachPtr, this->m_func), instr);
    IR::RegOpnd * objectIndexOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(objectIndexOpnd,
        IR::IndirOpnd::New(indexesOpnd, enumeratedCountOpnd, IndirScale4, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyUint32),
        objectIndexOpnd, instr);

    // Advance the enumeration position.
    // INC enumeratedCountOpnd
    // MOV forInEnumeratorOpnd->enumerator.enumeratedCount, enumeratedCountOpnd
    InsertAdd(false, enumeratedCountOpnd, enumeratedCountOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32),
        enumeratedCountOpnd, instr);

    // We know result propertyString (opndDst) != NULL
    InsertBranch(Js::OpCode::Br, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? instr->AsBranchInstr()->GetTarget() : labelAfter, instr);

    // $helper
    instr->InsertBefore(labelHelper);
    // $after
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBrB - lower 1-operand (boolean) conditional branch
///
/// Lowers BrOnEmpty/BrOnNotEmpty: calls the given helper with the for-in
/// enumerator and branches on the helper's boolean result.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrBReturn(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);

    // The helper takes the enumerator's address as its sole argument.
    IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc1(), instr);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, forInEnumeratorRegOpnd);

    // Generate helper call to convert the unknown operand to boolean
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    opndDst = instr->UnlinkDst();
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call. BrOnNotEmpty means "branch when the
    // helper returned something", hence BrTrue; BrOnEmpty is the inverse.
    instr->m_opcode = (instr->m_opcode == Js::OpCode::BrOnNotEmpty? Js::OpCode::BrTrue_A : Js::OpCode::BrFalse_A);
    instr->SetSrc1(opndDst);
    IR::Instr *loweredInstr;
    loweredInstr = this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, isHelper);
#if DBG
    // In helper blocks, mark the resulting branch so the helper-block checker
    // accepts a branch from helper code back to non-helper code.
    if (isHelper)
    {
        if (!loweredInstr->IsBranchInstr())
        {
            loweredInstr = loweredInstr->GetNextBranchOrLabel();
        }
        if (loweredInstr->IsBranchInstr())
        {
            loweredInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
        }
    }
#endif
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerMultiBr
///     - Lowers the instruction for dictionary look up(string case arms)
///
/// Calls the given helper with (switch string, dictionary, function start
/// label, function end label) and branches to the returned target address.
///
///----------------------------------------------------------------------------
IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");

    // Push the args in reverse order.

    // The end and start labels for the function are used to guarantee
    // that the dictionary jump destinations haven't been tampered with, so we
    // will always jump to some location within this function
    IR::LabelOpnd * endFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncEndLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, endFuncOpnd);
    IR::LabelOpnd * startFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncStartLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, startFuncOpnd);

    //Load the address of the dictionary pair- Js::StringDictionaryWrapper
    auto dictionary = instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary();
    if (this->m_func->IsOOPJIT())
    {
        // Out-of-process JIT: the dictionary lives in the NativeCodeData blob,
        // so compute its address relative to the blob's base register.
        auto dictionaryOffset = NativeCodeData::GetDataTotalOffset(dictionary);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);

        Lowerer::InsertLea(addressRegOpnd,
            IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dictionaryOffset, TyMachPtr,
#if DBG
                NativeCodeData::GetDataDescription(dictionary, this->m_func->m_alloc),
#endif
                this->m_func, true), instr);

        // Keep the data-blob base register alive across loop back edges.
        this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        m_lowererMD.LoadHelperArgument(instr, addressRegOpnd);
    }
    else
    {
        // In-process JIT: the dictionary can be referenced directly.
        IR::AddrOpnd* nativestringDictionaryOpnd = IR::AddrOpnd::New(dictionary, IR::AddrOpndKindDynamicMisc, this->m_func);
        m_lowererMD.LoadHelperArgument(instr, nativestringDictionaryOpnd);
    }

    //Load the String passed in the Switch expression for look up - JavascriptString
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call for dictionary lookup.

    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);

    symDst = StackSym::New(TyMachPtr,this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachPtr, this->m_func);

    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);

    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // The helper returns the branch target address; jump through it.
    instr->SetSrc1(instrCall->GetDst());
    m_lowererMD.LowerMultiBranch(instr);

    return instrPrev;
}
  8093. void
  8094. Lowerer::LowerJumpTableMultiBranch(IR::MultiBranchInstr * multiBrInstr, IR::RegOpnd * indexOpnd)
  8095. {
  8096. Func * func = this->m_func;
  8097. IR::Opnd * opndDst = IR::RegOpnd::New(TyMachPtr, func);
  8098. //Move the native address of the jump table to a register
  8099. IR::LabelInstr * nativeJumpTableLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  8100. nativeJumpTableLabel->m_isDataLabel = true;
  8101. IR::LabelOpnd * nativeJumpTable = IR::LabelOpnd::New(nativeJumpTableLabel, m_func);
  8102. IR::RegOpnd * nativeJumpTableReg = IR::RegOpnd::New(TyMachPtr, func);
  8103. m_lowererMD.CreateAssign(nativeJumpTableReg, nativeJumpTable, multiBrInstr);
  8104. BranchJumpTableWrapper * branchJumpTable = multiBrInstr->GetBranchJumpTable();
  8105. AssertMsg(branchJumpTable->labelInstr == nullptr, "Should not be already assigned");
  8106. branchJumpTable->labelInstr = nativeJumpTableLabel;
  8107. //Indirect addressing @ target location in the jump table.
  8108. //MOV eax, [nativeJumpTableReg + (offset * indirScale)]
  8109. BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  8110. IR::Opnd * opndSrc = IR::IndirOpnd::New(nativeJumpTableReg, indexOpnd, indirScale, TyMachReg, this->m_func);
  8111. IR::Instr * indirInstr = m_lowererMD.CreateAssign(opndDst, opndSrc, multiBrInstr);
  8112. //MultiBr eax
  8113. multiBrInstr->SetSrc1(indirInstr->GetDst());
  8114. //Jump to the address at the target location in the jump table
  8115. m_lowererMD.LowerMultiBranch(multiBrInstr);
  8116. }
  8117. ///----------------------------------------------------------------------------
  8118. ///
  8119. /// Lowerer::LowerMultiBr
  8120. /// - Lowers the instruction for jump table(consecutive integer case arms)
  8121. ///
  8122. ///----------------------------------------------------------------------------
  8123. IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr)
  8124. {
  8125. IR::Instr * instrPrev = instr->m_prev;
  8126. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
  8127. AssertMsg(instr->IsBranchInstr() && instr->AsBranchInstr()->IsMultiBranch(), "Bad Instruction Lowering Call to LowerMultiBr()");
  8128. IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
  8129. IR::RegOpnd * offset = instr->UnlinkSrc1()->AsRegOpnd();
  8130. LowerJumpTableMultiBranch(multiBrInstr, offset);
  8131. return instrPrev;
  8132. }
  8133. IR::Instr* Lowerer::LowerBrBMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8134. {
  8135. IR::Instr * instrPrev;
  8136. IR::Instr * instrCall;
  8137. IR::HelperCallOpnd * opndHelper;
  8138. IR::Opnd * opndSrc;
  8139. IR::Opnd * opndDst;
  8140. StackSym * symDst;
  8141. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8142. instrPrev = LoadScriptContext(instr);
  8143. opndSrc = instr->UnlinkSrc1();
  8144. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8145. // Generate helper call to convert the unknown operand to boolean
  8146. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8147. symDst = StackSym::New(TyVar, this->m_func);
  8148. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8149. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8150. instr->InsertBefore(instrCall);
  8151. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8152. // Branch on the result of the call
  8153. instr->SetSrc1(opndDst);
  8154. m_lowererMD.LowerCondBranch(instr);
  8155. return instrPrev;
  8156. }
  8157. IR::Instr* Lowerer::LowerBrOnObject(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8158. {
  8159. IR::Instr * instrPrev;
  8160. IR::Instr * instrCall;
  8161. IR::HelperCallOpnd * opndHelper;
  8162. IR::Opnd * opndSrc;
  8163. IR::Opnd * opndDst;
  8164. StackSym * symDst;
  8165. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8166. opndSrc = instr->UnlinkSrc1();
  8167. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8168. // Generate helper call to check if the operand's type is object
  8169. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8170. symDst = StackSym::New(TyVar, this->m_func);
  8171. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8172. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8173. instr->InsertBefore(instrCall);
  8174. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8175. // Branch on the result of the call
  8176. instr->SetSrc1(opndDst);
  8177. m_lowererMD.LowerCondBranch(instr);
  8178. return instrPrev;
  8179. }
// Lowers a branch that tests whether the source operand is a class constructor:
// the check is delegated to `helperMethod`, and the branch is rewritten to test
// the helper's result. Returns the first instruction of the lowered sequence.
IR::Instr * Lowerer::LowerBrOnClassConstructor(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");

    // The tested value is the only helper argument (no script context is loaded).
    opndSrc = instr->UnlinkSrc1();
    instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to check if the operand is a class constructor
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyVar, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->SetSrc1(opndDst);
    m_lowererMD.LowerCondBranch(instr);

    return instrPrev;
}
// Lowers an equality-style compare (Cm*Eq/Cm*Neq) that produces a value.
// Tries a cascade of fast paths (float compare, typeof-pattern, strict-equal
// pattern, likely-type checks, tagged-int compare) before falling back to the
// generic helper `helper`. Returns the instruction preceding the lowered
// sequence so the caller can resume iteration there.
IR::Instr *
Lowerer::LowerEqualityCompare(IR::Instr* instr, IR::JnHelperMethod helper)
{
    IR::Instr * instrPrev = instr->m_prev;
    bool needHelper = true;
    bool fNoLower = false;

    if (instr->GetSrc1()->IsFloat())
    {
        // Float-typed operands compare directly in the MD lowerer.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths())
    {
        // Fast paths disabled: go straight to the helper.
        LowerBinaryHelperMem(instr, helper);
    }
    else if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
    {
        // Recognized a typeof-comparison pattern; fNoLower tells us whether the
        // fast path fully replaced the instruction or a helper call is still needed.
        if (!fNoLower)
        {
            LowerBinaryHelperMem(instr, helper);
        }
    }
    else if (instr->m_opcode == Js::OpCode::CmSrEq_A && TryGenerateFastCmSrEq(instr))
    {
        // Strict-equal fast path handled everything; nothing left to lower.
    }
    else
    {
        bool hasStrFastpath = GenerateFastBrOrCmString(instr);
        if(GenerateFastCmEqLikely(instr, &needHelper, hasStrFastpath) || GenerateFastEqBoolInt(instr, &needHelper, hasStrFastpath))
        {
            if (needHelper)
            {
                LowerBinaryHelperMem(instr, helper);
            }
            else
            {
                // The fast path covers all cases; the original instruction is dead.
                instr->Remove();
            }
        }
        else if (!m_lowererMD.GenerateFastCmXxTaggedInt(instr, hasStrFastpath))
        {
            LowerBinaryHelperMem(instr, helper);
        }
    }
    return instrPrev;
}
  8249. IR::Instr *
  8250. Lowerer::LowerEqualityBranch(IR::Instr* instr, IR::JnHelperMethod helper)
  8251. {
  8252. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  8253. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  8254. IR::Instr * instrPrev = instr->m_prev;
  8255. bool fNoLower = false;
  8256. const bool noFastPath = PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths();
  8257. if (instr->GetSrc1()->IsFloat())
  8258. {
  8259. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  8260. m_lowererMD.LowerToFloat(instr);
  8261. return instrPrev;
  8262. }
  8263. if (noFastPath)
  8264. {
  8265. LowerBrCMem(instr, helper, true, false /*isHelper*/);
  8266. return instrPrev;
  8267. }
  8268. if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
  8269. {
  8270. if (!fNoLower)
  8271. {
  8272. LowerBrCMem(instr, helper, false, false /*isHelper*/);
  8273. }
  8274. return instrPrev;
  8275. }
  8276. bool done = false;
  8277. switch(instr->m_opcode)
  8278. {
  8279. case Js::OpCode::BrNeq_A:
  8280. case Js::OpCode::BrNotEq_A:
  8281. done = TryGenerateFastBrNeq(instr);
  8282. break;
  8283. case Js::OpCode::BrEq_A:
  8284. case Js::OpCode::BrNotNeq_A:
  8285. done = TryGenerateFastBrEq(instr);
  8286. break;
  8287. case Js::OpCode::BrSrEq_A:
  8288. case Js::OpCode::BrSrNotNeq_A:
  8289. done = GenerateFastBrSrEq(instr, srcReg1, srcReg2, &instrPrev, noFastPath);
  8290. break;
  8291. case Js::OpCode::BrSrNeq_A:
  8292. case Js::OpCode::BrSrNotEq_A:
  8293. done = GenerateFastBrSrNeq(instr, &instrPrev);
  8294. break;
  8295. default:
  8296. Assume(UNREACHED);
  8297. }
  8298. if (done)
  8299. {
  8300. return instrPrev;
  8301. }
  8302. bool needHelper = true;
  8303. IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  8304. bool hasStrFastPath = GenerateFastBrOrCmString(instr);
  8305. if (GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper, hasStrFastPath) || GenerateFastEqBoolInt(instr, &needHelper, hasStrFastPath))
  8306. {
  8307. if (needHelper)
  8308. {
  8309. LowerBrCMem(instr, helper, false);
  8310. }
  8311. }
  8312. else if (needHelper)
  8313. {
  8314. LowerBrCMem(instr, helper, false, hasStrFastPath);
  8315. }
  8316. if (!needHelper)
  8317. {
  8318. instr->Remove();
  8319. }
  8320. return instrPrev;
  8321. }
// Lowers a two-source compare-branch to a helper call followed by a
// BrTrue/BrFalse on the helper's result.
// - noMathFastPath: skip GenerateFastCondBranch (math fast path) generation.
// - isHelper: whether this code is already on a helper (cold) path; forwarded
//   to the bailout-aware conditional-branch lowering.
// Not-equal helpers are rewritten to their equal counterparts with the branch
// inverted, since Op_NotEqual()/Op_NotStrictEqual() just negate the positive
// helpers. Returns the instruction preceding the lowered sequence.
IR::Instr *
Lowerer::LowerBrCMem(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isHelper)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    bool inverted = false;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() != nullptr, "Expected 2 src opnds on BrC");

    if (!noMathFastPath && !this->GenerateFastCondBranch(instr->AsBranchInstr(), &isHelper))
    {
        // The fast path fully lowered the branch; no helper call needed.
        return instrPrev;
    }

    // Push the args in reverse order.
    // The string-equality helpers take neither a script context nor (for the
    // empty-string variant) a second operand.
    const bool loadScriptContext = !(helperMethod == IR::HelperOp_StrictEqualString || helperMethod == IR::HelperOp_StrictEqualEmptyString);
    const bool loadArg2 = !(helperMethod == IR::HelperOp_StrictEqualEmptyString);

    if (helperMethod == IR::HelperOp_NotEqual)
    {
        // Op_NotEqual() returns !Op_Equal(). It is faster to call Op_Equal() directly.
        helperMethod = IR::HelperOp_Equal;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    else if(helperMethod == IR::HelperOp_NotStrictEqual)
    {
        // Op_NotStrictEqual() returns !Op_StrictEqual(). It is faster to call Op_StrictEqual() directly.
        helperMethod = IR::HelperOp_StrictEqual;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }

    if (loadScriptContext)
        LoadScriptContext(instr);

    opndSrc = instr->UnlinkSrc2();
    if (loadArg2)
        m_lowererMD.LoadHelperArgument(instr, opndSrc);

    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to compare the source operands.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachReg, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        // Record the inversion so bailout processing interprets the branch
        // condition correctly.
        if (instr->HasBailOutInfo())
        {
            instr->GetBailOutInfo()->isInvertedBranch = true;
        }
        break;

    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotLe_A:
    case Js::OpCode::BrNotLt_A:
        // The "not" relational forms branch when the helper returns false.
        inverted = true;
        break;
    }

    // Branch if the result is "true".
    instr->SetSrc1(opndDst);
    instr->m_opcode = (inverted ? Js::OpCode::BrFalse_A : Js::OpCode::BrTrue_A);
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, !noMathFastPath && isHelper);

    return instrPrev;
}
  8392. IR::Instr *
  8393. Lowerer::LowerBrFncApply(IR::Instr * instr, IR::JnHelperMethod helperMethod) {
  8394. IR::Instr * instrPrev = instr->m_prev;
  8395. IR::Instr * instrCall;
  8396. IR::HelperCallOpnd * opndHelper;
  8397. IR::Opnd * opndSrc;
  8398. IR::Opnd * opndDst;
  8399. StackSym * symDst;
  8400. AssertMsg(instr->GetSrc1() != nullptr, "Expected 1 src opnd on BrFncApply");
  8401. LoadScriptContext(instr);
  8402. opndSrc = instr->UnlinkSrc1();
  8403. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8404. // Generate helper call to compare the source operands.
  8405. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8406. symDst = StackSym::New(TyMachReg, this->m_func);
  8407. opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
  8408. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8409. instr->InsertBefore(instrCall);
  8410. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8411. // Branch if the result is "true".
  8412. instr->SetSrc1(opndDst);
  8413. instr->m_opcode = Js::OpCode::BrTrue_A;
  8414. m_lowererMD.LowerCondBranch(instr);
  8415. return instrPrev;
  8416. }
  8417. ///----------------------------------------------------------------------------
  8418. ///
  8419. /// Lowerer::LowerBrProperty - lower branch-on-has/no-property
  8420. ///
  8421. ///----------------------------------------------------------------------------
// Lowers BrOnHasProperty/BrOnNoProperty: calls `helper` with the property sym
// and script context, then branches on the helper's boolean result (BrTrue for
// "has", BrFalse for "no"). Returns the first instruction of the lowered sequence.
IR::Instr *
Lowerer::LowerBrProperty(IR::Instr * instr, IR::JnHelperMethod helper)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;

    opndSrc = instr->UnlinkSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected propertySym as src of BrProperty");

    // Helper args are pushed in reverse order: script context first, then the
    // property sym (owner/propertyId) arguments.
    instrPrev = LoadScriptContext(instr);
    this->LoadPropertySymAsArgument(instr, opndSrc);

    opndHelper = IR::HelperCallOpnd::New(helper, this->m_func);
    opndDst = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->SetSrc1(opndDst);
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrOnHasProperty:
        instr->m_opcode = Js::OpCode::BrTrue_A;
        break;
    case Js::OpCode::BrOnNoProperty:
        instr->m_opcode = Js::OpCode::BrFalse_A;
        break;
    default:
        AssertMsg(0, "Unknown opcode on BrProperty branch");
        break;
    }
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, false);

    return instrPrev;
}
  8457. ///----------------------------------------------------------------------------
  8458. ///
  8459. /// Lowerer::LowerElementUndefined
  8460. ///
  8461. ///----------------------------------------------------------------------------
  8462. IR::Instr *
  8463. Lowerer::LowerElementUndefined(IR::Instr * instr, IR::JnHelperMethod helper)
  8464. {
  8465. IR::Opnd *dst = instr->UnlinkDst();
  8466. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined");
  8467. // Pass the property sym to store to
  8468. this->LoadPropertySymAsArgument(instr, dst);
  8469. m_lowererMD.ChangeToHelperCall(instr, helper);
  8470. return instr;
  8471. }
  8472. IR::Instr *
  8473. Lowerer::LowerElementUndefinedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  8474. {
  8475. // Pass script context
  8476. IR::Instr * instrPrev = LoadScriptContext(instr);
  8477. this->LowerElementUndefined(instr, helper);
  8478. return instrPrev;
  8479. }
  8480. IR::Instr *
  8481. Lowerer::LowerLdElemUndef(IR::Instr * instr)
  8482. {
  8483. if (this->m_func->GetJITFunctionBody()->IsEval())
  8484. {
  8485. return LowerElementUndefinedMem(instr, IR::HelperOp_LdElemUndefDynamic);
  8486. }
  8487. else
  8488. {
  8489. return LowerElementUndefined(instr, IR::HelperOp_LdElemUndef);
  8490. }
  8491. }
  8492. ///----------------------------------------------------------------------------
  8493. ///
  8494. /// Lowerer::LowerElementUndefinedScoped
  8495. ///
  8496. ///----------------------------------------------------------------------------
  8497. IR::Instr *
  8498. Lowerer::LowerElementUndefinedScoped(IR::Instr * instr, IR::JnHelperMethod helper)
  8499. {
  8500. IR::Instr * instrPrev = instr->m_prev;
  8501. // Pass the default instance
  8502. IR::Opnd *src = instr->UnlinkSrc1();
  8503. m_lowererMD.LoadHelperArgument(instr, src);
  8504. // Pass the property sym to store to
  8505. IR::Opnd * dst = instr->UnlinkDst();
  8506. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined Scoped");
  8507. this->LoadPropertySymAsArgument(instr, dst);
  8508. m_lowererMD.ChangeToHelperCall(instr, helper);
  8509. return instrPrev;
  8510. }
  8511. IR::Instr *
  8512. Lowerer::LowerElementUndefinedScopedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  8513. {
  8514. // Pass script context
  8515. IR::Instr * instrPrev = LoadScriptContext(instr);
  8516. this->LowerElementUndefinedScoped(instr, helper);
  8517. return instrPrev;
  8518. }
  8519. void
  8520. Lowerer::LowerStLoopBodyCount(IR::Instr* instr)
  8521. {
  8522. intptr_t header = m_func->m_workItem->GetLoopHeaderAddr();
  8523. IR::MemRefOpnd *loopBodyCounterOpnd = IR::MemRefOpnd::New((BYTE*)(header) + Js::LoopHeader::GetOffsetOfProfiledLoopCounter(), TyUint32, this->m_func);
  8524. instr->SetDst(loopBodyCounterOpnd);
  8525. instr->ReplaceSrc1(instr->GetSrc1()->AsRegOpnd()->UseWithNewType(TyUint32, this->m_func));
  8526. IR::AutoReuseOpnd(loopBodyCounterOpnd, this->m_func);
  8527. m_lowererMD.ChangeToAssign(instr);
  8528. return;
  8529. }
#if !FLOATVAR
// Non-FLOATVAR builds only: before storing to a slot, box the stack-allocated
// number into a heap var via the BoxStackNumber helper, then lower the store
// as a regular StSlot of the boxed value.
IR::Instr *
Lowerer::LowerStSlotBoxTemp(IR::Instr *stSlot)
{
    // regVar = BoxStackNumber(src, scriptContext)
    IR::RegOpnd * regSrc = stSlot->UnlinkSrc1()->AsRegOpnd();
    IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
    instr->SetDst(regVar);
    instr->SetSrc1(regSrc);
    stSlot->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperBoxStackNumber);
    // The slot store now stores the boxed var rather than the raw stack number.
    stSlot->SetSrc1(regVar);
    return this->LowerStSlot(stSlot);
}
#endif
// Builds the memory operand actually used for a slot load/store: either a
// direct stack-sym access (for stack-allocated closure / frame-display syms)
// or an indirect access off the slot-array pointer at an offset derived from
// the property id.
IR::Opnd *
Lowerer::CreateOpndForSlotAccess(IR::Opnd * opnd)
{
    IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
    PropertySym * dstSym = symOpnd->m_sym->AsPropertySym();

    if (!m_func->IsLoopBody() &&
        m_func->DoStackFrameDisplay() &&
        (dstSym->m_stackSym == m_func->GetLocalClosureSym() || dstSym->m_stackSym == m_func->GetLocalFrameDisplaySym()))
    {
        // Stack closure syms are made to look like slot accesses for the benefit of GlobOpt, so that it can do proper
        // copy prop and implicit call bailout. But what we really want is local stack load/store.
        // Don't do this for loop body, though, since we don't have the value saved on the stack.
        IR::SymOpnd * closureSym = IR::SymOpnd::New(dstSym->m_stackSym, 0, TyMachReg, this->m_func);
        closureSym->GetStackSym()->m_isClosureSym = true;
        return closureSym;
    }

    // Outside asm.js, the property id is a slot index and must be scaled by the
    // slot element size; asm.js property ids are already byte offsets.
    int32 offset = dstSym->m_propertyId;
    if (!m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        offset = offset * TySize[opnd->GetType()];
    }
#ifdef ASMJS_PLAT
    if (m_func->IsTJLoopBody())
    {
        // Template-JIT loop body: rebase the offset by the total asm.js
        // variable area size.
        offset = offset - m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetTotalSizeInBytes();
    }
#endif
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(symOpnd->CreatePropertyOwnerOpnd(m_func),
        offset, opnd->GetType(), this->m_func);
    return indirOpnd;
}
// StSlot stores the nth Var in the buffer pointed to by the property sym's
// stack sym. The symbolic dst is replaced by a concrete memory operand from
// CreateOpndForSlotAccess, then the store is lowered to an assign (int64
// assigns take a dedicated path; others get a write-barrier assign).
// Returns the (possibly replaced) store instruction.
IR::Instr *
Lowerer::LowerStSlot(IR::Instr *instr)
{
    IR::Opnd * dstOpnd = instr->UnlinkDst();
    AssertMsg(dstOpnd, "Expected dst opnd on StSlot");
    IR::Opnd * dstNew = this->CreateOpndForSlotAccess(dstOpnd);
    dstOpnd->Free(this->m_func);
    instr->SetDst(dstNew);

    if (instr->GetDst() && instr->GetDst()->IsInt64())
    {
        m_lowererMD.LowerInt64Assign(instr);
    }
    else
    {
        // Slot stores go through the write-barrier-aware assign.
        instr = m_lowererMD.ChangeToWriteBarrierAssign(instr, this->m_func);
    }
    return instr;
}
// StSlotChkUndecl: lower the slot store like LowerStSlot, and additionally emit
// a use-before-declaration check (GenUndeclChk) against the slot location,
// inserted ahead of the lowered store.
IR::Instr *
Lowerer::LowerStSlotChkUndecl(IR::Instr *instrStSlot)
{
    Assert(instrStSlot->GetSrc2() != nullptr);

    // Src2 is required only to avoid dead store false positives during GlobOpt.
    instrStSlot->FreeSrc2();

    // Build the slot operand for the check before LowerStSlot consumes the dst.
    IR::Opnd *dstOpnd = this->CreateOpndForSlotAccess(instrStSlot->GetDst());
    IR::Instr *instr = this->LowerStSlot(instrStSlot);
    this->GenUndeclChk(instr, dstOpnd);
    return instr;
}
  8607. void Lowerer::LowerProfileLdSlot(IR::Opnd *const valueOpnd, Func *const ldSlotFunc, const Js::ProfileId profileId, IR::Instr *const insertBeforeInstr)
  8608. {
  8609. Assert(valueOpnd);
  8610. Assert(profileId != Js::Constants::NoProfileId);
  8611. Assert(insertBeforeInstr);
  8612. Func *const irFunc = insertBeforeInstr->m_func;
  8613. m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::Opnd::CreateProfileIdOpnd(profileId, irFunc));
  8614. m_lowererMD.LoadHelperArgument(insertBeforeInstr, CreateFunctionBodyOpnd(ldSlotFunc));
  8615. m_lowererMD.LoadHelperArgument(insertBeforeInstr, valueOpnd);
  8616. IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, irFunc);
  8617. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfileLdSlot, irFunc));
  8618. insertBeforeInstr->InsertBefore(callInstr);
  8619. m_lowererMD.LowerCall(callInstr, 0);
  8620. }
// LdSlot loads the nth Var from the buffer pointed to by the property sym's
// stack sym. The symbolic src is replaced by a concrete memory operand from
// CreateOpndForSlotAccess, then the load is lowered to an assign (int64 loads
// take a dedicated path). Mirrors LowerStSlot.
void
Lowerer::LowerLdSlot(IR::Instr *instr)
{
    IR::Opnd * srcOpnd = instr->UnlinkSrc1();
    AssertMsg(srcOpnd, "Expected src opnd on LdSlot");
    IR::Opnd * srcNew = this->CreateOpndForSlotAccess(srcOpnd);
    srcOpnd->Free(this->m_func);
    instr->SetSrc1(srcNew);

    if (instr->GetDst() && instr->GetDst()->IsInt64())
    {
        m_lowererMD.LowerInt64Assign(instr);
    }
    else
    {
        m_lowererMD.ChangeToAssign(instr);
    }
}
  8638. IR::Instr *
  8639. Lowerer::LowerChkUndecl(IR::Instr *instr)
  8640. {
  8641. IR::Instr *instrPrev = instr->m_prev;
  8642. this->GenUndeclChk(instr, instr->GetSrc1());
  8643. instr->Remove();
  8644. return instrPrev;
  8645. }
// Emits, before instrInsert: if (opnd == UndeclBlockVar sentinel) raise a
// JSERR_UseBeforeDeclaration reference error via the runtime helper; otherwise
// fall through to labelContinue.
void
Lowerer::GenUndeclChk(IR::Instr *instrInsert, IR::Opnd *opnd)
{
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // Common case: the value is not the "undeclared" sentinel; skip the throw.
    InsertCompareBranch(
        opnd,
        LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndeclBlockVar),
        Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // Cold (helper) path: raise the use-before-declaration error.
    IR::LabelInstr *labelThrow = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    instrInsert->InsertBefore(labelThrow);

    IR::Instr *instr = IR::Instr::New(
        Js::OpCode::RuntimeReferenceError,
        IR::RegOpnd::New(TyMachReg, m_func),
        IR::IntConstOpnd::New(SCODE_CODE(JSERR_UseBeforeDeclaration), TyInt32, m_func),
        m_func);
    instrInsert->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);

    instrInsert->InsertBefore(labelContinue);
}
  8665. ///----------------------------------------------------------------------------
  8666. ///
  8667. /// Lowerer::LowerStElemC
  8668. ///
  8669. ///----------------------------------------------------------------------------
// Lowers StElemC / StArrSegElemC: a store to an array element at a constant
// index. The index is folded together with the array-type-specific header size
// into a single byte offset, and the store becomes a direct (write-barrier)
// assign. Native-array cases may need an array-type test and/or bailout paths.
// Returns the instruction preceding the lowered sequence.
IR::Instr *
Lowerer::LowerStElemC(IR::Instr * stElem)
{
    IR::Instr *instrPrev = stElem->m_prev;
    IR::IndirOpnd * indirOpnd = stElem->GetDst()->AsIndirOpnd();
    IR::RegOpnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    Assert(!indexOpnd || indexOpnd->m_sym->IsIntConst());

    // Recover the constant element index, either from the (constant) index
    // operand or from the indir's existing offset.
    IntConstType value;
    if (indexOpnd)
    {
        value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
        indexOpnd->Free(this->m_func);
    }
    else
    {
        value = (IntConstType)indirOpnd->GetOffset();
    }

    if (stElem->IsJitProfilingInstr())
    {
        // Simple-JIT profiling build: route the store through a profiling
        // helper (value, index, base) instead of emitting a direct store.
        Assert(stElem->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
        m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
        const auto meth = stElem->m_opcode == Js::OpCode::StElemC ? IR::HelperSimpleStoreArrayHelper : IR::HelperSimpleStoreArraySegHelper;
        stElem->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
        m_lowererMD.LoadHelperArgument(stElem, IR::IntConstOpnd::New(value, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
        stElem->UnlinkDst()->Free(m_func);
        m_lowererMD.LowerCall(stElem, 0);
        return instrPrev;
    }

    IntConstType base;
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsLikelyNativeArray())
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);

        // If the instruction carries bailout info, split the bailout off into a
        // separate BailOut instruction behind labelBailOut, with the main path
        // jumping over it to labelDone.
        IR::LabelInstr *labelBailOut = nullptr;
        IR::Instr *instrBailOut = nullptr;
        if (stElem->HasBailOutInfo())
        {
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrBailOut = stElem;
            stElem = IR::Instr::New(instrBailOut->m_opcode, m_func);
            instrBailOut->TransferTo(stElem);
            instrBailOut->InsertBefore(stElem);
            IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertBranch(Js::OpCode::Br, labelDone, instrBailOut);
            instrBailOut->InsertBefore(labelBailOut);
            instrBailOut->InsertAfter(labelDone);
            instrBailOut->m_opcode = Js::OpCode::BailOut;
            GenerateBailOut(instrBailOut);
        }

        if (!baseValueType.IsObject())
        {
            // Likely native array: do a vtable check and bail if it fails.
            Assert(labelBailOut);
            GenerateArrayTest(baseOpnd, labelBailOut, labelBailOut, stElem, true);
        }

        if (stElem->GetSrc1()->GetType() == TyVar)
        {
            // Storing a non-specialized value. This may cause array conversion, which invalidates all the code
            // that depends on the array check we've already done.
            // Call a helper that returns the type ID of the resulting array, check it here against the one we
            // expect, and bail if it fails.
            Assert(labelBailOut);

            // Call a helper to (try and) unbox the var and store it.
            // If we had to convert the array to do the store, we'll bail.
            LoadScriptContext(stElem);
            m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
            IR::IntConstOpnd * intConstIndexOpnd = IR::IntConstOpnd::New(value, TyUint32, m_func);
            m_lowererMD.LoadHelperArgument(stElem, intConstIndexOpnd);
            m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
            IR::JnHelperMethod helperMethod;
            if (baseValueType.HasIntElements())
            {
                helperMethod = IR::HelperScrArr_SetNativeIntElementC;
            }
            else
            {
                helperMethod = IR::HelperScrArr_SetNativeFloatElementC;
            }
            // The helper returns the array's resulting type id; compare it to
            // the expected native type and bail out on mismatch.
            IR::Instr *instrInsertBranch = stElem->m_next;
            IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyUint32, m_func);
            stElem->ReplaceDst(typeIdOpnd);
            m_lowererMD.ChangeToHelperCall(stElem, helperMethod);
            InsertCompareBranch(
                typeIdOpnd,
                IR::IntConstOpnd::New(
                    baseValueType.HasIntElements() ?
                    Js::TypeIds_NativeIntArray : Js::TypeIds_NativeFloatArray, TyUint32, m_func),
                Js::OpCode::BrNeq_A,
                labelBailOut,
                instrInsertBranch);
            return instrPrev;
        }
        else if (baseValueType.HasIntElements() && labelBailOut)
        {
            // Int arrays use a "missing item" sentinel; storing the sentinel
            // value itself would corrupt the array, so it must bail out.
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
            IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func);
            if (!stElem->GetSrc1()->IsEqual(missingElementOpnd))
            {
                InsertCompareBranch(stElem->GetSrc1(), missingElementOpnd, Js::OpCode::BrEq_A, labelBailOut, stElem, true);
            }
            else
            {
                //Its a missing value store and data flow proves that src1 is always missing value. Array cannot be an int array at the first place
                //if this code was ever hit. Just bailout, this code path would be updated with the profile information next time around.
                InsertBranch(Js::OpCode::Br, labelBailOut, stElem);
#if DBG
                labelBailOut->m_noHelperAssert = true;
#endif
                stElem->Remove();
                return instrPrev;
            }
        }
        else
        {
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
        }

        stElem->GetDst()->SetType(stElem->GetSrc1()->GetType());

        // The element lives in the inline head segment right after the array
        // object; fold the header size into the base offset.
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        if(baseValueType.HasIntElements())
        {
            base = sizeof(Js::JavascriptNativeIntArray) + offsetof(Js::SparseArraySegment<int32>, elements);
        }
        else
        {
            base = sizeof(Js::JavascriptNativeFloatArray) + offsetof(Js::SparseArraySegment<double>, elements);
        }
    }
    else if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::Array)
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        base = sizeof(Js::JavascriptArray) + offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }
    else
    {
        // StArrSegElemC (or StElemC with a segment base): the base operand is
        // the segment itself, so only the elements offset applies.
        Assert(stElem->m_opcode == Js::OpCode::StElemC || stElem->m_opcode == Js::OpCode::StArrSegElemC);
        Assert(indirOpnd->GetBaseOpnd()->GetType() == TyVar);
        base = offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }

    Assert(value >= 0);

    // MOV [r3 + offset(element) + index], src
    const BYTE indirScale =
        baseValueType.IsLikelyAnyOptimizedArray() ? GetArrayIndirScale(baseValueType) : m_lowererMD.GetDefaultIndirScale();
    IntConstType offset = base + (value << indirScale);
    Assert(Math::FitsInDWord(offset));
    indirOpnd->SetOffset((int32)offset);

    m_lowererMD.ChangeToWriteBarrierAssign(stElem, this->m_func);

    return instrPrev;
}
  8821. void Lowerer::LowerLdArrHead(IR::Instr *const instr)
  8822. {
  8823. IR::RegOpnd *array = instr->UnlinkSrc1()->AsRegOpnd();
  8824. const ValueType arrayValueType(array->GetValueType());
  8825. Assert(arrayValueType.IsAnyOptimizedArray());
  8826. if(arrayValueType.GetObjectType() == ObjectType::ObjectWithArray)
  8827. {
  8828. array = LoadObjectArray(array, instr);
  8829. }
  8830. // mov arrayHeadSegment, [array + offset(headSegment)]
  8831. instr->GetDst()->SetType(TyMachPtr);
  8832. instr->SetSrc1(
  8833. IR::IndirOpnd::New(
  8834. array,
  8835. GetArrayOffsetOfHeadSegment(arrayValueType),
  8836. TyMachPtr,
  8837. instr->m_func));
  8838. LowererMD::ChangeToAssign(instr);
  8839. }
  8840. // Creates the rest parameter array.
  8841. // Var JavascriptArray::OP_NewScArrayWithElements(
  8842. // uint32 elementCount,
  8843. // Var *elements,
  8844. // ScriptContext* scriptContext)
// Creates the rest parameter array by calling
//   Var JavascriptArray::OP_NewScArrayWithElements(
//       uint32 elementCount, Var *elements, ScriptContext* scriptContext)
// with a pointer to the excess in-params on the stack (or in the generator's
// args area). The call is inserted AFTER `instr`; returns the call instruction.
IR::Instr *Lowerer::LowerRestParameter(IR::Opnd *formalsOpnd, IR::Opnd *dstOpnd, IR::Opnd *excessOpnd, IR::Instr *instr, IR::RegOpnd *generatorArgsPtrOpnd)
{
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, dstOpnd, instr->m_func);
    instr->InsertAfter(helperCallInstr);

    // Var JavascriptArray::OP_NewScArrayWithElements(
    //      int32 elementCount,
    //      Var *elements,
    //      ScriptContext* scriptContext)
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArrayWithElements;

    // Helper args in reverse order: script context, elements pointer, count.
    LoadScriptContext(helperCallInstr);

    BOOL isGenerator = this->m_func->GetJITFunctionBody()->IsCoroutine();

    // Elements pointer = ebp + (formals count + formals offset + 1)*sizeof(Var);
    // generators index off their dedicated args pointer instead of the frame pointer.
    IR::RegOpnd *srcOpnd = isGenerator ? generatorArgsPtrOpnd : IR::Opnd::CreateFramePointerOpnd(this->m_func);
    uint16 actualOffset = isGenerator ? 0 : GetFormalParamOffset(); //4
    IR::RegOpnd *argPtrOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertAdd(false, argPtrOpnd, srcOpnd, IR::IntConstOpnd::New((formalsOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr, TyMachPtr, this->m_func), helperCallInstr);
    m_lowererMD.LoadHelperArgument(helperCallInstr, argPtrOpnd);

    m_lowererMD.LoadHelperArgument(helperCallInstr, excessOpnd);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    return helperCallInstr;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerArgIn
///
/// This function checks the passed-in argument count against the index of this
/// argument and uses null for a param value if the caller didn't explicitly
/// pass anything.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerArgIn(IR::Instr *instrArgIn)
{
    IR::LabelInstr *   labelDone;
    IR::LabelInstr *   labelUndef;
    IR::LabelInstr *   labelNormal;
    IR::LabelInstr *   labelInit;
    IR::LabelInstr *   labelInitNext;
    IR::BranchInstr *  instrBranch;
    IR::Instr *        instrArgInNext;
    IR::Instr *        instrInsert;
    IR::Instr *        instrPrev;
    IR::Instr *        instrResume = nullptr;
    IR::Opnd *         dstOpnd;
    IR::Opnd *         srcOpnd;
    IR::Opnd *         opndUndef;
    Js::ArgSlot        argIndex;
    StackSym *         symParam;
    BOOLEAN            isDuplicate;
    IR::RegOpnd *      generatorArgsPtrOpnd = nullptr;

    // We start with:
    //   s1 = ArgIn_A param1
    //   s2 = ArgIn_A param2
    //   ...
    //   sn = ArgIn_A paramn
    //
    // We want to end up with:
    //
    //   s1 = ArgIn_A param1            -- Note that this is unconditional
    //   count = (load from param area)
    //   BrLt_A $start, count, n        -- Forward cbranch to the uncommon case
    //   Br $Ln
    // $start:
    //   sn = assign undef
    //   BrGe_A $Ln-1, count, n-1
    //   sn-1 = assign undef
    //   ...
    //   s2 = assign undef
    //   Br $done
    // $Ln:
    //   sn = assign paramn
    // $Ln-1:
    //   sn-1 = assign paramn-1
    //   ...
    //   s2 = assign param2
    // $done:

    AnalysisAssert(instrArgIn);

    IR::Opnd *restDst = nullptr;
    bool hasRest = instrArgIn->m_opcode == Js::OpCode::ArgIn_Rest;
    if (hasRest)
    {
        IR::Instr *restInstr = instrArgIn;
        restDst = restInstr->UnlinkDst();
        if (m_func->GetJITFunctionBody()->HasImplicitArgIns() && m_func->argInsCount > 0)
        {
            // There are formal ArgIn_A's before the rest instr. Back up to the
            // last ArgIn_A and let the main path below handle both; the rest
            // array itself is emitted at the end (see the hasRest block before
            // $done). NOTE(review): this assumes ArgIn_Rest always follows the
            // ArgIn_A sequence in the instruction stream.
            while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
            {
                instrArgIn = instrArgIn->m_prev;
                if (instrResume == nullptr)
                {
                    instrResume = instrArgIn;
                }
            }
            restInstr->Remove();
        }
        else
        {
            // No formals to lower: compute excess = actualCount - formalCount,
            // clamp it at zero, and build the rest array directly.
            Assert(instrArgIn->m_func == this->m_func);
            IR::Instr * instrCount = m_lowererMD.LoadInputParamCount(instrArgIn, -this->m_func->GetInParamsCount());
            IR::Opnd * excessOpnd = instrCount->GetDst();

            IR::LabelInstr *createRestArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            // BrGe $createRestArray, excess, 0
            InsertCompareBranch(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), Js::OpCode::BrGe_A, createRestArrayLabel, instrArgIn);

            // MOV excess, 0
            InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), instrArgIn);

            // $createRestArray
            instrArgIn->InsertBefore(createRestArrayLabel);

            if (m_func->GetJITFunctionBody()->IsCoroutine())
            {
                // Generator frames read args out of the generator object's
                // arguments array rather than the stack.
                generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            }

            IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
            IR::Instr *prev = LowerRestParameter(formalsOpnd, restDst, excessOpnd, instrArgIn, generatorArgsPtrOpnd);
            instrArgIn->Remove();
            return prev;
        }
    }

    srcOpnd = instrArgIn->GetSrc1();
    symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
    argIndex = symParam->GetParamSlotNum();

    if (argIndex == 1)
    {
        // The "this" argument is not source-dependent and doesn't need to be checked.
        if (m_func->GetJITFunctionBody()->IsCoroutine())
        {
            generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        }

        m_lowererMD.ChangeToAssign(instrArgIn);
        return instrResume == nullptr ? instrArgIn->m_prev : instrResume;
    }

    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();

    AssertMsg(argIndex <= formalsCount, "Expect to see the ArgIn's within the range of the formals");

    // Because there may be instructions between the ArgIn's, such as saves to the frame object,
    // we find the top of the sequence of ArgIn's and insert everything there. This assumes that
    // ArgIn's use param symbols as src's and not the results of previous instructions.
    instrPrev = instrArgIn;
    Js::ArgSlot currArgInCount = 0;

    Assert(this->m_func->argInsCount > 0);

    // Walk backwards over the remaining argInsCount-1 ArgIn_A's (skipping
    // interleaved non-ArgIn instructions) so instrPrev ends up at the first
    // ArgIn_A of the sequence.
    while (currArgInCount < this->m_func->argInsCount - 1)
    {
        instrPrev = instrPrev->m_prev;
        if (instrPrev->m_opcode == Js::OpCode::ArgIn_A)
        {
            srcOpnd = instrPrev->GetSrc1();
            symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
            AssertMsg(symParam->GetParamSlotNum() < argIndex, "ArgIn's not in numerical order");
            argIndex = symParam->GetParamSlotNum();
            currArgInCount++;
        }
        else
        {
            // Make sure that this instruction gets lowered.
            if (instrResume == nullptr)
            {
                instrResume = instrPrev;
            }
        }
    }

    // The loading of parameters will be inserted above this instruction.
    instrInsert = instrPrev;
    AnalysisAssert(instrInsert);
    if (instrResume == nullptr)
    {
        // We found no intervening non-ArgIn's, so lowering can resume at the previous instruction.
        instrResume = instrInsert->m_prev;
    }

    // Now insert all the checks and undef-assigns.
    if (m_func->GetJITFunctionBody()->IsCoroutine())
    {
        generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrInsert);
    }

    // excessOpnd = (load from param area) - formalCounts
    IR::Instr * instrCount = this->m_lowererMD.LoadInputParamCount(instrInsert, -formalsCount, true);
    IR::Opnd * excessOpnd = instrCount->GetDst();

    // BrLt $start (i.e. the helper/undef path is taken when fewer actuals than formals)
    labelUndef = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/ true);
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, labelUndef, instrInsert);

    // Br $Ln
    labelNormal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelInit = labelNormal;
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelNormal, this->m_func);
    instrInsert->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    // Insert the labels
    instrInsert->InsertBefore(labelUndef);
    instrInsert->InsertBefore(labelNormal);

    // Adjustment for deadstore of ArgIn_A: if the trailing formals' ArgIn_A's
    // were dead-stored, the highest surviving slot is below formalsCount.
    // Burn one increment/branch per missing slot so the excess count still
    // lines up with the undef-assign chain below.
    Js::ArgSlot highestSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
    Js::ArgSlot missingSlotNums = this->m_func->GetInParamsCount() - highestSlotNum;
    Assert(missingSlotNums >= 0);
    while (missingSlotNums > 0)
    {
        InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
        Lowerer::InsertBranch(Js::OpCode::BrEq_A, labelNormal, labelNormal);
        missingSlotNums--;
    }

    // MOV undefReg, undefAddress
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(labelNormal, LibraryValue::ValueUndefined);
    opndUndef = IR::RegOpnd::New(TyMachPtr, this->m_func);
    LowererMD::CreateAssign(opndUndef, opndUndefAddress, labelNormal);

    // Tracks dst syms already initialized, to handle duplicate formal names.
    BVSparse<JitArenaAllocator> *formalsBv = JitAnew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);

    // Process the ArgIn_A's from highest slot down to slot 2, emitting for each
    // one an undef-assign on the $start path and moving the real assign onto
    // the $Ln chain.
    while (currArgInCount > 0)
    {
        dstOpnd = instrArgIn->GetDst();

        Assert(dstOpnd->IsRegOpnd());
        isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

        // Now insert the undef initialization before the "normal" label
        //   sn = assign undef
        LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

        // INC excessOpnd
        // BrEq_A $Ln-1
        currArgInCount--;

        labelInitNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

        // And insert the "normal" initialization before the "done" label
        //   sn = assign paramn
        // $Ln-1:
        labelInit->InsertAfter(labelInitNext);
        labelInit = labelInitNext;

        instrArgInNext = instrArgIn->m_prev;
        instrArgIn->Unlink();

        Js::ArgSlot prevParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();

        // function foo(x, x) { use(x); }
        // This should refer to the second 'x'. Since we reverse the order here however, we need to skip
        // the initialization of the first 'x' to not override the one for the second. WOOB:1105504
        if (isDuplicate)
        {
            instrArgIn->Free();
        }
        else
        {
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
            labelInit->InsertBefore(instrArgIn);
            this->m_lowererMD.ChangeToAssign(instrArgIn);
        }

        instrArgIn = instrArgInNext;
        while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
        {
            instrArgIn = instrArgIn->m_prev;
            AssertMsg(instrArgIn, "???");
        }

        // Adjustment for deadstore of ArgIn_A: if interior slots were
        // dead-stored, emit one inc/branch per skipped slot so the excess
        // counter stays in sync with the init chain.
        Js::ArgSlot currParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
        Js::ArgSlot diffSlotsNum = prevParamSlotNum - currParamSlotNum;
        AssertMsg(diffSlotsNum > 0, "Argins are not in order?");

        while (diffSlotsNum > 0)
        {
            InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
            InsertBranch(Js::OpCode::BrEq_A, labelInitNext, labelNormal);
            diffSlotsNum--;
        }

        AssertMsg(instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() <= formalsCount,
                  "Expect all ArgIn's to be in numerical order by param slot");
    }

    // Insert final undef and normal initializations, jumping unconditionally to the end
    // rather than checking against the decremented formals count as we did inside the loop above.

    //   s2 = assign undef
    dstOpnd = instrArgIn->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

    LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

    if (hasRest)
    {
        // On the undef path there are no excess actuals, so the rest array is empty.
        InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), labelNormal);
    }

    // Br $done
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelDone, this->m_func);
    labelNormal->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    //   s2 = assign param2
    // $done:
    labelInit->InsertAfter(labelDone);

    if (hasRest)
    {
        // The formals count has been tainted, so restore it before lowering rest
        IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
        LowerRestParameter(formalsOpnd, restDst, excessOpnd, labelDone, generatorArgsPtrOpnd);
    }

    instrArgIn->Unlink();
    if (isDuplicate)
    {
        instrArgIn->Free();
    }
    else
    {
        ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        labelDone->InsertBefore(instrArgIn);
        this->m_lowererMD.ChangeToAssign(instrArgIn);
    }

    JitAdelete(this->m_alloc, formalsBv);

    return instrResume;
}
  9137. void
  9138. Lowerer::ConvertArgOpndIfGeneratorFunction(IR::Instr *instrArgIn, IR::RegOpnd *generatorArgsPtrOpnd)
  9139. {
  9140. if (this->m_func->GetJITFunctionBody()->IsCoroutine())
  9141. {
  9142. // Replace stack param operand with offset into arguments array held by
  9143. // the generator object.
  9144. IR::Opnd * srcOpnd = instrArgIn->UnlinkSrc1();
  9145. StackSym * symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
  9146. Js::ArgSlot argIndex = symParam->GetParamSlotNum();
  9147. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorArgsPtrOpnd, (argIndex - 1) * MachPtr, TyMachPtr, this->m_func);
  9148. srcOpnd->Free(this->m_func);
  9149. instrArgIn->SetSrc1(indirOpnd);
  9150. }
  9151. }
  9152. IR::RegOpnd *
  9153. Lowerer::LoadGeneratorArgsPtr(IR::Instr *instrInsert)
  9154. {
  9155. IR::Instr * instr = LoadGeneratorObject(instrInsert);
  9156. IR::RegOpnd * generatorRegOpnd = instr->GetDst()->AsRegOpnd();
  9157. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetArgsPtrOffset(), TyMachPtr, instrInsert->m_func);
  9158. IR::RegOpnd * argsPtrOpnd = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  9159. LowererMD::CreateAssign(argsPtrOpnd, indirOpnd, instrInsert);
  9160. return argsPtrOpnd;
  9161. }
  9162. IR::Instr *
  9163. Lowerer::LoadGeneratorObject(IR::Instr * instrInsert)
  9164. {
  9165. StackSym * generatorSym = StackSym::NewImplicitParamSym(3, instrInsert->m_func);
  9166. instrInsert->m_func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
  9167. IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, instrInsert->m_func);
  9168. IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
  9169. instrInsert->m_func->SetHasImplicitParamLoad();
  9170. return LowererMD::CreateAssign(generatorRegOpnd, generatorSymOpnd, instrInsert);
  9171. }
  9172. IR::Instr *
  9173. Lowerer::LowerArgInAsmJs(IR::Instr * instr)
  9174. {
  9175. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  9176. Assert(instr && instr->m_opcode == Js::OpCode::ArgIn_A);
  9177. IR::Instr* instrPrev = instr->m_prev;
  9178. #ifdef _M_IX86
  9179. if (instr->GetDst()->IsInt64())
  9180. {
  9181. m_lowererMD.LowerInt64Assign(instr);
  9182. }
  9183. else
  9184. #endif
  9185. {
  9186. m_lowererMD.ChangeToAssign(instr);
  9187. }
  9188. return instrPrev;
  9189. }
// Attempts to inline a library built-in call in the lowerer (prejit-only fast
// path; normal jit inlining happens earlier — see the Warning below).
// Emits a guard comparing the call target against the known built-in object,
// unlinks the ArgOut chain to collect the argument operands, and generates the
// type-specific fast path with the original call kept as the helper fallback.
// Returns false (leaving the call untouched) when the pattern doesn't qualify.
bool
Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
{
    IR::Opnd *src1 = callInstr->GetSrc1();
    IR::Opnd *src2 = callInstr->GetSrc2();

    // Get the arg count by looking at the slot number of the last arg symbol.
    if (!src2->IsSymOpnd())
    {
        // No args? Not sure this is possible, but handle it.
        return false;
    }

    StackSym *argLinkSym = src2->AsSymOpnd()->m_sym->AsStackSym();

    // Subtract "this" from the arg count.
    IntConstType argCount = argLinkSym->GetArgSlotNum() - 1;

    // Find the callee's built-in index (if any).
    Js::BuiltinFunction index = Func::GetBuiltInIndex(src1);

    // Warning!
    // Don't add new built-in to following switch. Built-ins needs to be inlined in call direct way.
    // Following is only for prejit scenarios where we don't get inlining always and generate fast path in lowerer.
    // Generating fastpath here misses fixed functions and globopt optimizations.
    switch(index)
    {
        case Js::BuiltinFunction::JavascriptString_CharAt:
        case Js::BuiltinFunction::JavascriptString_CharCodeAt:
            if (argCount != 1)
            {
                return false;
            }
            if (!callInstr->GetDst())
            {
                // Optimization of Char[Code]At assumes result is used.
                return false;
            }
            break;

        case Js::BuiltinFunction::Math_Abs:
#ifdef _M_IX86
            // The fast abs path needs SSE2 on x86.
            if (!AutoSystemInfo::Data.SSE2Available())
            {
                return false;
            }
#endif
            if (argCount != 1)
            {
                return false;
            }
            if (!callInstr->GetDst())
            {
                // Optimization of Abs assumes result is used.
                return false;
            }
            break;

        case Js::BuiltinFunction::JavascriptArray_Push:
        {
            if (argCount != 1)
            {
                return false;
            }
            if (callInstr->GetDst())
            {
                // Optimization of push assumes result is unused.
                return false;
            }
            // Walk two links up the arg chain (element, then "this") to find
            // the array operand being pushed onto.
            StackSym *linkSym = callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
            Assert(linkSym->IsSingleDef());
            linkSym = linkSym->m_instrDef->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
            Assert(linkSym->IsSingleDef());
            IR::Opnd *const arrayOpnd = linkSym->m_instrDef->GetSrc1();
            if(!arrayOpnd->IsRegOpnd())
            {
                // This should be rare, but needs to be handled.
                // By now, we've already started some of the inlining. Simply jmp to the helper.
                // The branch will get peeped later.
                return false;
            }
            if(!ShouldGenerateArrayFastPath(arrayOpnd, false, false, false) ||
                arrayOpnd->GetValueType().IsLikelyNativeArray())
            {
                // Rejecting native array for now, since we have to do a FromVar at the call site and bail out.
                return false;
            }
            break;
        }

        case Js::BuiltinFunction::JavascriptString_Replace:
        {
            if(argCount != 2)
            {
                return false;
            }
            if(!ShouldGenerateStringReplaceFastPath(callInstr, argCount))
            {
                return false;
            }
            break;
        }

        default:
            return false;
    }

    Assert(Func::IsBuiltInInlinedInLowerer(callInstr->GetSrc1()));

    // Guard: if the call target isn't the expected built-in function object,
    // fall through to the original (helper) call at $labelHelper.
    IR::Opnd *callTargetOpnd = callInstr->GetSrc1();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Opnd *objRefOpnd = IR::MemRefOpnd::New((void*)this->GetObjRefForBuiltInTarget(callTargetOpnd->AsRegOpnd()), TyMachReg, this->m_func);

    InsertCompareBranch(callTargetOpnd, objRefOpnd, Js::OpCode::BrNeq_A, labelHelper, callInstr);
    callInstr->InsertBefore(labelHelper);

    // Collect the argument operands by walking the ArgOut chain backwards;
    // argsOpnd[0] is "this", higher indices are the explicit args. The arg
    // instructions themselves are moved to just after $labelHelper so only the
    // helper path executes them.
    Assert(argCount <= 2);

    IR::Opnd *argsOpnd[3];
    IR::Opnd *linkOpnd = callInstr->GetSrc2();
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd * symOpnd = linkOpnd->AsSymOpnd();
        StackSym *sym = symOpnd->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;

        Assert(argCount >= 0);
        argsOpnd[argCount] = argInstr->GetSrc1();
        argCount--;

        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);

        linkOpnd = argInstr->GetSrc2();
    }
    // The chain walk must have consumed exactly "this" plus all explicit args.
    AnalysisAssert(argCount == -1);

    // Move startcall: the last link is a reg operand defined by StartCall,
    // which also only belongs on the helper path.
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);

    startCall->Unlink();
    labelHelper->InsertAfter(startCall);

    // $doneLabel:
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    callInstr->InsertAfter(doneLabel);

    bool success = true;
    switch(index)
    {
        case Js::BuiltinFunction::Math_Abs:
            this->m_lowererMD.GenerateFastAbs(callInstr->GetDst(), argsOpnd[1], callInstr, labelHelper, labelHelper, doneLabel);
            break;

        case Js::BuiltinFunction::JavascriptString_CharCodeAt:
        case Js::BuiltinFunction::JavascriptString_CharAt:
            success = this->m_lowererMD.GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
                callInstr, labelHelper, labelHelper, doneLabel);
            break;

        case Js::BuiltinFunction::JavascriptArray_Push:
            success = GenerateFastPush(argsOpnd[0], argsOpnd[1], callInstr, labelHelper, labelHelper, nullptr, doneLabel);
            break;

        case Js::BuiltinFunction::JavascriptString_Replace:
            success = GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2], callInstr, labelHelper, labelHelper, doneLabel);
            break;

        default:
            Assert(UNREACHED);
    }

    // Fast path falls through here: jump over the helper call to $doneLabel.
    IR::Instr *instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, this->m_func);
    labelHelper->InsertBefore(instr);

    return success;
}
  9345. // Perform lowerer part of inlining built-in function.
  9346. // For details, see inline.cpp.
  9347. //
  9348. // Description of changes here (note that taking care of Argouts are similar to InlineeStart):
  9349. // - Move ArgOut_A_InlineBuiltIn next to the call instr -- used by bailout processing in register allocator.
  9350. // - Remove StartCall and InlineBuiltInStart for this call.
  9351. // Before:
  9352. // StartCall fn
  9353. // d1 = BIA s1, link1
  9354. // ...
  9355. // InlineBuiltInStart fn, link0
  9356. // After:
  9357. // ...
  9358. // d1 = BIA s1, NULL
  9359. void Lowerer::LowerInlineBuiltIn(IR::Instr* builtInEndInstr)
  9360. {
  9361. Assert(builtInEndInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || builtInEndInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd);
  9362. IR::Instr* startCallInstr = nullptr;
  9363. builtInEndInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
  9364. startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
  9365. return false;
  9366. });
  9367. // Keep the startCall around as bailout refers to it. Just unlink it for now - do not delete it.
  9368. startCallInstr->Unlink();
  9369. builtInEndInstr->Remove();
  9370. }
  9371. intptr_t
  9372. Lowerer::GetObjRefForBuiltInTarget(IR::RegOpnd * regOpnd)
  9373. {
  9374. intptr_t mathFns = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
  9375. Js::BuiltinFunction index = regOpnd->m_sym->m_builtInIndex;
  9376. AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
  9377. return mathFns + index;
  9378. }
// Lowers NewRegEx by inline-allocating a JavascriptRegExp object and
// initializing its fields, instead of calling the runtime. When the allocation
// is a mark-temp (stack) alloc inside a loop, the vtable and pattern inits are
// hoisted to the outermost loop top since they never change per iteration.
// src1 is the (address of the) compiled regex pattern.
IR::Instr *
Lowerer::LowerNewRegEx(IR::Instr * instr)
{
    IR::Opnd *src1 = instr->UnlinkSrc1();

    Assert(src1->IsAddrOpnd());

#if ENABLE_REGEX_CONFIG_OPTIONS
    if (REGEX_CONFIG_FLAG(RegexTracing))
    {
        // Tracing build: go through the helper so the runtime can log the construction.
        Assert(!instr->GetDst()->CanStoreTemp());
        IR::Instr * instrPrev = LoadScriptContext(instr);
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperScrRegEx_OP_NewRegEx);
        return instrPrev;
    }
#endif
    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::SymOpnd * tempObjectSymOpnd;
    // isZeroed tells the meminit helpers below whether null-inits can be skipped.
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd, IR::HelperAllocMemForJavascriptRegExp, sizeof(Js::JavascriptRegExp), &tempObjectSymOpnd);

    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable and pattern init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
                   LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableJavascriptRegExp),
                   this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfType(),
                    this->LoadLibraryValueOpnd(instr, LibraryValue::ValueRegexType), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfAuxSlots(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfObjectArray(), instr, isZeroed);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoisted pattern store: write through the temp object's stack slot at
        // the pattern field's offset.
        InsertMove(IR::SymOpnd::New(tempObjectSymOpnd->m_sym,
                                    tempObjectSymOpnd->m_offset + Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, this->m_func),
                   src1, this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfPattern(), src1, instr, isZeroed);
    }
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfSplitPattern(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), instr, isZeroed);
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), 0, instr, isZeroed);
    instr->Remove();

    return instrPrev;
}
  9429. IR::Instr *
  9430. Lowerer::GenerateRuntimeError(IR::Instr * insertBeforeInstr, Js::MessageId errorCode, IR::JnHelperMethod helper /*= IR::JnHelperMethod::HelperOp_RuntimeTypeError*/)
  9431. {
  9432. IR::Instr * runtimeErrorInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, this->m_func);
  9433. runtimeErrorInstr->SetSrc1(IR::IntConstOpnd::New(errorCode, TyInt32, this->m_func, true));
  9434. insertBeforeInstr->InsertBefore(runtimeErrorInstr);
  9435. return this->LowerUnaryHelperMem(runtimeErrorInstr, helper);
  9436. }
  9437. bool Lowerer::IsNullOrUndefRegOpnd(IR::RegOpnd *opnd) const
  9438. {
  9439. StackSym *sym = opnd->m_sym;
  9440. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  9441. {
  9442. return false;
  9443. }
  9444. Js::Var var = sym->GetConstAddress();
  9445. return (intptr_t)var == m_func->GetScriptContextInfo()->GetNullAddr() || (intptr_t)var == m_func->GetScriptContextInfo()->GetUndefinedAddr();
  9446. }
  9447. bool Lowerer::IsConstRegOpnd(IR::RegOpnd *opnd) const
  9448. {
  9449. StackSym *sym = opnd->m_sym;
  9450. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  9451. {
  9452. return false;
  9453. }
  9454. const auto& vt = sym->m_instrDef->GetSrc1()->GetValueType();
  9455. return vt.IsUndefined() || vt.IsNull() || vt.IsBoolean();
  9456. }
  9457. bool
  9458. Lowerer::HasSideEffects(IR::Instr *instr)
  9459. {
  9460. if (LowererMD::IsCall(instr))
  9461. {
  9462. #ifdef _M_IX86
  9463. IR::Opnd *src1 = instr->GetSrc1();
  9464. if (src1->IsHelperCallOpnd())
  9465. {
  9466. IR::HelperCallOpnd * helper = src1->AsHelperCallOpnd();
  9467. switch(helper->m_fnHelper)
  9468. {
  9469. case IR::HelperOp_Int32ToAtomInPlace:
  9470. case IR::HelperOp_Int32ToAtom:
  9471. case IR::HelperOp_UInt32ToAtom:
  9472. return false;
  9473. }
  9474. }
  9475. #endif
  9476. return true;
  9477. }
  9478. return instr->HasAnySideEffects();
  9479. }
  9480. bool Lowerer::IsArgSaveRequired(Func *func) {
  9481. return (!func->IsTrueLeaf() || func->IsJitInDebugMode() ||
  9482. // GetHasImplicitParamLoad covers generators, asmjs,
  9483. // and other javascript functions that implicitly read from the arg stack slots
  9484. func->GetHasThrow() || func->GetHasImplicitParamLoad() || func->HasThis() || func->argInsCount > 0);
  9485. }
// Inlines Math.random(). On x64 with a seeded PRNG, emits the xorshift128+
// generator inline (reading/updating the library's two seed words) and
// converts the result to a double in [0,1); otherwise falls back to the
// DirectMath_Random helper call. Returns the instruction to resume lowering at.
IR::Instr*
Lowerer::GenerateFastInlineBuiltInMathRandom(IR::Instr* instr)
{
    AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
    IR::Instr* retInstr = instr->m_prev;
    IR::Opnd* dst = instr->GetDst();

#if defined(_M_X64)
    if (m_func->GetScriptContextInfo()->IsPRNGSeeded())
    {
        // Double bit patterns: exponent for 1.0, and the 52-bit mantissa mask.
        // ((x & mMant) | mExp) reinterpreted as a double lies in [1.0, 2.0).
        const uint64 mExp = 0x3FF0000000000000;
        const uint64 mMant = 0x000FFFFFFFFFFFFF;

        IR::RegOpnd* r0 = IR::RegOpnd::New(TyUint64, m_func);  // s0
        IR::RegOpnd* r1 = IR::RegOpnd::New(TyUint64, m_func);  // s1
        IR::RegOpnd* r3 = IR::RegOpnd::New(TyUint64, m_func);  // helper uint64 reg
        IR::RegOpnd* r4 = IR::RegOpnd::New(TyFloat64, m_func); // helper float64 reg

        // ===========================================================
        //  s0 = scriptContext->GetLibrary()->GetRandSeed1();
        //  s1 = scriptContext->GetLibrary()->GetRandSeed0();
        // ===========================================================
        this->m_lowererMD.CreateAssign(r0,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, instr->m_func), instr);
        this->m_lowererMD.CreateAssign(r1,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, instr->m_func), instr);

        // ===========================================================
        //  s1 ^= s1 << 23;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r1, instr);
        this->InsertShift(Js::OpCode::Shl_A, false, r3, r3, IR::IntConstOpnd::New(23, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        //  s1 ^= s1 >> 17;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r1, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(17, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        //  s1 ^= s0;
        // ===========================================================
        this->InsertXor(r1, r1, r0, instr);

        // ===========================================================
        //  s1 ^= s0 >> 26;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r0, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(26, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        //  scriptContext->GetLibrary()->SetRandSeed0(s0);
        //  scriptContext->GetLibrary()->SetRandSeed1(s1);
        // ===========================================================
        this->m_lowererMD.CreateAssign(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, m_func), r0, instr);
        this->m_lowererMD.CreateAssign(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, m_func), r1, instr);

        // ===========================================================
        //  dst = bit_cast<float64>(((s0 + s1) & mMant) | mExp);
        // ===========================================================
        this->InsertAdd(false, r1, r1, r0, instr);
        this->m_lowererMD.CreateAssign(r3, IR::IntConstOpnd::New(mMant, TyInt64, m_func, true), instr);
        this->InsertAnd(r1, r1, r3, instr);
        this->m_lowererMD.CreateAssign(r3, IR::IntConstOpnd::New(mExp, TyInt64, m_func, true), instr);
        this->InsertOr(r1, r1, r3, instr);
        this->InsertMoveBitCast(dst, r1, instr);

        // ===================================================================
        //  dst -= 1.0;  (maps [1.0, 2.0) down to [0.0, 1.0))
        // ===================================================================
        this->m_lowererMD.CreateAssign(r4, IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleOnePointZeroAddr(), TyFloat64, m_func, IR::AddrOpndKindDynamicDoubleRef), instr);
        this->InsertSub(false, dst, dst, r4, instr);
    }
    else
#endif
    {
        // Fallback: call the runtime helper; stage through a temp register if
        // the dst can't receive the call result directly.
        IR::Opnd* tmpdst = dst;
        if (!dst->IsRegOpnd())
        {
            tmpdst = IR::RegOpnd::New(dst->GetType(), instr->m_func);
        }

        LoadScriptContext(instr);
        IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, tmpdst, instr->m_func);
        instr->InsertBefore(helperCallInstr);
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperDirectMath_Random);

        if (tmpdst != dst)
        {
            InsertMove(dst, tmpdst, instr);
        }
    }

    instr->Remove();

    return retInstr;
}
  9574. IR::Instr *
  9575. Lowerer::LowerCallDirect(IR::Instr * instr)
  9576. {
  9577. IR::Opnd* linkOpnd = instr->UnlinkSrc2();
  9578. StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
  9579. IR::Instr* argInstr = linkSym->m_instrDef;
  9580. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized);
  9581. IR::Opnd* funcObj = argInstr->UnlinkSrc1();
  9582. instr->SetSrc2(argInstr->UnlinkSrc2());
  9583. argInstr->Remove();
  9584. if(instr->HasBailOutInfo())
  9585. {
  9586. IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(instr, instr->m_next, instr->m_next);
  9587. this->LowerBailOnEqualOrNotEqual(bailOutInstr);
  9588. }
  9589. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  9590. return this->GenerateDirectCall(instr, funcObj, (ushort)flags);
  9591. }
  9592. IR::Instr *
  9593. Lowerer::GenerateDirectCall(IR::Instr* inlineInstr, IR::Opnd* funcObj, ushort callflags)
  9594. {
  9595. int32 argCount = m_lowererMD.LowerCallArgs(inlineInstr, callflags);
  9596. m_lowererMD.LoadHelperArgument(inlineInstr, funcObj);
  9597. m_lowererMD.LowerCall(inlineInstr, (Js::ArgSlot)argCount); //to account for function object and callinfo
  9598. return inlineInstr->m_prev;
  9599. }
  9600. /*
  9601. * GenerateHelperToArrayPushFastPath
  9602. * Generates Helper Call and pushes arguments to the Push HelperCall
  9603. */
  9604. IR::Instr *
  9605. Lowerer::GenerateHelperToArrayPushFastPath(IR::Instr * instr, IR::LabelInstr * bailOutLabelHelper)
  9606. {
  9607. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  9608. IR::Opnd * elementHelperOpnd = instr->UnlinkSrc2();
  9609. IR::JnHelperMethod helperMethod;
  9610. if(elementHelperOpnd->IsInt32())
  9611. {
  9612. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeIntArray());
  9613. helperMethod = IR::HelperArray_NativeIntPush;
  9614. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  9615. }
  9616. else if(elementHelperOpnd->IsFloat())
  9617. {
  9618. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeFloatArray());
  9619. helperMethod = IR::HelperArray_NativeFloatPush;
  9620. m_lowererMD.LoadDoubleHelperArgument(instr, elementHelperOpnd);
  9621. }
  9622. else
  9623. {
  9624. helperMethod = IR::HelperArray_VarPush;
  9625. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  9626. }
  9627. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  9628. LoadScriptContext(instr);
  9629. return m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  9630. }
  9631. /*
  9632. * GenerateHelperToArrayPopFastPath
  9633. * Generates Helper Call and pushes arguments to the Pop HelperCall
  9634. */
  9635. IR::Instr *
  9636. Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * doneLabel, IR::LabelInstr * bailOutLabelHelper)
  9637. {
  9638. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  9639. ValueType arrayValueType = arrayHelperOpnd->GetValueType();
  9640. IR::JnHelperMethod helperMethod;
  9641. //Decide the helperMethod based on dst availability and nativity of the array.
  9642. if(arrayValueType.IsLikelyNativeArray() && !instr->GetDst())
  9643. {
  9644. helperMethod = IR::HelperArray_NativePopWithNoDst;
  9645. }
  9646. else if(arrayValueType.IsLikelyNativeIntArray())
  9647. {
  9648. helperMethod = IR::HelperArray_NativeIntPop;
  9649. }
  9650. else if(arrayValueType.IsLikelyNativeFloatArray())
  9651. {
  9652. helperMethod = IR::HelperArray_NativeFloatPop;
  9653. }
  9654. else
  9655. {
  9656. helperMethod = IR::HelperArray_VarPop;
  9657. }
  9658. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  9659. //We do not need scriptContext for HelperArray_NativePopWithNoDst call.
  9660. if(helperMethod != IR::HelperArray_NativePopWithNoDst)
  9661. {
  9662. LoadScriptContext(instr);
  9663. }
  9664. IR::Instr * retInstr = m_lowererMD.ChangeToHelperCall(instr, helperMethod, bailOutLabelHelper);
  9665. //We don't need missing item check for var arrays, as there it is taken care by the helper.
  9666. if(arrayValueType.IsLikelyNativeArray())
  9667. {
  9668. if(retInstr->GetDst())
  9669. {
  9670. //Do this check only for native arrays with Dst. For Var arrays, this is taken care in the Runtime helper itself.
  9671. InsertCompareBranch(GetMissingItemOpnd(retInstr->GetDst()->GetType(), m_func), retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper);
  9672. }
  9673. else
  9674. {
  9675. //We need unconditional jump to doneLabel, if there is no dst in Pop instr.
  9676. InsertBranch(Js::OpCode::Br, true, doneLabel, bailOutLabelHelper);
  9677. }
  9678. }
  9679. return retInstr;
  9680. }
IR::Instr *
Lowerer::LowerCondBranchCheckBailOut(IR::BranchInstr * branchInstr, IR::Instr * helperCall, bool isHelper)
{
    // Lowers a BrTrue_A/BrFalse_A that may carry bailout info. When bailout
    // info is present, an implicit-call bailout check is split out before the
    // branch, and any shared debugger bailout (aux bailout bits) is lowered
    // against the same BailOutInfo. Finally the conditional branch itself is
    // lowered by the machine-dependent lowerer.
    Assert(branchInstr->m_opcode == Js::OpCode::BrTrue_A || branchInstr->m_opcode == Js::OpCode::BrFalse_A);
    if (branchInstr->HasBailOutInfo())
    {
        IR::BailOutKind debuggerBailOutKind = IR::BailOutInvalid;
        if (branchInstr->HasAuxBailOut())
        {
            // We have shared debugger bailout. For branches we lower it here, not in SplitBailForDebugger.
            // See SplitBailForDebugger for details.
            AssertMsg(!(branchInstr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
            debuggerBailOutKind = branchInstr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;
            AssertMsg((debuggerBailOutKind & ~(IR::BailOutIgnoreException | IR::BailOutForceByFlag)) == 0, "Only IR::BailOutIgnoreException|ForceByFlag supported here.");
        }

        // Split out and lower the implicit-call bailout check.
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(branchInstr, helperCall, branchInstr);
        IR::Instr* prevInstr = this->LowerBailOnEqualOrNotEqual(bailOutInstr, branchInstr, nullptr, nullptr, isHelper);

        if (debuggerBailOutKind != IR::BailOutInvalid)
        {
            // Note that by this time implicit calls bailout is already lowered.
            // What we do here is use same bailout info and lower debugger bailout which would be shared bailout.
            BailOutInfo* bailOutInfo = bailOutInstr->GetBailOutInfo();
            IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
                Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
            prevInstr->InsertAfter(debuggerBailoutInstr);

            // The result of that is:
            // original helper op_* instr, then debugger bailout, then implicit calls bailout/etc with the branch instr.
            // Example:
            //    s35(eax).i32    =  CALL  Op_GreaterEqual.u32                  # -- original op_* helper
            //    s34.i32         =  MOV   s35(eax).i32                         #
            //                       BailForDebugger                            # Bailout: #0042 (BailOutIgnoreException) -- the debugger bailout
            //                       CMP   [0x0003BDE0].i8, 1 (0x1).i8          # -- implicit calls check
            //                       JEQ   $L10                                 #
            // $L11: [helper]                                                   #
            //                       CALL  SaveAllRegistersAndBranchBailOut.u32 # Bailout: #0042 (BailOutOnImplicitCalls)
            //                       JMP   $L5                                  #
            // $L10: [helper]                                                   #
            //                       BrFalse_A $L3, s34.i32                     # #0034 -- The BrTrue/BrFalse branch (branch instr)
            // $L6: [helper]                                                    # #0042
            this->LowerBailForDebugger(debuggerBailoutInstr, isHelper);
            // After lowering this we will have a check which on bailout condition will JMP to $L11.
        }
    }

    return m_lowererMD.LowerCondBranch(branchInstr);
}
IR::SymOpnd *
Lowerer::LoadCallInfo(IR::Instr * instrInsert)
{
    // Returns a SymOpnd referring to this frame's callinfo, inserting any
    // loads it needs before instrInsert.
    IR::SymOpnd * srcOpnd;
    Func * func = instrInsert->m_func;

    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        // Generator function arguments and ArgumentsInfo are not on the stack. Instead they
        // are accessed off the generator object (which is prm1).
        IR::Instr *genLoadInstr = LoadGeneratorObject(instrInsert);
        IR::RegOpnd * generatorRegOpnd = genLoadInstr->GetDst()->AsRegOpnd();

        // Load the callinfo field out of the generator object...
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetCallInfoOffset(), TyMachPtr, func);
        IR::Instr * instr = LowererMD::CreateAssign(IR::RegOpnd::New(TyMachPtr, func), indirOpnd, instrInsert);

        // ...and store it into a stack sym so the result can be handed back as
        // a SymOpnd, matching the non-generator path below.
        StackSym * callInfoSym = StackSym::New(TyMachReg, func);
        IR::SymOpnd * callInfoSymOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
        LowererMD::CreateAssign(callInfoSymOpnd, instr->GetDst(), instrInsert);

        srcOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
    }
    else
    {
        // Otherwise callInfo is always the "second" argument.
        // The stack looks like this:
        //
        //       script param N
        //       ...
        //       script param 1
        //       callinfo
        //       function object
        //       return addr
        // FP -> FP chain
        StackSym * srcSym = LowererMD::GetImplicitParamSlotSym(1, func);
        srcOpnd = IR::SymOpnd::New(srcSym, TyMachReg, func);
    }

    return srcOpnd;
}
IR::Instr *
Lowerer::LowerBailOnNotStackArgs(IR::Instr * instr)
{
    // Lowers BailOnNotStackArgs. If the stack-args optimization is off for the
    // whole function we cannot proceed at all, so force a rejit instead.
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test

    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse a label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
        instr->InsertAfter(continueLabelInstr);
    }

    if (!instr->m_func->IsInlinee())
    {
        //BailOut if the number of actuals (except "this" argument) is greater than or equal to 15.
        IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, instr->m_func);
        IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, instr->m_func);
        ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); //LdLen_A works only on stack arguments
        instr->InsertBefore(ldLen);
        this->GenerateFastRealStackArgumentsLdLen(ldLen);
        // Skip the bailout when the actual count is below the inlinee limit.
        this->InsertCompareBranch(ldLenDstOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount, TyUint32, m_func, true), Js::OpCode::BrLt_A, true, continueLabelInstr, instr);
        this->GenerateBailOut(instr, nullptr, nullptr);
    }
    else
    {
        //For Inlined functions, we are sure actuals can't exceed Js::InlineeCallInfo::MaxInlineeArgoutCount (15).
        //No need to bail out.
        instr->Remove();
    }

    return prevInstr;
}
IR::Instr *
Lowerer::LowerBailOnNotSpreadable(IR::Instr *instr)
{
    // We only avoid bailing out / throwing a rejit exception when the array operand is a simple, non-optimized, non-object array.
    IR::Instr * prevInstr = instr->m_prev;
    Func *func = instr->m_func;

    // Make sure the array operand to test lives in a register.
    IR::RegOpnd *arrayOpnd = nullptr;
    IR::Opnd *arraySrcOpnd = instr->UnlinkSrc1();
    if (!arraySrcOpnd->IsRegOpnd())
    {
        arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, instr);
    }
    else
    {
        arrayOpnd = arraySrcOpnd->AsRegOpnd();
    }

    const ValueType baseValueType(arrayOpnd->GetValueType());

    // Check if we can just throw a rejit exception based on valuetype alone instead of bailing out.
    if (!baseValueType.IsLikelyArray()
        || baseValueType.IsLikelyAnyOptimizedArray()
        || (baseValueType.IsLikelyObject() && (baseValueType.GetObjectType() == ObjectType::ObjectWithArray))
        // Validate that GenerateArrayTest will not fail.
        || !(baseValueType.IsUninitialized() || baseValueType.HasBeenObject())
        || m_func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // Past this point, we will need to use a bailout.
    IR::LabelInstr *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);

    // See if we can skip various array checks on value type alone
    if (!baseValueType.IsArray())
    {
        GenerateArrayTest(arrayOpnd, bailOutLabel, bailOutLabel, instr, false);
    }

    // Unless the value type already proves there are no missing values, test
    // the array flags and bail out when missing values are possible.
    if (!(baseValueType.IsArray() && baseValueType.HasNoMissingValues()))
    {
        InsertTestBranch(
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
            IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
            Js::OpCode::BrEq_A,
            bailOutLabel,
            instr);
    }

    // Bail out when the array is too long for an inlined spread.
    IR::IndirOpnd *arrayLenPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    InsertCompareBranch(arrayLenPtrOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount - 1, TyUint8, func), Js::OpCode::BrGt_A, true, bailOutLabel, instr);

    // The fast path jumps over the bailout block that is placed around instr.
    IR::LabelInstr *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, instr);

    instr->InsertBefore(bailOutLabel);
    instr->InsertAfter(skipBailOutLabel);
    GenerateBailOut(instr);

    return prevInstr;
}
  9854. IR::Instr *
  9855. Lowerer::LowerBailOnNotPolymorphicInlinee(IR::Instr * instr)
  9856. {
  9857. Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnFailedPolymorphicInlineTypeCheck || instr->GetBailOutKind() == IR::BailOutOnPolymorphicInlineFunction));
  9858. IR::Instr* instrPrev = instr->m_prev;
  9859. this->GenerateBailOut(instr, nullptr, nullptr);
  9860. return instrPrev;
  9861. }
void
Lowerer::LowerBailoutCheckAndLabel(IR::Instr *instr, bool onEqual, bool isHelper)
{
    // Emits the compare/branch that guards a BailOnEqual/BailOnNotEqual so
    // execution jumps past the bailout when the condition does not hold.

    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse a label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instr->InsertAfter(continueLabelInstr);
    }

    if(instr->GetBailOutKind() == IR::BailOutInjected)
    {
        // BailOnEqual 0, 0
        Assert(onEqual);
        Assert(instr->GetSrc1()->IsEqual(instr->GetSrc2()));
        Assert(instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 0);

        // The operands cannot be equal when generating a compare (assert) but since this is for testing purposes, hoist a src.
        // Ideally, we would just create a BailOut instruction that generates a guaranteed bailout, but there seem to be issues
        // with doing this in a non-helper path. So finally, it would generate:
        //     xor s0, s0
        //     test s0, s0
        //     jnz $continue
        //   $bailout:
        //     // bailout
        //   $continue:
        instr->HoistSrc1(LowererMD::GetLoadOp(instr->GetSrc1()->GetType()));
    }

    // Branch around the bailout when the bail condition is false.
    InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(),
        onEqual ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, continueLabelInstr, instr);

    if (!isHelper)
    {
        // Mark the fall-through (bailout) path as a helper block.
        IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertBefore(helperLabelInstr);
    }
}
IR::Instr *
Lowerer::LowerBailOnEqualOrNotEqual(IR::Instr * instr,
    IR::BranchInstr *branchInstr,      // = nullptr
    IR::LabelInstr *labelBailOut,      // = nullptr
    IR::PropertySymOpnd * propSymOpnd, // = nullptr
    bool isHelper)                     // = false
{
    // Lowers BailOnEqual/BailOnNotEqual: emits the guarding compare/branch,
    // optionally patches profiled field info on the bailout path, then
    // generates the bailout itself.
    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test
    bool onEqual = instr->m_opcode == Js::OpCode::BailOnEqual;
    LowerBailoutCheckAndLabel(instr, onEqual, isHelper);

    // BailOutOnImplicitCalls is a post-op bailout. Since we look at the profile info for LdFld/StFld to decide whether the instruction may or may not call an accessor,
    // we need to update this profile information on the bailout path for BailOutOnImplicitCalls if the implicit call was an accessor call.
    if(propSymOpnd && ((instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls) && (propSymOpnd->m_inlineCacheIndex != -1) &&
        instr->m_func->HasProfileInfo())
    {
        //     result = AND implCallFlags, ~ImplicitCall_None
        //              TST result, ImplicitCall_Accessor
        //              JEQ $bail
        //              OR profiledFlags, ( FldInfo_FromAccessor | FldInfo_Polymorphic )
        //     $bail
        IR::Opnd * implicitCallFlags = GetImplicitCallFlagsOpnd();
        IR::Opnd * accessorImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_Accessor & ~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * maskNoImplicitCall = IR::IntConstOpnd::New((Js::ImplicitCallFlags)~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * fldInfoAccessor = IR::IntConstOpnd::New(Js::FldInfo_FromAccessor | Js::FldInfo_Polymorphic, GetFldInfoFlagsType(), instr->m_func, true);
        IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);

        // Skip the profile update unless the implicit call was an accessor.
        IR::Instr * andInstr = InsertAnd(IR::RegOpnd::New(GetImplicitCallFlagsType(), instr->m_func), implicitCallFlags, maskNoImplicitCall, instr);
        InsertTestBranch(andInstr->GetDst(), accessorImplicitCall, Js::OpCode::BrEq_A, label, instr);

        // OR the accessor/polymorphic bits into the profiled field info flags.
        intptr_t infoAddr = instr->m_func->GetReadOnlyProfileInfo()->GetFldInfoAddr(propSymOpnd->m_inlineCacheIndex);
        IR::Opnd * profiledFlags = IR::MemRefOpnd::New(infoAddr + Js::FldInfo::GetOffsetOfFlags(), TyInt8, instr->m_func);
        InsertOr(profiledFlags, profiledFlags, fldInfoAccessor, instr);
        instr->InsertBefore(label);
    }

    this->GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
void Lowerer::LowerBailOnNegative(IR::Instr *const instr)
{
    // Lowers BailOnNegative: bail out when the int32/uint32 src1 is negative
    // (sign bit set), otherwise continue at the skip label.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BailOnNegative);
    Assert(instr->HasBailOutInfo());
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->GetType() == TyInt32 || instr->GetSrc1()->GetType() == TyUint32);
    Assert(!instr->GetSrc2());

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
    // Expand the bailout first; this consumes the bailout info on instr.
    LowerOneBailOutKind(instr, instr->GetBailOutKind(), false);
    Assert(!instr->HasBailOutInfo());
    IR::Instr *insertBeforeInstr = instr->m_next;
    Func *const func = instr->m_func;

    //     test src, src
    //     jns  $skipBailOut
    InsertCompareBranch(
        instr->UnlinkSrc1(),
        IR::IntConstOpnd::New(0, TyInt32, func, true),
        Js::OpCode::BrGe_A,
        skipBailOutLabel,
        insertBeforeInstr);

    instr->Remove();
}
  9962. IR::Instr *
  9963. Lowerer::LowerBailOnNotObject(IR::Instr *instr,
  9964. IR::BranchInstr *branchInstr /* = nullptr */,
  9965. IR::LabelInstr *labelBailOut /* = nullptr */)
  9966. {
  9967. IR::Instr *prevInstr = instr->m_prev;
  9968. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label,
  9969. m_func);
  9970. instr->InsertAfter(continueLabelInstr);
  9971. this->m_lowererMD.GenerateObjectTest(instr->UnlinkSrc1(),
  9972. instr,
  9973. continueLabelInstr,
  9974. /* fContinueLabel = */ true);
  9975. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  9976. return prevInstr;
  9977. }
  9978. IR::Instr *
  9979. Lowerer::LowerBailOnTrue(IR::Instr* instr, IR::LabelInstr* labelBailOut /*nullptr*/)
  9980. {
  9981. IR::Instr* instrPrev = instr->m_prev;
  9982. IR::LabelInstr* continueLabel = instr->GetOrCreateContinueLabel();
  9983. IR::RegOpnd * regSrc1 = IR::RegOpnd::New(instr->GetSrc1()->GetType(), this->m_func);
  9984. InsertMove(regSrc1, instr->UnlinkSrc1(), instr);
  9985. InsertTestBranch(regSrc1, regSrc1, Js::OpCode::BrEq_A, continueLabel, instr);
  9986. GenerateBailOut(instr, nullptr, labelBailOut);
  9987. return instrPrev;
  9988. }
IR::Instr *
Lowerer::LowerBailOnNotBuiltIn(IR::Instr *instr,
                               IR::BranchInstr *branchInstr /* = nullptr */,
                               IR::LabelInstr *labelBailOut /* = nullptr */)
{
    // src2 holds the index of the expected built-in function; bail out unless
    // src1 equals that entry in the script context's built-in function table.
    Assert(instr->GetSrc2()->IsIntConstOpnd());
    IR::Instr *prevInstr = instr->m_prev;

    intptr_t builtInFuncs = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
    Js::BuiltinFunction builtInIndex = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();

    // Memory reference to the table slot for the expected built-in.
    IR::Opnd *builtIn = IR::MemRefOpnd::New((void*)(builtInFuncs + builtInIndex * MachPtr), TyMachReg, instr->m_func);

#if TESTBUILTINFORNULL
    // Debug aid: break into the debugger if the table entry is null before
    // comparing against it.
    IR::LabelInstr * continueAfterTestLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    InsertTestBranch(builtIn, builtIn, Js::OpCode::BrNeq_A, continueAfterTestLabel, instr);
    this->m_lowererMD.GenerateDebugBreak(instr);
    instr->InsertBefore(continueAfterTestLabel);
#endif

    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(continueLabel);
    // Equal means the expected built-in: skip the bailout.
    InsertCompareBranch(instr->UnlinkSrc1(), builtIn, Js::OpCode::BrEq_A, continueLabel, instr);

    GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
IR::Instr *
Lowerer::LowerBailForDebugger(IR::Instr* instr, bool isInsideHelper /* = false */)
{
    // Lowers BailForDebugger. For a conditional (non-explicit) debugger
    // bailout, each bit in the bailout kind becomes a runtime check that jumps
    // to the bailout block; the bits are peeled off one by one and the final
    // AssertMsg verifies all were handled. An explicit bailout generates the
    // bailout unconditionally.
    IR::Instr * prevInstr = instr->m_prev;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    AssertMsg(bailOutKind, "bailOutKind should not be zero at this time.");
    AssertMsg(!(bailOutKind & IR::BailOutExplicit) || bailOutKind == IR::BailOutExplicit,
        "BailOutExplicit cannot be combined with any other bailout flags.");

    IR::LabelInstr* explicitBailOutLabel = nullptr;

    if (!(bailOutKind & IR::BailOutExplicit))
    {
        intptr_t flags = m_func->GetScriptContextInfo()->GetDebuggingFlagsAddr();

        // Check 1 (do we need to bail out?)
        // JXX bailoutLabel
        // Check 2 (do we need to bail out?)
        // JXX bailoutLabel
        // ...
        // JMP continueLabel
        // bailoutDocumentLabel:
        // (determine if document boundary reached - if not, JMP to continueLabel)
        //  NOTE: THIS BLOCK IS CONDITIONALLY GENERATED BASED ON doGenerateBailOutDocumentBlock
        // bailoutLabel:
        // bail out
        // continueLabel:
        // ...

        IR::LabelInstr* bailOutDocumentLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
        instr->InsertBefore(bailOutDocumentLabel);
        IR::LabelInstr* bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
        instr->InsertBefore(bailOutLabel);
        IR::LabelInstr* continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ isInsideHelper);
        instr->InsertAfter(continueLabel);
        // All checks below are inserted before this unconditional jump.
        IR::BranchInstr* continueBranchInstr = this->InsertBranch(Js::OpCode::Br, continueLabel, bailOutDocumentLabel);    // JMP continueLabel.

        bool doGenerateBailOutDocumentBlock = false;

        const IR::BailOutKind c_forceAndIgnoreEx = IR::BailOutForceByFlag | IR::BailOutIgnoreException;
        if ((bailOutKind & c_forceAndIgnoreEx) == c_forceAndIgnoreEx)
        {
            // It's faster to check these together in 1 check rather than 2 separate checks at run time.
            // CMP [&(flags->m_forceInterpreter, flags->m_isIgnoreException)], 0
            // BNE bailout
            IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt16, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt16, m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
            bailOutKind ^= c_forceAndIgnoreEx;    // Both bits handled.
        }
        else
        {
            if (bailOutKind & IR::BailOutForceByFlag)
            {
                // CMP [&flags->m_forceInterpreter], 0
                // BNE bailout
                IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt8, m_func);
                IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func, /*dontEncode*/ true);
                InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
                bailOutKind ^= IR::BailOutForceByFlag;
            }
            if (bailOutKind & IR::BailOutIgnoreException)
            {
                // CMP [&flags->m_byteCodeOffsetAfterIgnoreException], DebuggingFlags::InvalidByteCodeOffset
                // BNE bailout
                IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetByteCodeOffsetAfterIgnoreExceptionOffset(), TyInt32, m_func);
                IR::Opnd* opnd2 = IR::IntConstOpnd::New(DebuggingFlags::InvalidByteCodeOffset, TyInt32, m_func, /*dontEncode*/ true);
                InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
                bailOutKind ^= IR::BailOutIgnoreException;
            }
        }

        if (bailOutKind & IR::BailOutBreakPointInFunction)
        {
            // CMP [&functionBody->m_sourceInfo.m_probeCount], 0
            // BNE bailout
            IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetProbeCountAddr(), TyInt32, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt32, m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
            bailOutKind ^= IR::BailOutBreakPointInFunction;
        }

        // on method entry
        if(bailOutKind & IR::BailOutStep)
        {
            // TEST STEP_BAILOUT, [&stepController->StepType]
            // BNE BailoutLabel
            IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(Js::STEP_BAILOUT, TyInt8, this->m_func, /*dontEncode*/ true);
            InsertTestBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);

            // CMP STEP_DOCUMENT, [&stepController->StepType]
            // BEQ BailoutDocumentLabel
            opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
            opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);

            doGenerateBailOutDocumentBlock = true;

            bailOutKind ^= IR::BailOutStep;
        }

        // on method exit
        if (bailOutKind & IR::BailOutStackFrameBase)
        {
            // CMP EffectiveFrameBase, [&stepController->frameAddrWhenSet]
            // BA bailoutLabel
            RegNum effectiveFrameBaseReg;
#ifdef _M_X64
            effectiveFrameBaseReg = m_lowererMD.GetRegStackPointer();
#else
            effectiveFrameBaseReg = m_lowererMD.GetRegFramePointer();
#endif
            IR::Opnd* opnd1 = IR::RegOpnd::New(nullptr, effectiveFrameBaseReg, TyMachReg, m_func);
            IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugFrameAddressAddr(), TyMachReg, m_func);
            this->InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrGt_A, /*isUnsigned*/ true, bailOutLabel, continueBranchInstr);

            // CMP STEP_DOCUMENT, [&stepController->StepType]
            // BEQ BailoutDocumentLabel
            opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
            opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);

            doGenerateBailOutDocumentBlock = true;

            bailOutKind ^= IR::BailOutStackFrameBase;
        }

        if (bailOutKind & IR::BailOutLocalValueChanged)
        {
            int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
            if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
            {
                // CMP [EBP + hasLocalVarChangedStackOffset], 0
                // BNE bailout
                StackSym* sym = StackSym::New(TyInt8, m_func);
                sym->m_offset = hasLocalVarChangedOffset;
                sym->m_allocated = true;
                IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
                IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
                InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
            }
            bailOutKind ^= IR::BailOutLocalValueChanged;
        }

        if (doGenerateBailOutDocumentBlock)
        {
            // GENERATE the BailoutDocumentLabel
            // bailOutDocumentLabel:
            //   CMP CurrentScriptId, [&stepController->ScriptIdWhenSet]
            //   BEQ ContinueLabel
            // bailOutLabel:                // (fallthrough bailOutLabel)
            IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetScriptIdAddr(), TyInt32, m_func);
            IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugScriptIdWhenSetAddr(), TyInt32, m_func);
            IR::RegOpnd* reg1 = IR::RegOpnd::New(TyInt32, m_func);
            InsertMove(reg1, opnd2, bailOutLabel);
            InsertCompareBranch(opnd1, reg1, Js::OpCode::BrEq_A, /*isUnsigned*/ true, continueLabel, bailOutLabel);
        }

        AssertMsg(bailOutKind == (IR::BailOutKind)0, "Some of the bits in BailOutKind were not processed!");

        // Note: at this time the 'instr' is in between bailoutLabel and continueLabel.
    }
    else
    {
        // For explicit/unconditional bailout use label which is not a helper, otherwise we would get a helper in main code path
        // which breaks helper label consistency (you can only get to helper from a conditional branch in main code), see DbCheckPostLower.
        explicitBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
    }

    this->GenerateBailOut(instr, nullptr, explicitBailOutLabel);

    return prevInstr;
}
  10164. IR::Instr*
  10165. Lowerer::LowerBailOnException(IR::Instr * instr)
  10166. {
  10167. Assert(instr->HasBailOutInfo());
  10168. IR::Instr * instrPrev = instr->m_prev;
  10169. this->GenerateBailOut(instr, nullptr, nullptr);
  10170. return instrPrev;
  10171. }
  10172. IR::Instr*
  10173. Lowerer::LowerBailOnEarlyExit(IR::Instr * instr)
  10174. {
  10175. Assert(instr->HasBailOutInfo());
  10176. IR::Instr * instrPrev = instr->m_prev;
  10177. this->GenerateBailOut(instr, nullptr, nullptr);
  10178. return instrPrev;
  10179. }
  10180. // Generate BailOut Lowerer Instruction if the value is INT_MIN.
// If it's not INT_MIN, we continue without bailout.
IR::Instr *
Lowerer::LowerBailOnIntMin(IR::Instr *instr, IR::BranchInstr *branchInstr /* = nullptr */, IR::LabelInstr *labelBailOut /* = nullptr */)
{
    Assert(instr);
    Assert(instr->GetSrc1());

    IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    instr->InsertAfter(continueLabelInstr);

    if(!instr->HasBailOutInfo())
    {
        // No bailout info on the instruction: nothing to check, just drop it.
        instr->Remove();
    }
    else
    {
        Assert(instr->GetBailOutKind() == IR::BailOnIntMin);
        // Note: src1 must be int32 at this point.
        if (instr->GetSrc1()->IsIntConstOpnd())
        {
            // For consts we can check the value at JIT time. Note: without this check we'll have to legalize the CMP instr.
            IR::IntConstOpnd* intConst = instr->UnlinkSrc1()->AsIntConstOpnd();
            if (intConst->GetValue() == INT_MIN)
            {
                // Known INT_MIN: the bailout is unconditional.
                this->GenerateBailOut(instr, branchInstr, labelBailOut);
                intConst->Free(instr->m_func);
            }
            else
            {
                // Known non-INT_MIN: the check can never fire.
                instr->Remove();
            }
        }
        else
        {
            // Runtime check: skip the bailout unless src1 == INT_MIN.
            InsertCompareBranch(instr->UnlinkSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, this->m_func), Js::OpCode::BrNeq_A, continueLabelInstr, instr);
            this->GenerateBailOut(instr, branchInstr, labelBailOut);
        }
    }

    return continueLabelInstr;
}
  10219. ///----------------------------------------------------------------------------
  10220. ///
  10221. /// Lowerer::LowerBailOnNotString
  10222. /// Generate BailOut Lowerer Instruction if not a String
  10223. ///
  10224. ///----------------------------------------------------------------------------
void Lowerer::LowerBailOnNotString(IR::Instr *instr)
{
    // Bail out when src1 is not a string at run time. If the value type
    // already proves src1 is a String, the check (and its bailout info) is
    // dropped entirely.
    if (!instr->GetSrc1()->GetValueType().IsString())
    {
        /*Creating a MOV instruction*/
        IR::Instr * movInstr = IR::Instr::New(instr->m_opcode, instr->UnlinkDst(), instr->UnlinkSrc1(), instr->m_func);
        instr->InsertBefore(movInstr);

        IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::LabelInstr *helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertAfter(continueLabelInstr);

        // GenerateStringTest jumps to the helper label on failure and to the
        // continue label on success.
        IR::RegOpnd *srcReg = movInstr->GetSrc1()->IsRegOpnd() ? movInstr->GetSrc1()->AsRegOpnd() : nullptr;
        this->GenerateStringTest(srcReg, instr, helperLabelInstr, continueLabelInstr);

        this->GenerateBailOut(instr, nullptr, helperLabelInstr);
    }
    else
    {
        instr->ClearBailOutInfo();
    }
}
// Splits a single bail-out kind out of 'instr' into a separate BailOut
// instruction inserted immediately after 'instr', leaving any remaining kinds
// on the original instruction. The caller is expected to emit the control flow
// that decides whether the new BailOut instruction is reached.
//
// bailOutKindToLower       - the one kind to peel off; when it is one of the
//                            BailOutKindBits flags it must be a single bit
//                            (enforced by the power-of-two assert below).
// preserveBailOutKindInInstr - when true, 'instr' keeps the lowered kind in
//                            addition to having it lowered separately.
// isInHelperBlock          - accepted for signature symmetry with the other
//                            lowering entry points; not referenced in this body.
void Lowerer::LowerOneBailOutKind(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKindToLower,
    const bool isInHelperBlock,
    const bool preserveBailOutKindInInstr)
{
    Assert(instr);
    Assert(bailOutKindToLower);
    // If lowering a kind from the flag bits, it must be exactly one bit
    // (x & (x - 1) == 0 is the standard single-bit test).
    Assert(!(bailOutKindToLower & IR::BailOutKindBits) || !(bailOutKindToLower & bailOutKindToLower - 1u));

    Func *const func = instr->m_func;

    // Split bailouts other than the one being lowered here
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    // The kind being lowered must actually be present on the instruction:
    // either as a set flag bit, or as the instruction's main (non-bits) kind.
    Assert(
        bailOutKindToLower & IR::BailOutKindBits
            ? bailOutKind & bailOutKindToLower
            : (bailOutKind & ~IR::BailOutKindBits) == bailOutKindToLower);

    if(!preserveBailOutKindInInstr)
    {
        bailOutKind -= bailOutKindToLower;
    }

    if(bailOutKind)
    {
        // 'instr' still carries other bailout kinds after the split.
        if(bailOutInfo->bailOutInstr == instr)
        {
            // Create a shared bailout point for the split bailout checks
            IR::Instr *const sharedBail = instr->ShareBailOut();
            Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
            GenerateBailOut(sharedBail);
        }
        instr->SetBailOutKind(bailOutKind);
    }
    else
    {
        // Nothing left on 'instr'; detach the bailout info. Clearing
        // bailOutInstr avoids a dangling back-pointer to 'instr'.
        instr->UnlinkBailOutInfo();
        if(bailOutInfo->bailOutInstr == instr)
        {
            bailOutInfo->bailOutInstr = nullptr;
        }
    }

    IR::Instr *const insertBeforeInstr = instr->m_next;

    // (Bail out with the requested bail out kind)
    IR::BailOutInstr *const bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOut, bailOutKindToLower, bailOutInfo, func);
    bailOutInstr->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOutInstr);
    GenerateBailOut(bailOutInstr);

    // The caller is expected to generate code to decide whether to bail out
}
// Splits a combined BailOnNotArray instruction that also carries the
// BailOutOnMissingValue kind into two instructions sharing one BailOutInfo:
//   *bailOnNotArrayRef    - the original instruction, reduced to the
//                           not-array / not-native-array check only.
//   *bailOnMissingValueRef - a new BailOnNotArray instruction carrying only
//                           BailOutOnMissingValue, inserted after the original
//                           (nullptr if there was nothing to split).
void Lowerer::SplitBailOnNotArray(
    IR::Instr *const instr,
    IR::Instr * *const bailOnNotArrayRef,
    IR::Instr * *const bailOnMissingValueRef)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());
    Assert(bailOnNotArrayRef);
    Assert(bailOnMissingValueRef);

    IR::Instr *&bailOnNotArray = *bailOnNotArrayRef;
    IR::Instr *&bailOnMissingValue = *bailOnMissingValueRef;
    bailOnNotArray = instr;
    bailOnMissingValue = nullptr;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    if(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray)
    {
        // Only the array check is present; nothing to split.
        return;
    }

    // Split array checks
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    if(bailOutInfo->bailOutInstr == instr)
    {
        // Create a shared bailout point for the split bailout checks
        IR::Instr *const sharedBail = instr->ShareBailOut();
        Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
        LowerBailTarget(sharedBail);
    }

    // Remove the missing-value kind from the original; what remains must be
    // exactly one of the array-check kinds.
    bailOutKind -= IR::BailOutOnMissingValue;
    Assert(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray);
    instr->SetBailOutKind(bailOutKind);

    Func *const func = bailOutInfo->bailOutFunc;
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // Split missing value checks
    bailOnMissingValue = IR::BailOutInstr::New(Js::OpCode::BailOnNotArray, IR::BailOutOnMissingValue, bailOutInfo, func);
    bailOnMissingValue->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOnMissingValue);
}
// Lowers a BailOnNotArray instruction into an inline array-type test followed
// by a bailout call on the failure path. Returns the array-typed register
// operand produced by the test, for use by subsequent lowering (e.g.
// LowerBailOnMissingValue).
IR::RegOpnd *Lowerer::LowerBailOnNotArray(IR::Instr *const instr)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());

    Func *const func = instr->m_func;

    // Label to jump to (or fall through to) when bailing out
    const auto bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
    instr->InsertBefore(bailOutLabel);

    // Label to jump to when not bailing out
    const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr->InsertAfter(skipBailOutLabel);

    // Do the array tests and jump to bailOutLabel if it's not an array. Fall through if it is an array.
    // (All three failure targets are the same bailout label here.)
    IR::RegOpnd *const arrayOpnd =
        GenerateArrayTest(instr->UnlinkSrc1()->AsRegOpnd(), bailOutLabel, bailOutLabel, bailOutLabel, true);

    // Skip bail-out when it is an array
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, bailOutLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);

    return arrayOpnd;
}
// Lowers a bail-on-missing-value check: tests the array's flags for
// HasNoMissingValues and bails out when the flag is not set. 'arrayOpnd' is
// the array register produced earlier (e.g. by LowerBailOnNotArray).
void Lowerer::LowerBailOnMissingValue(IR::Instr *const instr, IR::RegOpnd *const arrayOpnd)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());
    Assert(arrayOpnd);
    Assert(arrayOpnd->GetValueType().IsArrayOrObjectWithArray());

    Func *const func = instr->m_func;

    // Label to jump to when not bailing out
    const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr->InsertAfter(skipBailOutLabel);

    // Skip bail-out when the array has no missing values
    //
    //     test [array + offsetOf(objectArrayOrFlags)], Js::DynamicObjectFlags::HasNoMissingValues
    //     jnz $skipBailOut
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func);
    // The flag must fit in the low byte since the test below reads TyUint8.
    CompileAssert(
        static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
        Js::DynamicObjectFlags::HasNoMissingValues);
    InsertTestBranch(
        IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
        IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
        Js::OpCode::BrNeq_A,
        skipBailOutLabel,
        instr);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);
}
// Lowers the BailOutOnInvalidatedArrayHeadSegment kind on a store-element /
// memop instruction: captures the array's head segment (and its length)
// before the helper call, then after the helper call asks a runtime helper
// whether either changed, bailing out if so. Reuses JIT-tracked syms for the
// head segment / length where available to avoid the capture helper calls.
void Lowerer::LowerBailOnInvalidatedArrayHeadSegment(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the head segment or the head segment length changed during the helper call

    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym()))
    {
        // Record the array head segment before the helper call
        headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
    }

    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array head segment length before the helper call
        if(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym())
        {
            mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
        }
        else
        {
            headSegmentLengthBeforeHelperCall =
                Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
        }
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail
    // out
    invalidatedHeadSegment =
        JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
            headSegmentBeforeHelperCall,
            headSegmentLengthBeforeHelperCall,
            base)
    test invalidatedHeadSegment, invalidatedHeadSegment
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());
    const bool isArrayOrObjectWithArray = baseValueType.IsArrayOrObjectWithArray();
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *headSegmentBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        // The JIT already tracks the head segment in a sym; reuse it.
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentSym(), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array head segment before the helper call
        // headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
        callInstr->SetDst(headSegmentBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentForArrayOrObjectWithArray);
    }

    IR::RegOpnd *headSegmentLengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        // The JIT already tracks the head segment length in a sym; reuse it.
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentLengthSym(), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
    }
    else
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
        if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
        {
            // Record the array head segment length before the helper call
            // mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            InsertMove(
                headSegmentLengthBeforeHelperCallOpnd,
                IR::IndirOpnd::New(
                    headSegmentBeforeHelperCallOpnd,
                    Js::SparseArraySegmentBase::GetOffsetOfLength(),
                    TyUint32,
                    func),
                instr);
        }
        else
        {
            // Record the array head segment length before the helper call
            // headSegmentLengthBeforeHelperCall =
            //     Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            m_lowererMD.LoadHelperArgument(instr, headSegmentBeforeHelperCallOpnd);
            IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
            callInstr->SetDst(headSegmentLengthBeforeHelperCallOpnd);
            instr->InsertBefore(callInstr);
            m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentLength);
        }
    }

    // Peel this bailout kind onto its own BailOut instruction after 'instr';
    // the check below branches around it when nothing was invalidated.
    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayHeadSegment, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail out
    // invalidatedHeadSegment =
    //     JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
    //         headSegmentBeforeHelperCall,
    //         headSegmentLengthBeforeHelperCall,
    //         base)
    // (Arguments are loaded in reverse order by LoadHelperArgument.)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentLengthBeforeHelperCallOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedHeadSegmentOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedHeadSegmentOpnd(invalidatedHeadSegmentOpnd, func);
    callInstr->SetDst(invalidatedHeadSegmentOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayHeadSegment);

    // test invalidatedHeadSegment, invalidatedHeadSegment
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedHeadSegmentOpnd,
        invalidatedHeadSegmentOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    // $skipBailOut:
}
// Lowers the BailOutOnInvalidatedArrayLength kind on a store-element / memop
// instruction: captures the array length before the helper call, then after
// the helper call asks a runtime helper whether the length changed, bailing
// out if so. Mirrors LowerBailOnInvalidatedArrayHeadSegment but for length.
void Lowerer::LowerBailOnInvalidatedArrayLength(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the length changed during the helper call

    if(!(arrayOpnd && arrayOpnd.LengthSym() && arrayOpnd.LengthSym() != arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array length before the helper call
        lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array has a different length after the helper call, then this store needs to bail out
    invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    test invalidatedLength, invalidatedLength
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayLength)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArray());
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *lengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseLengthBeforeHelperCallOpnd;
    // Reuse the JIT-tracked length sym only when it is distinct from the head
    // segment length sym (when they alias, the tracked sym is not the array
    // length proper).
    if(arrayOpnd && arrayOpnd->LengthSym() && arrayOpnd->LengthSym() != arrayOpnd->HeadSegmentLengthSym())
    {
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->LengthSym(), arrayOpnd->LengthSym()->GetType(), func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array length before the helper call
        // lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(TyUint32, func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
        callInstr->SetDst(lengthBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayLength);
    }

    // Peel this bailout kind onto its own BailOut instruction after 'instr';
    // the check below branches around it when the length is unchanged.
    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayLength, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different length after the helper call, then this store needs to bail out
    // invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    // (Arguments are loaded in reverse order by LoadHelperArgument.)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, lengthBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedLengthOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedLengthOpnd(invalidatedLengthOpnd, func);
    callInstr->SetDst(invalidatedLengthOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayLength);

    // test invalidatedLength, invalidatedLength
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedLengthOpnd,
        invalidatedLengthOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnInvalidatedArrayLength)
    // $skipBailOut:
}
// Lowers the BailOutOnMissingValue kind on a store-element / memop
// instruction: captures the array's flags before the helper call (or a
// compile-time constant when the value type proves HasNoMissingValues), then
// after the helper call asks a runtime helper whether the store created the
// array's first missing value, bailing out if so.
void Lowerer::LowerBailOnCreatedMissingValue(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the first missing value was created during the helper call

    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
        (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
    createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    test createdFirstMissingValue, createdFirstMissingValue
    jz $skipBailOut
    (Bail out with IR::BailOutOnMissingValue)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());

    IR::Opnd *arrayFlagsBeforeHelperCallOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayFlagsBeforeHelperCallOpnd;
    // Array flags are pointer-sized; pick the matching IR type for the target.
    const IRType arrayFlagsType = sizeof(uintptr_t) == sizeof(uint32) ? TyUint32 : TyUint64;
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        // arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        arrayFlagsBeforeHelperCallOpnd = IR::RegOpnd::New(arrayFlagsType, func);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
        callInstr->SetDst(arrayFlagsBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayFlagsForArrayOrObjectWithArray);
    }

    // Peel this bailout kind onto its own BailOut instruction after 'instr';
    // the check below branches around it when no missing value was created.
    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnMissingValue, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
    {
        // (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        // The value type already proved the flag, so pass it as a constant.
        // A 64-bit immediate needs an AddrOpnd; 32-bit fits in an IntConstOpnd.
        Assert(!arrayFlagsBeforeHelperCallOpnd);
        arrayFlagsBeforeHelperCallOpnd =
            arrayFlagsType == TyUint32
                ? static_cast<IR::Opnd *>(
                    IR::IntConstOpnd::New(
                        static_cast<uintptr_t>(Js::DynamicObjectFlags::HasNoMissingValues),
                        arrayFlagsType,
                        func,
                        true))
                : IR::AddrOpnd::New(
                    reinterpret_cast<void *>(Js::DynamicObjectFlags::HasNoMissingValues),
                    IR::AddrOpndKindConstantVar,
                    func,
                    true);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
    }
    else
    {
        Assert(arrayFlagsBeforeHelperCallOpnd);
    }

    // createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    // (Arguments are loaded in reverse order by LoadHelperArgument.)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayFlagsBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const createdFirstMissingValueOpnd = IR::RegOpnd::New(TyUint8, func);
    IR::AutoReuseOpnd autoReuseCreatedFirstMissingValueOpnd(createdFirstMissingValueOpnd, func);
    callInstr->SetDst(createdFirstMissingValueOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationCreatedFirstMissingValue);

    // test createdFirstMissingValue, createdFirstMissingValue
    // jz $skipBailOut
    InsertCompareBranch(
        createdFirstMissingValueOpnd,
        IR::IntConstOpnd::New(0, createdFirstMissingValueOpnd->GetType(), func, true),
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnMissingValue)
    // $skipBailOut:
}
// Returns an operand that reads the current function object:
//  - for inlinees, the inlinee's function-object slot;
//  - otherwise, the implicit function-object parameter on the stack
//    (architecture-specific: a dedicated implicit-param slot on ARM, an
//    argument-offset sym elsewhere).
// For coroutines, an extra indirection unwraps the GeneratorVirtualScriptFunction
// to the real JavascriptGeneratorFunction.
IR::Opnd*
Lowerer::GetFuncObjectOpnd(IR::Instr* insertBeforeInstr)
{
    Func * func = insertBeforeInstr->m_func;
    IR::Opnd *paramOpnd = nullptr;
    if (func->IsInlinee())
    {
        paramOpnd = func->GetInlineeFunctionObjectSlotOpnd();
    }
    else
    {
#if defined(_M_ARM)
        StackSym * paramSym = this->m_lowererMD.GetImplicitParamSlotSym(0);
#else
        // Non-ARM: the function object lives at a fixed offset from the frame
        // (2 * MachPtr presumably skips the return address and saved frame
        // pointer — confirm against the calling convention).
        StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
        this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
        this->m_func->SetHasImplicitParamLoad();
#endif
        paramOpnd = IR::SymOpnd::New(paramSym, TyMachReg, this->m_func);
    }

    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        // the function object for generator calls is a GeneratorVirtualScriptFunction object
        // and we need to return the real JavascriptGeneratorFunction object so grab it before
        // assigning to the dst
        Assert(!func->IsInlinee());
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
        LowererMD::CreateAssign(tmpOpnd, paramOpnd, insertBeforeInstr);
        paramOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
    }

    return paramOpnd;
}
  10712. ///----------------------------------------------------------------------------
  10713. ///
  10714. /// Lowerer::LoadFuncExpression
  10715. ///
  10716. /// Load the function expression to src1 from [ebp + 8]
  10717. ///
  10718. ///----------------------------------------------------------------------------
  10719. IR::Instr *
  10720. Lowerer::LoadFuncExpression(IR::Instr *instrFuncExpr)
  10721. {
  10722. ASSERT_INLINEE_FUNC(instrFuncExpr);
  10723. IR::Opnd *paramOpnd = GetFuncObjectOpnd(instrFuncExpr);
  10724. // mov dst, param
  10725. instrFuncExpr->SetSrc1(paramOpnd);
  10726. LowererMD::ChangeToAssign(instrFuncExpr);
  10727. return instrFuncExpr;
  10728. }
// Lowers a BoundCheck / UnsignedBoundCheck instruction, which encodes the
// condition:  left <= right + offset  (src1 <= src2 + dst, where dst is an
// optional IntConstOpnd offset). Emits a compare-and-branch to the continue
// label when the check passes, and a bailout (of the instruction's bailout
// kind) when it fails. Constant operands are folded where possible; when a
// runtime (right + offset) add is required, its overflow also bails out.
void Lowerer::LowerBoundCheck(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BoundCheck || instr->m_opcode == Js::OpCode::UnsignedBoundCheck);

#if DBG
    if(instr->m_opcode == Js::OpCode::UnsignedBoundCheck)
    {
        // UnsignedBoundCheck is currently only supported for the pattern:
        //     UnsignedBoundCheck s1 <= s2 + c, where c == 0 || c == -1
        Assert(instr->GetSrc1()->IsRegOpnd());
        Assert(instr->GetSrc1()->IsInt32());
        Assert(instr->GetSrc2());
        Assert(!instr->GetSrc2()->IsIntConstOpnd());
        if(instr->GetDst())
        {
            const int32 c = instr->GetDst()->AsIntConstOpnd()->AsInt32();
            Assert(c == 0 || c == -1);
        }
    }
#endif

    const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKind == IR::BailOutOnArrayAccessHelperCall ||
        bailOutKind == IR::BailOutOnInvalidatedArrayHeadSegment ||
        bailOutKind == IR::BailOutOnFailedHoistedBoundCheck ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);

    // Peel the bailout onto its own instruction; the compare below branches
    // over it to skipBailOutLabel when the bound check passes.
    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
    LowerOneBailOutKind(instr, bailOutKind, false);
    Assert(!instr->HasBailOutInfo());
    IR::Instr *insertBeforeInstr = instr->m_next;

#if DBG
    const auto VerifyLeftOrRightOpnd = [&](IR::Opnd *const opnd, const bool isRightOpnd)
    {
        if(!opnd)
        {
            Assert(isRightOpnd);
            return;
        }
        if(opnd->IsIntConstOpnd())
        {
            Assert(!isRightOpnd || opnd->AsIntConstOpnd()->GetValue() != 0);
            return;
        }
        Assert(opnd->GetType() == TyInt32 || opnd->GetType() == TyUint32);
    };
#endif

    // left <= right + offset (src1 <= src2 + dst)
    IR::Opnd *leftOpnd = instr->UnlinkSrc1();
    DebugOnly(VerifyLeftOrRightOpnd(leftOpnd, false));
    IR::Opnd *rightOpnd = instr->UnlinkSrc2();
    DebugOnly(VerifyLeftOrRightOpnd(rightOpnd, true));
    Assert(!leftOpnd->IsIntConstOpnd() || rightOpnd && !rightOpnd->IsIntConstOpnd());
    IR::IntConstOpnd *offsetOpnd = instr->GetDst() ? instr->UnlinkDst()->AsIntConstOpnd() : nullptr;
    Assert(!offsetOpnd || offsetOpnd->GetValue() != 0);
    const bool doUnsignedCompare = instr->m_opcode == Js::OpCode::UnsignedBoundCheck;
    instr->Remove();

    Func *const func = insertBeforeInstr->m_func;
    IntConstType offset = offsetOpnd ? offsetOpnd->GetValue() : 0;
    Js::OpCode compareOpCode = Js::OpCode::BrLe_A;
    if(leftOpnd->IsIntConstOpnd() && rightOpnd->IsRegOpnd() && offset != IntConstMin)
    {
        // Put the constants together: swap the operands, negate the offset, and invert the branch.
        // (offset == IntConstMin is excluded because -IntConstMin overflows.)
        IR::Opnd *const tempOpnd = leftOpnd;
        leftOpnd = rightOpnd;
        rightOpnd = tempOpnd;
        offset = -offset;
        compareOpCode = Js::OpCode::BrGe_A;
    }

    if(rightOpnd->IsIntConstOpnd())
    {
        // Try to aggregate right + offset into a constant offset.
        // NOTE(review): IntConstMath::Add presumably returns true on overflow,
        // so the fold only happens when the sum is representable.
        IntConstType newOffset;
        if(!IntConstMath::Add(offset, rightOpnd->AsIntConstOpnd()->GetValue(), &newOffset))
        {
            offset = newOffset;
            rightOpnd = nullptr;
            offsetOpnd = nullptr;
        }
    }

    // Determine if the Add for (right + offset) is necessary, and the op code that will be used for the comparison
    IR::AutoReuseOpnd autoReuseAddResultOpnd;
    if(offset == -1 && compareOpCode == Js::OpCode::BrLe_A)
    {
        // left <= right - 1  ==>  left < right
        offset = 0;
        compareOpCode = Js::OpCode::BrLt_A;
    }
    else if(offset == 1 && compareOpCode == Js::OpCode::BrGe_A)
    {
        // left >= right + 1  ==>  left > right
        offset = 0;
        compareOpCode = Js::OpCode::BrGt_A;
    }
    else if(offset != 0 && rightOpnd)
    {
        // Need to Add (right + offset). If it overflows, bail out.
        IR::LabelInstr *const bailOutLabel = insertBeforeInstr->m_prev->GetOrCreateContinueLabel(true);
        insertBeforeInstr = bailOutLabel;

        //     mov  temp, right
        //     add  temp, offset
        //     jo   $bailOut
        // $bailOut: (insertBeforeInstr)
        Assert(!offsetOpnd || offsetOpnd->GetValue() == offset);
        IR::RegOpnd *const addResultOpnd = IR::RegOpnd::New(TyInt32, func);
        autoReuseAddResultOpnd.Initialize(addResultOpnd, func);
        InsertAdd(
            true,
            addResultOpnd,
            rightOpnd,
            offsetOpnd ? offsetOpnd->UseWithNewType(TyInt32, func) : IR::IntConstOpnd::New(offset, TyInt32, func),
            insertBeforeInstr);
        InsertBranch(LowererMD::MDOverflowBranchOpcode, bailOutLabel, insertBeforeInstr);

        rightOpnd = addResultOpnd;
    }

    //     cmp  left, right
    //     jl[e] $skipBailOut
    // $bailOut:
    if(!rightOpnd)
    {
        // Everything on the right folded to a constant.
        rightOpnd = IR::IntConstOpnd::New(offset, TyInt32, func);
    }
    InsertCompareBranch(leftOpnd, rightOpnd, compareOpCode, doUnsignedCompare, skipBailOutLabel, insertBeforeInstr);
}
  10850. IR::Instr *
  10851. Lowerer::LowerBailTarget(IR::Instr * instr)
  10852. {
  10853. // this is just a bailout target, just skip over it and generate a label before so other bailout can jump here.
  10854. IR::Instr * prevInstr = instr->m_prev;
  10855. IR::LabelInstr * continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  10856. instr->InsertAfter(continueLabelInstr);
  10857. IR::BranchInstr * skipInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueLabelInstr, this->m_func);
  10858. instr->InsertBefore(skipInstr);
  10859. this->GenerateBailOut(instr);
  10860. return prevInstr;
  10861. }
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr *& instr)
{
    // Splits an implicit-call bailout out of 'instr'. On return:
    //  - 'instr' (in/out parameter) points at a fresh, non-bailout copy of the original
    //    instruction, inserted in its place, and
    //  - the original instruction has been repurposed as a BailOnNotEqual that compares the
    //    implicit call flags against ImplicitCall_None after the copy has executed.
    // Returns the BailOnNotEqual instruction, which still carries the BailOutInfo.
    Assert(instr->IsPlainInstr() || instr->IsProfiledInstr());

    const auto bailOutKind = instr->GetBailOutKind();
    Assert(BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind));

    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, instr);

    IR::Instr *disableImplicitCallsInstr = nullptr, *enableImplicitCallsInstr = nullptr;
    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
    {
        const auto disableImplicitCallAddress =
            m_lowererMD.GenerateMemRef(
                instr->m_func->GetThreadContextInfo()->GetDisableImplicitFlagsAddr(),
                TyInt8,
                instr);

        // Disable implicit calls since they will be called after bailing out
        disableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitCallFlag, TyInt8, instr->m_func, true),
                instr->m_func);
        instr->InsertBefore(disableImplicitCallsInstr);

        // Create instruction for re-enabling implicit calls
        enableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, instr->m_func, true),
                instr->m_func);
    }

    // Move the payload of the original instruction onto a fresh copy that executes first;
    // the original instruction stays behind to become the bailout check below.
    IR::Instr * bailOutInstr = instr;
    instr = IR::Instr::New(instr->m_opcode, instr->m_func);
    bailOutInstr->TransferTo(instr);
    bailOutInstr->InsertBefore(instr);

    if(disableImplicitCallsInstr)
    {
        // Re-enable implicit calls
        Assert(enableImplicitCallsInstr);
        bailOutInstr->InsertBefore(enableImplicitCallsInstr);

        // Lower both instructions. Lowering an instruction may free the instruction's original operands, so do that last.
        LowererMD::ChangeToAssign(disableImplicitCallsInstr);
        LowererMD::ChangeToAssign(enableImplicitCallsInstr);
    }

    // Bail out if the flags are no longer ImplicitCall_None, i.e. an implicit call happened.
    bailOutInstr->m_opcode = Js::OpCode::BailOnNotEqual;
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    return bailOutInstr;
}
  10916. IR::Instr *
  10917. Lowerer::SplitBailOnImplicitCall(IR::Instr * instr, IR::Instr * helperCall, IR::Instr * insertBeforeInstr)
  10918. {
  10919. IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
  10920. const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
  10921. IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
  10922. const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);
  10923. // Reset the implicit call flag on every helper call
  10924. LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, helperCall->m_prev);
  10925. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  10926. if (bailOutInfo->bailOutInstr == instr)
  10927. {
  10928. bailOutInfo->bailOutInstr = nullptr;
  10929. }
  10930. IR::Instr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, IR::BailOutOnImplicitCalls, bailOutInfo, bailOutInfo->bailOutFunc);
  10931. bailOutInstr->SetSrc1(implicitCallFlags);
  10932. bailOutInstr->SetSrc2(noImplicitCall);
  10933. insertBeforeInstr->InsertBefore(bailOutInstr);
  10934. instr->ClearBailOutInfo();
  10935. return bailOutInstr;
  10936. }
// Split out bailout for debugger into separate bailout instr out of real instr which has bailout for debugger.
// Returns the instr which needs to lower next, which would normally be last of splitted instr.
// IR on input:
// - Real instr with BailOutInfo but it's opcode is not BailForDebugger.
//   - debugger bailout is not shared. In this case we'll have debugger bailout in instr->GetBailOutKind().
//   - debugger bailout is shared. In this case we'll have debugger bailout in instr->GetAuxBailOutKind().
// IR on output:
// - Either of:
//   - real instr, then debuggerBailout -- in case we only had debugger bailout.
//   - real instr with BailOutInfo w/o debugger bailout, then debuggerBailout, then sharedBailout -- in case bailout for debugger was shared w/some other b.o.
IR::Instr* Lowerer::SplitBailForDebugger(IR::Instr* instr)
{
    Assert(m_func->IsJitInDebugMode() && instr->m_opcode != Js::OpCode::BailForDebugger);

    IR::BailOutKind debuggerBailOutKind; // Used for splitted instr.
    BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
    IR::Instr* sharedBailoutInstr = nullptr;

    if (instr->GetBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is not shared.
        Assert(!instr->HasAuxBailOut());
        AssertMsg(!(instr->GetBailOutKind() & ~IR::BailOutForDebuggerBits), "There should only be debugger bailout bits in the instr.");

        debuggerBailOutKind = instr->GetBailOutKind() & IR::BailOutForDebuggerBits;

        // There is no non-debugger bailout in the instr, still can't clear bailout info, as we use it for the splitted instr,
        // but we need to mark the bailout as hasn't been generated yet.
        if (bailOutInfo->bailOutInstr == instr)
        {
            // null will be picked up by following BailOutInstr::New which will change it to new bailout instr.
            bailOutInfo->bailOutInstr = nullptr;
        }

        // Remove bailout info from the original instr which from now on becomes just regular instr, w/o deallocating bailout info.
        instr->ClearBailOutInfo();
    }
    else if (instr->IsBranchInstr() && instr->HasBailOutInfo() && instr->HasAuxBailOut())
    {
        // Branches with shared bailout are lowered in LowerCondBranchCheckBailOut,
        // can't do here because we need to use BranchBailOutRecord but don't know which BrTrue/BrFalse to use for it.
        debuggerBailOutKind = IR::BailOutInvalid;
    }
    else if (instr->HasAuxBailOut() && instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is shared.
        AssertMsg(!(instr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");

        debuggerBailOutKind = instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;

        // This will insert SharedBail instr after current instr and set bailOutInfo->bailOutInstr to the shared one.
        sharedBailoutInstr = instr->ShareBailOut();

        // As we extracted aux bail out, invalidate all tracks of it in the instr.
        instr->ResetAuxBailOut();
    }
    else
    {
        // None of the supported input shapes matched; the caller violated the contract above.
        AssertMsg(FALSE, "shouldn't get here");
        debuggerBailOutKind = IR::BailOutInvalid;
    }

    if (debuggerBailOutKind != IR::BailOutInvalid)
    {
        // Insert the extracted BailForDebugger right after the (now bailout-free) real instr.
        IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
            Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
        instr->InsertAfter(debuggerBailoutInstr);

        // Since we go backwards, we need to process extracted out bailout for debugger first.
        instr = sharedBailoutInstr ? sharedBailoutInstr : debuggerBailoutInstr;
    }

    return instr;
}
  11000. IR::Instr *
  11001. Lowerer::SplitBailOnResultCondition(IR::Instr *const instr) const
  11002. {
  11003. Assert(instr);
  11004. Assert(!instr->IsLowered());
  11005. Assert(
  11006. instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
  11007. instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  11008. const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
  11009. instr->TransferTo(nonBailOutInstr);
  11010. instr->InsertBefore(nonBailOutInstr);
  11011. return nonBailOutInstr;
  11012. }
  11013. void
  11014. Lowerer::LowerBailOnResultCondition(
  11015. IR::Instr *const instr,
  11016. IR::LabelInstr * *const bailOutLabel,
  11017. IR::LabelInstr * *const skipBailOutLabel)
  11018. {
  11019. Assert(instr);
  11020. Assert(
  11021. instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
  11022. instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  11023. Assert(bailOutLabel);
  11024. Assert(skipBailOutLabel);
  11025. // Label to jump to (or fall through to) when bailing out. The actual bailout label
  11026. // (bailOutInfo->bailOutInstr->AsLabelInstr()) may be shared, and code may be added to restore values before the jump to the
  11027. // actual bailout label in the cloned bailout case, so always create a new bailout label for this particular path.
  11028. *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /* isOpHelper */);
  11029. instr->InsertBefore(*bailOutLabel);
  11030. // Label to jump to when not bailing out
  11031. *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  11032. instr->InsertAfter(*skipBailOutLabel);
  11033. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  11034. // ordering instructions anymore.
  11035. GenerateBailOut(instr);
  11036. }
  11037. void
  11038. Lowerer::PreserveSourcesForBailOnResultCondition(IR::Instr *const instr, IR::LabelInstr *const skipBailOutLabel) const
  11039. {
  11040. Assert(instr);
  11041. Assert(!instr->IsLowered());
  11042. Assert(!instr->HasBailOutInfo());
  11043. // Since this instruction may bail out, writing to the destination cannot overwrite one of the sources, or we may lose one
  11044. // of the sources needed to redo the equivalent byte code instruction. Determine if the sources need to be preserved.
  11045. const auto dst = instr->GetDst();
  11046. Assert(dst);
  11047. const auto dstStackSym = dst->GetStackSym();
  11048. if(!dstStackSym || !dstStackSym->HasByteCodeRegSlot())
  11049. {
  11050. // We only need to ensure that a byte-code source is not being overwritten
  11051. return;
  11052. }
  11053. switch(instr->m_opcode)
  11054. {
  11055. // The sources of these instructions don't need restoring, or will be restored in the bailout path
  11056. case Js::OpCode::Neg_I4:
  11057. // In case of overflow or zero, the result is the same as the operand
  11058. case Js::OpCode::Add_I4:
  11059. case Js::OpCode::Sub_I4:
  11060. // In case of overflow, there is always enough information to restore the operands
  11061. return;
  11062. }
  11063. Assert(instr->GetSrc1());
  11064. if(!dst->IsEqual(instr->GetSrc1()) && !(instr->GetSrc2() && dst->IsEqual(instr->GetSrc2())))
  11065. {
  11066. // The destination is different from the sources
  11067. return;
  11068. }
  11069. // The destination is the same as one of the sources and the original sources cannot be restored after the instruction, so
  11070. // use a temporary destination for the result and move it back to the original destination after deciding not to bail out
  11071. LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
  11072. }
  11073. void
  11074. Lowerer::LowerInstrWithBailOnResultCondition(
  11075. IR::Instr *const instr,
  11076. const IR::BailOutKind bailOutKind,
  11077. IR::LabelInstr *const bailOutLabel,
  11078. IR::LabelInstr *const skipBailOutLabel) const
  11079. {
  11080. Assert(instr);
  11081. Assert(!instr->IsLowered());
  11082. Assert(!instr->HasBailOutInfo());
  11083. Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  11084. Assert(bailOutLabel);
  11085. Assert(instr->m_next == bailOutLabel);
  11086. Assert(skipBailOutLabel);
  11087. // Preserve sources that are overwritten by the instruction if needed
  11088. PreserveSourcesForBailOnResultCondition(instr, skipBailOutLabel);
  11089. // Lower the instruction
  11090. switch(instr->m_opcode)
  11091. {
  11092. case Js::OpCode::Neg_I4:
  11093. LowererMD::LowerInt4NegWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11094. break;
  11095. case Js::OpCode::Add_I4:
  11096. LowererMD::LowerInt4AddWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11097. break;
  11098. case Js::OpCode::Sub_I4:
  11099. LowererMD::LowerInt4SubWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11100. break;
  11101. case Js::OpCode::Mul_I4:
  11102. LowererMD::LowerInt4MulWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11103. break;
  11104. case Js::OpCode::Rem_I4:
  11105. m_lowererMD.LowerInt4RemWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11106. break;
  11107. default:
  11108. Assert(false); // not implemented
  11109. __assume(false);
  11110. }
  11111. }
  11112. void
  11113. Lowerer::GenerateObjectTestAndTypeLoad(IR::Instr *instrLdSt, IR::RegOpnd *opndBase, IR::RegOpnd *opndType, IR::LabelInstr *labelHelper)
  11114. {
  11115. IR::IndirOpnd *opndIndir;
  11116. if (!opndBase->IsNotTaggedValue())
  11117. {
  11118. m_lowererMD.GenerateObjectTest(opndBase, instrLdSt, labelHelper);
  11119. }
  11120. opndIndir = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  11121. m_lowererMD.CreateAssign(opndType, opndIndir, instrLdSt);
  11122. }
IR::LabelInstr *
Lowerer::GenerateBailOut(IR::Instr * instr, IR::BranchInstr * branchInstr, IR::LabelInstr *bailOutLabel, IR::LabelInstr * collectRuntimeStatsLabel)
{
    // Expands 'instr' (which carries BailOutInfo) into the actual bailout sequence:
    //  - cloned instr: just a jump to the already-generated cloned bailout label;
    //  - shared bailout info already generated elsewhere: patch the record's dynamic fields
    //    (kind, cache index, function body) and jump to the shared target label;
    //  - otherwise: allocate the BailOutRecord, turn 'instr' into a CALL to the bailout helper,
    //    and (except for generator Yield) append a jump to the epilog.
    // Returns the label that begins this bailout path.
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
    if (instr->IsCloned())
    {
        Assert(bailOutInstr != instr);

        // jump to the cloned bail out label
        IR::LabelInstr * bailOutLabelInstr = bailOutInstr->AsLabelInstr();
        IR::BranchInstr * bailOutBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutLabelInstr, this->m_func);
        instr->InsertBefore(bailOutBranch);
        instr->Remove();
        return bailOutLabel;
    }

    // Add helper label to trigger layout.
    if (!collectRuntimeStatsLabel)
    {
        collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    Assert(!collectRuntimeStatsLabel->IsLinked());
    instr->InsertBefore(collectRuntimeStatsLabel);

    if (bailOutInstr != instr)
    {
        // this bailOutInfo is shared, just jump to the bailout target

        // Before jumping, store this site's bailout kind into the shared record. In OOP JIT the
        // record lives in the native code data area and is addressed via the data sym; in-proc it
        // can be addressed directly.
        IR::Opnd * indexOpndForBailOutKind = nullptr;
        int bailOutRecordOffset = 0;
        if (this->m_func->IsOOPJIT())
        {
            bailOutRecordOffset = NativeCodeData::GetDataTotalOffset(bailOutInfo->bailOutRecord);
            indexOpndForBailOutKind = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfBailOutKind()), TyUint32,
#if DBG
                NativeCodeData::GetDataDescription(bailOutInfo->bailOutRecord, this->m_func->m_alloc),
#endif
                m_func, true);
            this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            indexOpndForBailOutKind =
                IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfBailOutKind(), TyUint32, this->m_func, IR::AddrOpndKindDynamicBailOutKindRef);
        }
        m_lowererMD.CreateAssign(
            indexOpndForBailOutKind, IR::IntConstOpnd::New(instr->GetBailOutKind(), indexOpndForBailOutKind->GetType(), this->m_func), instr, false);

        // No point in doing this for BailOutFailedEquivalentTypeCheck or BailOutFailedEquivalentFixedFieldTypeCheck,
        // because the respective inline cache is already polymorphic, anyway.
        if (instr->GetBailOutKind() == IR::BailOutFailedTypeCheck || instr->GetBailOutKind() == IR::BailOutFailedFixedFieldTypeCheck)
        {
            // We have a type check bailout that shares a bailout record with other instructions.
            // Generate code to write the cache index into the bailout record before we jump to the call site.
            Assert(bailOutInfo->polymorphicCacheIndex != (uint)-1);
            Assert(bailOutInfo->bailOutRecord);
            IR::Opnd * indexOpnd = nullptr;
            if (this->m_func->IsOOPJIT())
            {
                indexOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfPolymorphicCacheIndex()), TyUint32, m_func);
            }
            else
            {
                indexOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfPolymorphicCacheIndex(), TyUint32, this->m_func);
            }
            m_lowererMD.CreateAssign(
                indexOpnd, IR::IntConstOpnd::New(bailOutInfo->polymorphicCacheIndex, TyUint32, this->m_func), instr, false);
        }

        if (bailOutInfo->bailOutRecord->GetType() == BailOutRecord::BailoutRecordType::Shared)
        {
            // A shared record serves several functions; record which function body this site
            // belongs to before jumping.
            IR::Opnd *functionBodyOpnd;
            if (this->m_func->IsOOPJIT())
            {
                functionBodyOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + SharedBailOutRecord::GetOffsetOfFunctionBody()), TyMachPtr, m_func);
            }
            else
            {
                functionBodyOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + SharedBailOutRecord::GetOffsetOfFunctionBody(), TyMachPtr, this->m_func);
            }
            m_lowererMD.CreateAssign(
                functionBodyOpnd, CreateFunctionBodyOpnd(instr->m_func), instr, false);
        }

        // GenerateBailOut should have replaced this as a label as we should have already lowered
        // the main bailOutInstr.
        IR::LabelInstr * bailOutTargetLabel = bailOutInstr->AsLabelInstr();
#if DBG
        if (bailOutTargetLabel->m_noHelperAssert)
        {
            collectRuntimeStatsLabel->m_noHelperAssert = true;
        }
#endif
        Assert(bailOutLabel == nullptr || bailOutLabel == bailOutTargetLabel);

        IR::BranchInstr * newBranchInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutTargetLabel, this->m_func);
        instr->InsertAfter(newBranchInstr);
        instr->Remove();
        return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
    }

    // The bailout hasn't been generated yet.
    Assert(!bailOutInstr->IsLabelInstr());

    // capture the condition for this bailout
    if (bailOutLabel == nullptr)
    {
        // Create a label and place it in the bailout info so that shared bailout point can jump to this one
        if (instr->m_prev->IsLabelInstr())
        {
            bailOutLabel = instr->m_prev->AsLabelInstr();
            Assert(bailOutLabel->isOpHelper);
        }
        else
        {
            bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr->InsertBefore(bailOutLabel);
        }
    }
    else
    {
        instr->InsertBefore(bailOutLabel);
    }

#if DBG
    const IR::BailOutKind bailOutKind = bailOutInstr->GetBailOutKind();
    if (bailOutInstr->m_opcode == Js::OpCode::BailOnNoSimdTypeSpec ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnNoProfile ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnException ||
        bailOutInstr->m_opcode == Js::OpCode::Yield ||
        bailOutKind & (IR::BailOutConventionalTypedArrayAccessOnly |
                       IR::BailOutConventionalNativeArrayAccessOnly |
                       IR::BailOutOnArrayAccessHelperCall))
    {
        bailOutLabel->m_noHelperAssert = true;
    }
#endif

    // Publish the label so later shared bailouts take the "bailOutInstr != instr" path above.
    bailOutInfo->bailOutInstr = bailOutLabel;
    bailOutLabel->m_hasNonBranchRef = true;

    // Create the bail out record
    Assert(bailOutInfo->bailOutRecord == nullptr);
    BailOutRecord * bailOutRecord;
    IR::JnHelperMethod helperMethod;
    if (branchInstr != nullptr)
    {
        // Branch bailout: the record additionally captures the byte-code offsets for both branch
        // outcomes plus the condition value.
        Assert(branchInstr->GetSrc2() == nullptr);
        Assert(branchInstr->GetDst() == nullptr);

        IR::LabelInstr * targetLabel = branchInstr->GetTarget();
        Assert(targetLabel->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);

        uint32 trueOffset;
        uint32 falseOffset;
        IR::Opnd *condOpnd = branchInstr->GetSrc1();
        bool invertTarget = (branchInstr->m_opcode == Js::OpCode::BrFalse_A);

        if (bailOutInfo->isInvertedBranch)
        {
            // Flip the condition
            IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, condOpnd, condOpnd, IR::IntConstOpnd::New(1, TyMachReg, instr->m_func), instr->m_func);
            instr->InsertBefore(subInstr);
            this->m_lowererMD.EmitInt4Instr(subInstr);
            // We should really do a DEC/NEG for a full 2's complement flip from 0/1 to 1/0,
            // but DEC is sufficient to flip from 0/1 to -1/0, which is false/true to true/false...
            //instr->InsertBefore(IR::Instr::New(Js::OpCode::Neg_I4, condOpnd, condOpnd, instr->m_func));
            invertTarget = invertTarget ? false : true;
        }

        if (!invertTarget)
        {
            trueOffset = targetLabel->GetByteCodeOffset();
            falseOffset = bailOutInfo->bailOutOffset;
        }
        else
        {
            falseOffset = targetLabel->GetByteCodeOffset();
            trueOffset = bailOutInfo->bailOutOffset;
        }

        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BranchBailOutRecord, trueOffset, falseOffset, branchInstr->GetByteCodeReg(), instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBranchBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBranchBailOut;
        }
#endif

        // Save the condition. The register allocator will generate arguments.
        bailOutInfo->branchConditionOpnd = branchInstr->GetSrc1()->Copy(branchInstr->m_func);
    }
    else
    {
        if (bailOutInstr->GetBailOutKind() == IR::BailOutShared)
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                SharedBailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
        }
        else
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                BailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
        }

        helperMethod = IR::HelperSaveAllRegistersAndBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBailOut;
        }
#endif
    }

    // Save the bailout record. The register allocator will generate arguments.
    bailOutInfo->bailOutRecord = bailOutRecord;
#if ENABLE_DEBUG_CONFIG_OPTIONS
    bailOutRecord->bailOutOpcode = bailOutInfo->bailOutOpcode;
#endif

    if (instr->m_opcode == Js::OpCode::BailOnNotStackArgs && instr->GetSrc1())
    {
        // src1 on BailOnNotStackArgs is helping CSE
        instr->FreeSrc1();
    }
    if (instr->GetSrc2() != nullptr)
    {
        // Ideally we should never be in this situation but in case we reached a
        // condition where we didn't free src2, free it here.
        instr->FreeSrc2();
    }

    // Call the bail out wrapper
    instr->m_opcode = Js::OpCode::Call;
    if(instr->GetDst())
    {
        // To facilitate register allocation, don't assign a destination. The result will anyway go into the return register,
        // but the register allocator does not need to kill that register for the call.
        instr->FreeDst();
    }
    instr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
    m_lowererMD.LowerCall(instr, 0);

    if (bailOutInstr->GetBailOutKind() != IR::BailOutForGeneratorYield)
    {
        // Defer introducing the JMP to epilog until LowerPrologEpilog phase for Yield bailouts so
        // that Yield does not appear to have flow out of its containing block for the RegAlloc phase.
        // Yield is an unconditional bailout but we want to simulate the flow as if the Yield were
        // just like a call.
        GenerateJumpToEpilogForBailOut(bailOutInfo, instr);
    }

    return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
}
  11355. void
  11356. Lowerer::GenerateJumpToEpilogForBailOut(BailOutInfo * bailOutInfo, IR::Instr *instr)
  11357. {
  11358. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  11359. // JMP to the epilog
  11360. IR::LabelInstr * exitTargetInstr;
  11361. if (exitPrevInstr->IsLabelInstr())
  11362. {
  11363. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  11364. }
  11365. else
  11366. {
  11367. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  11368. exitPrevInstr->InsertAfter(exitTargetInstr);
  11369. }
  11370. exitTargetInstr = m_lowererMD.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  11371. IR::Instr * instrAfter = instr->m_next;
  11372. IR::BranchInstr * exitInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, this->m_func);
  11373. instrAfter->InsertBefore(exitInstr);
  11374. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastCondBranch
///
///----------------------------------------------------------------------------
bool
Lowerer::GenerateFastCondBranch(IR::BranchInstr * instrBranch, bool *pIsHelper)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints
    //
    // Given:
    //
    //      Brxx_A $L, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    //      Jxx $L, src1, src2
    //      JMP $fallthru
    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:
    //
    // Returns true when the caller still needs to generate the helper call sequence
    // (and sets *pIsHelper accordingly); returns false when the fast path fully
    // replaced the branch.
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru;
    IR::BranchInstr * instr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrBranch->GetSrc1();
    opndSrc2 = instrBranch->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "BrC expects 2 src operands");

    // Not tagged ints?
    // If either side is known not to be an int, there is no fast path; leave the branch
    // for the normal helper lowering.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints?
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        // Not provably tagged ints: emit the runtime tagged-int pair test that jumps to
        // the helper when either operand is not a tagged int.
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        this->m_lowererMD.GenerateSmIntPairTest(instrBranch, opndSrc1, opndSrc2, labelHelper);
    }

    //      Jxx $L, src1, src2
    // Compare the int32 views of the operands directly.
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    instr = IR::BranchInstr::New(instrBranch->m_opcode, instrBranch->GetTarget(), opndSrc1, opndSrc2, this->m_func);
    instrBranch->InsertBefore(instr);
    this->m_lowererMD.LowerCondBranch(instr);

    if (isTaggedInts)
    {
        // Fast path is unconditionally taken; the original branch (and helper) is dead.
        instrBranch->Remove();

        // Skip lowering call to helper
        return false;
    }

    //      JMP $fallthru
    IR::Instr *instrNext = instrBranch->GetNextRealInstrOrLabel();
    if (instrNext->IsLabelInstr())
    {
        labelFallThru = instrNext->AsLabelInstr();
    }
    else
    {
        labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /**pIsHelper*/FALSE);
        instrBranch->InsertAfter(labelFallThru);
    }
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallThru, this->m_func);
    instrBranch->InsertBefore(instr);

    // $helper:
    //      (caller will generate normal helper call sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrBranch->InsertBefore(labelHelper);
    *pIsHelper = true;
    return true;
}
void
Lowerer::LowerInlineeStart(IR::Instr * inlineeStartInstr)
{
    // Lowers InlineeStart: converts the inlinee's ArgOut chain into plain moves (or marks them
    // for elimination) and materializes the inline frame's meta args (argc slot, function
    // object, etc.) as assignments before the InlineeStart.
    IR::Opnd *linkOpnd = inlineeStartInstr->GetSrc2();
    if (!linkOpnd)
    {
        // No arg link: the arguments stack was optimized away; nothing to lower here.
        Assert(inlineeStartInstr->m_func->m_hasInlineArgsOpt);
        return;
    }

    AssertMsg(inlineeStartInstr->m_func->firstActualStackOffset != -1, "This should have been already done in backward pass");

    IR::Instr *startCall;

    // Free the argOut links and lower them to MOVs
    inlineeStartInstr->IterateArgInstrs([&](IR::Instr* argInstr){
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        startCall = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        argInstr->FreeSrc2();
#pragma prefast(suppress:6235, "Non-Zero Constant in Condition")
        if (!PHASE_ON(Js::EliminateArgoutForInlineePhase, this->m_func) || inlineeStartInstr->m_func->GetJITFunctionBody()->HasOrParentHasArguments())
        {
            m_lowererMD.ChangeToAssign(argInstr);
        }
        else
        {
            // Defer: mark the ArgOut so a later phase can eliminate it.
            argInstr->m_opcode = Js::OpCode::ArgOut_A_InlineBuiltIn;
        }
        return false;
    });

    IR::Instr *argInsertInstr = inlineeStartInstr;
    uint i = 0;
    inlineeStartInstr->IterateMetaArgs( [&] (IR::Instr* metaArg)
    {
        if(i == 0)
        {
            // Null out the next inline frame's argc slot before writing this frame's meta args.
            LowererMD::CreateAssign(metaArg->m_func->GetNextInlineeFrameArgCountSlotOpnd(),
                IR::AddrOpnd::NewNull(metaArg->m_func),
                argInsertInstr);
        }

        if (i == Js::Constants::InlineeMetaArgIndex_FunctionObject)
        {
            // The function-object meta arg gets the inlinee function from the InlineeStart's src1.
            metaArg->SetSrc1(inlineeStartInstr->GetSrc1());
        }
        metaArg->Unlink();
        argInsertInstr->InsertBefore(metaArg);
        IR::Instr* prev = metaArg->m_prev;
        m_lowererMD.ChangeToAssign(metaArg);
        if (i == Js::Constants::InlineeMetaArgIndex_Argc)
        {
#if defined(_M_IX86) || defined(_M_X64)
            Assert(metaArg == prev->m_next);
#else //defined(_M_ARM)
            Assert(prev->m_next->m_opcode == Js::OpCode::LDIMM);
#endif
            // The argc constant marks the inline frame start; keep it unencoded and tag it so
            // later phases can recognize the inlinee entry.
            metaArg = prev->m_next;
            Assert(metaArg->GetSrc1()->AsIntConstOpnd()->m_dontEncode == true);
            metaArg->isInlineeEntryInstr = true;
            LowererMD::Legalize(metaArg);
        }
        argInsertInstr = metaArg;
        i++;
        return false;
    });

    if (inlineeStartInstr->m_func->m_hasInlineArgsOpt)
    {
        // Keep the InlineeStart shell for the register allocator, but drop its operands.
        inlineeStartInstr->FreeSrc1();
        inlineeStartInstr->FreeSrc2();
        inlineeStartInstr->FreeDst();
    }
    else
    {
        inlineeStartInstr->Remove();
    }
}
  11534. void
  11535. Lowerer::LowerInlineeEnd(IR::Instr *instr)
  11536. {
  11537. Assert(instr->m_func->IsInlinee());
  11538. Assert(m_func->IsTopFunc());
  11539. // No need to emit code if the function wasn't marked as having implicit calls or bailout. Dead-Store should have removed inline overhead.
  11540. if (instr->m_func->GetHasImplicitCalls() || PHASE_OFF(Js::DeadStorePhase, this->m_func))
  11541. {
  11542. LowererMD::CreateAssign(instr->m_func->GetInlineeArgCountSlotOpnd(),
  11543. IR::IntConstOpnd::New(0, TyMachReg, instr->m_func),
  11544. instr);
  11545. }
  11546. // Keep InlineeEnd around as it is used by register allocator, if we have optimized the arguments stack
  11547. if (instr->m_func->m_hasInlineArgsOpt)
  11548. {
  11549. instr->FreeSrc1();
  11550. }
  11551. else
  11552. {
  11553. instr->Remove();
  11554. }
  11555. }
  11556. IR::Instr *
  11557. Lowerer::LoadFloatFromNonReg(IR::Opnd * opndSrc, IR::Opnd * opndDst, IR::Instr * instrInsert)
  11558. {
  11559. double value;
  11560. if (opndSrc->IsAddrOpnd())
  11561. {
  11562. Js::Var var = opndSrc->AsAddrOpnd()->m_address;
  11563. if (Js::TaggedInt::Is(var))
  11564. {
  11565. value = Js::TaggedInt::ToDouble(var);
  11566. }
  11567. else
  11568. {
  11569. value = Js::JavascriptNumber::GetValue(var);
  11570. }
  11571. }
  11572. else if (opndSrc->IsIntConstOpnd())
  11573. {
  11574. if (opndSrc->IsUInt32())
  11575. {
  11576. value = (double)(uint32)opndSrc->AsIntConstOpnd()->GetValue();
  11577. }
  11578. else
  11579. {
  11580. value = (double)opndSrc->AsIntConstOpnd()->GetValue();
  11581. }
  11582. }
  11583. else if (opndSrc->IsFloatConstOpnd())
  11584. {
  11585. value = (double)opndSrc->AsFloatConstOpnd()->m_value;
  11586. }
  11587. else
  11588. {
  11589. AssertMsg(0, "Unexpected opnd type");
  11590. value = 0;
  11591. }
  11592. return LowererMD::LoadFloatValue(opndDst, value, instrInsert);
  11593. }
// Given an instruction "dst(int32) = src(var)" where src is known NOT to be a
// tagged int, emits a helper call that produces the Var's int32 value when it
// is exactly representable as a nonzero int32. On a nonzero result we branch
// to the continue label; a zero result falls through to slow-path code the
// caller generates after 'instrLoad'.
// NOTE(review): zero appears to double as the "no int32 value" sentinel (the
// helper name says "Nonzero"), so a Var holding 0 takes the slow path — confirm.
void
Lowerer::LoadInt32FromUntaggedVar(IR::Instr *const instrLoad)
{
    Assert(instrLoad);
    Assert(instrLoad->GetDst());
    Assert(instrLoad->GetDst()->IsRegOpnd());
    Assert(instrLoad->GetDst()->IsInt32());
    Assert(instrLoad->GetSrc1());
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->IsVar());
    Assert(!instrLoad->GetSrc2());

    // Code shape:
    //     push src
    //     int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    //     test int32Value, int32Value
    //     jne $done
    //     (fall through to 'instrLoad'; caller will generate code here)
    //   $done:
    //     (rest of program)

    Func *const func = instrLoad->m_func;
    IR::LabelInstr *const doneLabel = instrLoad->GetOrCreateContinueLabel();

    // push src
    // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    // The call targets the same stack sym as instrLoad's destination, so the
    // helper result lands directly in the final dst register.
    StackSym *const int32ValueSym = instrLoad->GetDst()->AsRegOpnd()->m_sym;
    IR::Instr *const instr =
        IR::Instr::New(
            Js::OpCode::Call,
            IR::RegOpnd::New(int32ValueSym, TyInt32, func),
            instrLoad->GetSrc1()->AsRegOpnd(),
            func);
    instrLoad->InsertBefore(instr);
    LowerUnaryHelper(instr, IR::HelperGetNonzeroInt32Value_NoTaggedIntCheck);

    // test int32Value, int32Value
    // jne $done
    InsertCompareBranch(
        IR::RegOpnd::New(int32ValueSym, TyInt32, func),
        IR::IntConstOpnd::New(0, TyInt32, func, true),
        Js::OpCode::BrNeq_A,
        doneLabel,
        instrLoad);
}
  11634. bool
  11635. Lowerer::GetValueFromIndirOpnd(IR::IndirOpnd *indirOpnd, IR::Opnd **pValueOpnd, IntConstType *pValue)
  11636. {
  11637. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  11638. IR::Opnd* valueOpnd = nullptr;
  11639. IntConstType value = 0;
  11640. if (!indexOpnd)
  11641. {
  11642. value = (IntConstType)indirOpnd->GetOffset();
  11643. if (value < 0)
  11644. {
  11645. // Can't do fast path for negative index
  11646. return false;
  11647. }
  11648. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  11649. }
  11650. else if (indexOpnd->m_sym->IsIntConst())
  11651. {
  11652. value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
  11653. if (value < 0)
  11654. {
  11655. // Can't do fast path for negative index
  11656. return false;
  11657. }
  11658. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  11659. }
  11660. *pValueOpnd = valueOpnd;
  11661. *pValue = value;
  11662. return true;
  11663. }
  11664. void
  11665. Lowerer::GenerateFastBrOnObject(IR::Instr *instr)
  11666. {
  11667. Assert(instr->m_opcode == Js::OpCode::BrOnObject_A);
  11668. IR::RegOpnd *object = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  11669. IR::LabelInstr *done = instr->GetOrCreateContinueLabel();
  11670. IR::LabelInstr *target = instr->AsBranchInstr()->GetTarget();
  11671. IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  11672. IR::IntConstOpnd *typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, instr->m_func);
  11673. if (!object)
  11674. {
  11675. object = IR::RegOpnd::New(TyVar, m_func);
  11676. LowererMD::CreateAssign(object, instr->GetSrc1(), instr);
  11677. }
  11678. // TEST object, 1
  11679. // JNE $done
  11680. // MOV typeRegOpnd, [object + offset(Type)]
  11681. // CMP [typeRegOpnd + offset(TypeId)], TypeIds_LastJavascriptPrimitiveType
  11682. // JGT $target
  11683. // $done:
  11684. m_lowererMD.GenerateObjectTest(object, instr, done);
  11685. InsertMove(typeRegOpnd,
  11686. IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
  11687. instr);
  11688. InsertCompareBranch(
  11689. IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
  11690. typeIdOpnd, Js::OpCode::BrGt_A, target, instr);
  11691. instr->Remove();
  11692. }
// Branches to 'target' when baseOpnd's type handler stores inline slots at the
// object-header-inlined offset — such a layout has no object array, so the
// caller's "has ObjArray" fast path cannot apply. Loads the type, then the
// type handler, then compares the handler's offsetOfInlineSlots field.
void Lowerer::GenerateObjectHeaderInliningTest(IR::RegOpnd *baseOpnd, IR::LabelInstr * target,IR::Instr *insertBeforeInstr)
{
    Assert(baseOpnd);
    Assert(target);
    AssertMsg(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray,
        "Why are we here, when the object is already known not to have an ObjArray");
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // mov type, [base + offsetOf(type)]
    // 'opnd' is deliberately reused: it first holds the type, then is
    // overwritten with the type handler loaded through it.
    IR::RegOpnd *const opnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowererMD.CreateAssign(
        opnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfType(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    // mov typeHandler, [type + offsetOf(typeHandler)]
    m_lowererMD.CreateAssign(
        opnd,
        IR::IndirOpnd::New(
            opnd,
            Js::DynamicType::GetOffsetOfTypeHandler(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    // 16-bit operands: offsetOfInlineSlots is stored as a 16-bit field.
    IR::IndirOpnd * offsetOfInlineSlotOpnd = IR::IndirOpnd::New(opnd,Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyInt16, func);
    IR::IntConstOpnd * objHeaderInlinedSlotOffset = IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyInt16, func);

    // CMP [typeHandler + offsetOf(offsetOfInlineSlots)], objHeaderInlinedSlotOffset
    // JEQ $target
    InsertCompareBranch(
        offsetOfInlineSlotOpnd,
        objHeaderInlinedSlotOffset,
        Js::OpCode::BrEq_A,
        target,
        insertBeforeInstr);
}
  11732. void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper)
  11733. {
  11734. Assert(srcReg);
  11735. if (!srcReg->IsNotTaggedValue())
  11736. {
  11737. m_lowererMD.GenerateObjectTest(srcReg, instrInsert, labelHelper);
  11738. }
  11739. // CMP [srcReg], Js::DynamicObject::`vtable'
  11740. // JNE $helper
  11741. InsertCompareBranch(
  11742. IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
  11743. LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
  11744. Js::OpCode::BrNeq_A,
  11745. labelHelper,
  11746. instrInsert);
  11747. }
// Vtable constant used to identity-check each ObjectType, indexed by
// ValueType::TSize(ObjectType). Entries with no single identifying vtable
// (plain objects, regexp, uninitialized) are VtableInvalid; mixed typed
// arrays map to the non-virtual typed-array vtables.
const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ VTableValue::VtableInvalid,
    /* ObjectType::Object */ VTableValue::VtableInvalid,
    /* ObjectType::RegExp */ VTableValue::VtableInvalid,
    /* ObjectType::ObjectWithArray */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Array */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Int8Array */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8Array */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedArray */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16Array */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16Array */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32Array */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32Array */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32Array */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64Array */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int8VirtualArray */ VTableValue::VtableInt8VirtualArray,
    /* ObjectType::Uint8VirtualArray */ VTableValue::VtableUint8VirtualArray,
    /* ObjectType::Uint8ClampedVirtualArray */ VTableValue::VtableUint8ClampedVirtualArray,
    /* ObjectType::Int16VirtualArray */ VTableValue::VtableInt16VirtualArray,
    /* ObjectType::Uint16VirtualArray */ VTableValue::VtableUint16VirtualArray,
    /* ObjectType::Int32VirtualArray */ VTableValue::VtableInt32VirtualArray,
    /* ObjectType::Uint32VirtualArray */ VTableValue::VtableUint32VirtualArray,
    /* ObjectType::Float32VirtualArray */ VTableValue::VtableFloat32VirtualArray,
    /* ObjectType::Float64VirtualArray */ VTableValue::VtableFloat64VirtualArray,
    /* ObjectType::Int8MixedArray */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8MixedArray */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedMixedArray */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16MixedArray */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16MixedArray */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32MixedArray */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32MixedArray */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32MixedArray */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64MixedArray */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int64Array */ VTableValue::VtableInt64Array,
    /* ObjectType::Uint64Array */ VTableValue::VtableUint64Array,
    /* ObjectType::BoolArray */ VTableValue::VtableBoolArray,
    /* ObjectType::CharArray */ VTableValue::VtableCharArray
};
// Byte offset of the element-storage pointer per ObjectType, indexed by
// ValueType::TSize(ObjectType): the head segment for JavascriptArray kinds,
// the buffer for typed arrays; -1 for types with no array storage.
const uint32 Lowerer::OffsetsOfHeadSegment[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<uint32>(-1),
    /* ObjectType::Object */ static_cast<uint32>(-1),
    /* ObjectType::RegExp */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Array */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Int8Array */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8Array */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedArray */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16Array */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16Array */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32Array */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32Array */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32Array */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64Array */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int8VirtualArray */ Js::Int8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8VirtualArray */ Js::Uint8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedVirtualArray */ Js::Uint8ClampedVirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16VirtualArray */ Js::Int16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint16VirtualArray */ Js::Uint16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int32VirtualArray */ Js::Int32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint32VirtualArray */ Js::Uint32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float32VirtualArray */ Js::Float32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float64VirtualArray */ Js::Float64VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int8MixedArray */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8MixedArray */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedMixedArray */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16MixedArray */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16MixedArray */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32MixedArray */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32MixedArray */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32MixedArray */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64MixedArray */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int64Array */ Js::Int64Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint64Array */ Js::Uint64Array::GetOffsetOfBuffer(),
    /* ObjectType::BoolArray */ Js::BoolArray::GetOffsetOfBuffer(),
    /* ObjectType::CharArray */ Js::CharArray::GetOffsetOfBuffer()
};
// Byte offset of the length field per ObjectType, indexed by
// ValueType::TSize(ObjectType); -1 for types with no array length.
const uint32 Lowerer::OffsetsOfLength[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<uint32>(-1),
    /* ObjectType::Object */ static_cast<uint32>(-1),
    /* ObjectType::RegExp */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Array */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Int8Array */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8Array */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedArray */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16Array */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16Array */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32Array */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32Array */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32Array */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64Array */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int8VirtualArray */ Js::Int8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8VirtualArray */ Js::Uint8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedVirtualArray */ Js::Uint8ClampedVirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int16VirtualArray */ Js::Int16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint16VirtualArray */ Js::Uint16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int32VirtualArray */ Js::Int32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint32VirtualArray */ Js::Uint32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float32VirtualArray */ Js::Float32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float64VirtualArray */ Js::Float64VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int8MixedArray */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8MixedArray */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedMixedArray */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16MixedArray */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16MixedArray */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32MixedArray */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32MixedArray */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32MixedArray */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64MixedArray */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int64Array */ Js::Int64Array::GetOffsetOfLength(),
    /* ObjectType::Uint64Array */ Js::Uint64Array::GetOffsetOfLength(),
    /* ObjectType::BoolArray */ Js::BoolArray::GetOffsetOfLength(),
    /* ObjectType::CharArray */ Js::CharArray::GetOffsetOfLength()
};
// Element IR type used for indexed loads/stores per ObjectType, indexed by
// ValueType::TSize(ObjectType); TyIllegal for non-array types. JavascriptArray
// kinds store Vars; typed arrays store their native element type.
const IRType Lowerer::IndirTypes[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ TyIllegal,
    /* ObjectType::Object */ TyIllegal,
    /* ObjectType::RegExp */ TyIllegal,
    /* ObjectType::ObjectWithArray */ TyVar,
    /* ObjectType::Array */ TyVar,
    /* ObjectType::Int8Array */ TyInt8,
    /* ObjectType::Uint8Array */ TyUint8,
    /* ObjectType::Uint8ClampedArray */ TyUint8,
    /* ObjectType::Int16Array */ TyInt16,
    /* ObjectType::Uint16Array */ TyUint16,
    /* ObjectType::Int32Array */ TyInt32,
    /* ObjectType::Uint32Array */ TyUint32,
    /* ObjectType::Float32Array */ TyFloat32,
    /* ObjectType::Float64Array */ TyFloat64,
    /* ObjectType::Int8VirtualArray */ TyInt8,
    /* ObjectType::Uint8VirtualArray */ TyUint8,
    /* ObjectType::Uint8ClampedVirtualArray */ TyUint8,
    /* ObjectType::Int16VirtualArray */ TyInt16,
    /* ObjectType::Uint16VirtualArray */ TyUint16,
    /* ObjectType::Int32VirtualArray */ TyInt32,
    /* ObjectType::Uint32VirtualArray */ TyUint32,
    /* ObjectType::Float32VirtualArray */ TyFloat32,
    /* ObjectType::Float64VirtualArray */ TyFloat64,
    /* ObjectType::Int8MixedArray */ TyInt8,
    /* ObjectType::Uint8MixedArray */ TyUint8,
    /* ObjectType::Uint8ClampedMixedArray */ TyUint8,
    /* ObjectType::Int16MixedArray */ TyInt16,
    /* ObjectType::Uint16MixedArray */ TyUint16,
    /* ObjectType::Int32MixedArray */ TyInt32,
    /* ObjectType::Uint32MixedArray */ TyUint32,
    /* ObjectType::Float32MixedArray */ TyFloat32,
    /* ObjectType::Float64MixedArray */ TyFloat64,
    /* ObjectType::Int64Array */ TyInt64,
    /* ObjectType::Uint64Array */ TyUint64,
    /* ObjectType::BoolArray */ TyUint8,
    /* ObjectType::CharArray */ TyUint16
};
// Index scale (log2 of element size in bytes) used for indexed access per
// ObjectType, indexed by ValueType::TSize(ObjectType); -1 for non-array
// types. JavascriptArray kinds use the machine-pointer scale (Var elements).
const BYTE Lowerer::IndirScales[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<BYTE>(-1),
    /* ObjectType::Object */ static_cast<BYTE>(-1),
    /* ObjectType::RegExp */ static_cast<BYTE>(-1),
    /* ObjectType::ObjectWithArray */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Array */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Int8Array */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8Array */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16Array */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16Array */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32Array */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32Array */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32Array */ 2, // log2(sizeof(float))
    /* ObjectType::Float64Array */ 3, // log2(sizeof(double))
    /* ObjectType::Int8VirtualArray */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8VirtualArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedVirtualArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16VirtualArray */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16VirtualArray */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32VirtualArray */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32VirtualArray */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32VirtualArray */ 2, // log2(sizeof(float))
    /* ObjectType::Float64VirtualArray */ 3, // log2(sizeof(double))
    /* ObjectType::Int8MixedArray */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8MixedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedMixedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16MixedArray */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16MixedArray */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32MixedArray */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32MixedArray */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32MixedArray */ 2, // log2(sizeof(float))
    /* ObjectType::Float64MixedArray */ 3, // log2(sizeof(double))
    /* ObjectType::Int64Array */ 3, // log2(sizeof(int64))
    /* ObjectType::Uint64Array */ 3, // log2(sizeof(uint64))
    /* ObjectType::BoolArray */ 0, // log2(sizeof(bool))
    /* ObjectType::CharArray */ 1 // log2(sizeof(char16))
};
  11943. VTableValue Lowerer::GetArrayVtableAddress(const ValueType valueType, bool getVirtual)
  11944. {
  11945. Assert(valueType.IsLikelyAnyOptimizedArray());
  11946. if(valueType.IsLikelyArrayOrObjectWithArray())
  11947. {
  11948. if(valueType.HasIntElements())
  11949. {
  11950. return VTableValue::VtableNativeIntArray;
  11951. }
  11952. else if(valueType.HasFloatElements())
  11953. {
  11954. return VTableValue::VtableNativeFloatArray;
  11955. }
  11956. }
  11957. if (getVirtual && valueType.IsLikelyMixedTypedArrayType())
  11958. {
  11959. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetMixedToVirtualTypedArrayObjectType())];
  11960. }
  11961. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11962. }
  11963. uint32 Lowerer::GetArrayOffsetOfHeadSegment(const ValueType valueType)
  11964. {
  11965. Assert(valueType.IsLikelyAnyOptimizedArray());
  11966. return OffsetsOfHeadSegment[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11967. }
  11968. uint32 Lowerer::GetArrayOffsetOfLength(const ValueType valueType)
  11969. {
  11970. Assert(valueType.IsLikelyAnyOptimizedArray());
  11971. return OffsetsOfLength[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11972. }
  11973. IRType Lowerer::GetArrayIndirType(const ValueType valueType)
  11974. {
  11975. Assert(valueType.IsLikelyAnyOptimizedArray());
  11976. if(valueType.IsLikelyArrayOrObjectWithArray())
  11977. {
  11978. if(valueType.HasIntElements())
  11979. {
  11980. return TyInt32;
  11981. }
  11982. else if(valueType.HasFloatElements())
  11983. {
  11984. return TyFloat64;
  11985. }
  11986. }
  11987. return IndirTypes[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11988. }
  11989. BYTE Lowerer::GetArrayIndirScale(const ValueType valueType)
  11990. {
  11991. Assert(valueType.IsLikelyAnyOptimizedArray());
  11992. if(valueType.IsLikelyArrayOrObjectWithArray())
  11993. {
  11994. if(valueType.HasIntElements())
  11995. {
  11996. return 2; // log2(sizeof(int32))
  11997. }
  11998. else if(valueType.HasFloatElements())
  11999. {
  12000. return 3; // log2(sizeof(double))
  12001. }
  12002. }
  12003. return IndirScales[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12004. }
  12005. int Lowerer::SimdGetElementCountFromBytes(ValueType arrValueType, uint8 dataWidth)
  12006. {
  12007. Assert(dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
  12008. Assert(arrValueType.IsTypedArray());
  12009. BYTE bpe = 1 << Lowerer::GetArrayIndirScale(arrValueType);
  12010. // round up
  12011. return (int)::ceil(((float)dataWidth) / bpe);
  12012. }
// Decides whether an inline array fast path should be emitted for arrayOpnd,
// based on its profiled value type.
//
// arrayOpnd                  - operand that is expected to hold an array.
// supportsObjectsWithArrays  - caller's fast path handles ObjectWithArray.
// supportsTypedArrays        - caller's fast path handles typed arrays.
// requiresSse2ForFloatArrays - (x86 only) fast path needs SSE2 for
//                              Float32Array/Float64Array accesses.
bool Lowerer::ShouldGenerateArrayFastPath(
    const IR::Opnd *const arrayOpnd,
    const bool supportsObjectsWithArrays,
    const bool supportsTypedArrays,
    const bool requiresSse2ForFloatArrays) const
{
    Assert(arrayOpnd);

    const ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsUninitialized())
    {
        // Don't have info about the value type, better to generate the fast path anyway
        return true;
    }
    if (!arrayValueType.IsLikelyObject())
    {
        // Never an object, or likely a string: the fast path would just miss.
        if (!arrayValueType.HasBeenObject() || arrayValueType.IsLikelyString())
        {
            return false;
        }
        //We have seen at least once there is an object in the code path. Generate fastpath hoping it to be array.
        //Its nice if we can get all the attributes set but valueType is only 16 bits. Consider expanding the same.
        return true;
    }
    if( (!supportsObjectsWithArrays && arrayValueType.GetObjectType() == ObjectType::ObjectWithArray) ||
        (!supportsTypedArrays && arrayValueType.IsLikelyTypedArray()) )
    {
        // The fast path likely would not hit
        return false;
    }
    if(arrayValueType.GetObjectType() == ObjectType::UninitializedObject)
    {
        // Don't have info about the object type, better to generate the fast path anyway
        return true;
    }
#ifdef _M_IX86
    if(requiresSse2ForFloatArrays &&
        (
            arrayValueType.GetObjectType() == ObjectType::Float32Array ||
            arrayValueType.GetObjectType() == ObjectType::Float64Array
        ) &&
        !AutoSystemInfo::Data.SSE2Available())
    {
        // Fast paths for float arrays rely on SSE2
        return false;
    }
#endif
    // Known-unoptimized array kinds would always miss the fast path.
    return !arrayValueType.IsLikelyAnyUnOptimizedArray();
}
// Emits a load of an ObjectWithArray base object's object-array slot:
//     mov array, [base + offsetOf(objectArrayOrFlags)]
// Returns a fresh RegOpnd (new stack sym) whose value type is the base's
// value type converted ToArray().
// NOTE(review): the slot name suggests it may hold flags or null instead of
// an array pointer; callers appear to test the result — confirm at call sites.
IR::RegOpnd *Lowerer::LoadObjectArray(IR::RegOpnd *const baseOpnd, IR::Instr *const insertBeforeInstr)
{
    Assert(baseOpnd);
    Assert(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // mov array, [base + offsetOf(objectArrayOrFlags)]
    // Copy the base opnd (preserving array-opnd info when present), then give
    // it a new sym so it names the loaded array value, not the base object.
    IR::RegOpnd *const arrayOpnd =
        baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd()->CopyAsRegOpnd(func) : baseOpnd->Copy(func)->AsRegOpnd();
    arrayOpnd->m_sym = StackSym::New(TyVar, func);
    arrayOpnd->SetValueType(arrayOpnd->GetValueType().ToArray());
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func, false /* autoDelete */);
    m_lowererMD.CreateAssign(
        arrayOpnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfObjectArray(),
            arrayOpnd->GetType(),
            func),
        insertBeforeInstr);
    return arrayOpnd;
}
  12085. void
  12086. Lowerer::GenerateIsEnabledArraySetElementFastPathCheck(
  12087. IR::LabelInstr * isDisabledLabel,
  12088. IR::Instr * const insertBeforeInstr)
  12089. {
  12090. InsertCompareBranch(
  12091. this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable),
  12092. LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableInvalid),
  12093. Js::OpCode::BrEq_A,
  12094. isDisabledLabel,
  12095. insertBeforeInstr);
  12096. }
  12097. IR::RegOpnd *Lowerer::GenerateArrayTest(
  12098. IR::RegOpnd *const baseOpnd,
  12099. IR::LabelInstr *const isNotObjectLabel,
  12100. IR::LabelInstr *const isNotArrayLabel,
  12101. IR::Instr *const insertBeforeInstr,
  12102. const bool forceFloat,
  12103. const bool isStore,
  12104. const bool allowDefiniteArray)
  12105. {
  12106. Assert(baseOpnd);
  12107. const ValueType baseValueType(baseOpnd->GetValueType());
  12108. // Shouldn't request to do an array test when it's already known to be an array, or if it's unlikely to be an array
  12109. Assert(!baseValueType.IsAnyOptimizedArray() || allowDefiniteArray || baseValueType.IsNativeArray());
  12110. Assert(baseValueType.IsUninitialized() || baseValueType.HasBeenObject());
  12111. Assert(isNotObjectLabel);
  12112. Assert(isNotArrayLabel);
  12113. Assert(insertBeforeInstr);
  12114. Func *const func = insertBeforeInstr->m_func;
  12115. IR::RegOpnd *arrayOpnd;
  12116. IR::AutoReuseOpnd autoReuseArrayOpnd;
  12117. if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
  12118. {
  12119. // Only DynamicObject is allowed (DynamicObject vtable is ensured) because some object types have special handling for
  12120. // index properties - arguments object, string object, external object, etc.
  12121. GenerateObjectTypeTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
  12122. GenerateObjectHeaderInliningTest(baseOpnd, isNotArrayLabel, insertBeforeInstr);
  12123. arrayOpnd = LoadObjectArray(baseOpnd, insertBeforeInstr);
  12124. autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
  12125. // test array, array
  12126. // je $isNotArrayLabel
  12127. // test array, 1
  12128. // jne $isNotArrayLabel
  12129. InsertTestBranch(
  12130. arrayOpnd,
  12131. arrayOpnd,
  12132. Js::OpCode::BrEq_A,
  12133. isNotArrayLabel,
  12134. insertBeforeInstr);
  12135. InsertTestBranch(
  12136. arrayOpnd,
  12137. IR::IntConstOpnd::New(1, TyUint8, func, true),
  12138. Js::OpCode::BrNeq_A,
  12139. isNotArrayLabel,
  12140. insertBeforeInstr);
  12141. }
  12142. else
  12143. {
  12144. if(!baseOpnd->IsNotTaggedValue())
  12145. {
  12146. m_lowererMD.GenerateObjectTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
  12147. }
  12148. arrayOpnd = baseOpnd->Copy(func)->AsRegOpnd();
  12149. if(!baseValueType.IsLikelyAnyOptimizedArray())
  12150. {
  12151. arrayOpnd->SetValueType(
  12152. ValueType::GetObject(ObjectType::Array)
  12153. .ToLikely()
  12154. .SetHasNoMissingValues(false)
  12155. .SetArrayTypeId(Js::TypeIds_Array));
  12156. }
  12157. autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
  12158. }
  12159. VTableValue vtableAddress = baseValueType.IsLikelyAnyOptimizedArray()
  12160. ? GetArrayVtableAddress(baseValueType)
  12161. : VTableValue::VtableJavascriptArray;
  12162. VTableValue virtualVtableAddress = VTableValue::VtableInvalid;
  12163. if (baseValueType.IsLikelyMixedTypedArrayType())
  12164. {
  12165. virtualVtableAddress = GetArrayVtableAddress(baseValueType, true);
  12166. }
  12167. IR::Opnd * vtableOpnd;
  12168. IR::Opnd * vtableVirtualOpnd = nullptr;
  12169. if (isStore &&
  12170. (vtableAddress == VTableValue::VtableJavascriptArray ||
  12171. baseValueType.IsLikelyNativeArray()))
  12172. {
  12173. vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
  12174. if (baseValueType.IsLikelyNativeArray())
  12175. {
  12176. if (baseValueType.HasIntElements())
  12177. {
  12178. InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable), insertBeforeInstr);
  12179. }
  12180. else
  12181. {
  12182. Assert(baseValueType.HasFloatElements());
  12183. InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable), insertBeforeInstr);
  12184. }
  12185. }
  12186. else
  12187. {
  12188. InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable), insertBeforeInstr);
  12189. }
  12190. }
  12191. else
  12192. {
  12193. vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, vtableAddress);
  12194. }
  12195. // cmp [array], vtableAddress
  12196. // jne $isNotArrayLabel
  12197. if (forceFloat && baseValueType.IsLikelyNativeFloatArray())
  12198. {
  12199. // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
  12200. const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  12201. InsertCompareBranch(
  12202. IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
  12203. vtableOpnd,
  12204. Js::OpCode::BrEq_A,
  12205. goodArrayLabel,
  12206. insertBeforeInstr);
  12207. IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  12208. insertBeforeInstr->InsertBefore(notFloatArrayLabel);
  12209. if (isStore)
  12210. {
  12211. vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
  12212. InsertMove(vtableOpnd, IR::MemRefOpnd::New(
  12213. func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(),
  12214. TyMachPtr, func), insertBeforeInstr);
  12215. }
  12216. else
  12217. {
  12218. vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
  12219. }
  12220. InsertCompareBranch(
  12221. IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
  12222. vtableOpnd,
  12223. Js::OpCode::BrNeq_A,
  12224. isNotArrayLabel,
  12225. insertBeforeInstr);
  12226. m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
  12227. IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  12228. insertBeforeInstr->InsertBefore(helperInstr);
  12229. m_lowererMD.ChangeToHelperCall(helperInstr, IR::HelperIntArr_ToNativeFloatArray);
  12230. // Branch to the (bailout) label, because converting the array may have made our array checks unsafe.
  12231. InsertBranch(Js::OpCode::Br, isNotArrayLabel, insertBeforeInstr);
  12232. insertBeforeInstr->InsertBefore(goodArrayLabel);
  12233. }
  12234. else
  12235. {
  12236. IR::LabelInstr* goodArrayLabel = nullptr;
  12237. if (baseValueType.IsLikelyMixedTypedArrayType())
  12238. {
  12239. goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  12240. InsertCompareBranch(
  12241. IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
  12242. vtableOpnd,
  12243. Js::OpCode::BrEq_A,
  12244. goodArrayLabel,
  12245. insertBeforeInstr);
  12246. Assert(virtualVtableAddress);
  12247. vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
  12248. Assert(vtableVirtualOpnd);
  12249. InsertCompareBranch(
  12250. IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
  12251. vtableVirtualOpnd,
  12252. Js::OpCode::BrNeq_A,
  12253. isNotArrayLabel,
  12254. insertBeforeInstr);
  12255. insertBeforeInstr->InsertBefore(goodArrayLabel);
  12256. }
  12257. else
  12258. {
  12259. InsertCompareBranch(
  12260. IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
  12261. vtableOpnd,
  12262. Js::OpCode::BrNeq_A,
  12263. isNotArrayLabel,
  12264. insertBeforeInstr);
  12265. }
  12266. }
  12267. ValueType arrayValueType(arrayOpnd->GetValueType());
  12268. if(arrayValueType.IsLikelyArrayOrObjectWithArray() && !arrayValueType.IsObject())
  12269. {
  12270. arrayValueType = arrayValueType.SetHasNoMissingValues(false);
  12271. }
  12272. arrayValueType = arrayValueType.ToDefiniteObject();
  12273. arrayOpnd->SetValueType(arrayValueType);
  12274. return arrayOpnd;
  12275. }
  12276. IR::LabelInstr *Lowerer::InsertLabel(const bool isHelper, IR::Instr *const insertBeforeInstr)
  12277. {
  12278. Assert(insertBeforeInstr);
  12279. Func *const func = insertBeforeInstr->m_func;
  12280. IR::LabelInstr *const instr = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  12281. insertBeforeInstr->InsertBefore(instr);
  12282. return instr;
  12283. }
  12284. IR::Instr *Lowerer::InsertMoveWithBarrier(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  12285. {
  12286. return Lowerer::InsertMove(dst, src, insertBeforeInstr, true);
  12287. }
  12288. IR::Instr *Lowerer::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr, bool generateWriteBarrier)
  12289. {
  12290. Assert(dst);
  12291. Assert(src);
  12292. Assert(insertBeforeInstr);
  12293. Func *const func = insertBeforeInstr->m_func;
  12294. if(dst->IsFloat() && src->IsConstOpnd())
  12295. {
  12296. return LoadFloatFromNonReg(src, dst, insertBeforeInstr);
  12297. }
  12298. if(TySize[dst->GetType()] < TySize[src->GetType()])
  12299. {
  12300. src = src->UseWithNewType(dst->GetType(), func);
  12301. }
  12302. IR::Instr * instr = IR::Instr::New(Js::OpCode::Ld_A, dst, src, func);
  12303. insertBeforeInstr->InsertBefore(instr);
  12304. if (generateWriteBarrier)
  12305. {
  12306. instr = LowererMD::ChangeToWriteBarrierAssign(instr, func);
  12307. }
  12308. else
  12309. {
  12310. LowererMD::ChangeToAssignNoBarrierCheck(instr);
  12311. }
  12312. return instr;
  12313. }
  12314. IR::BranchInstr *Lowerer::InsertBranch(
  12315. const Js::OpCode opCode,
  12316. IR::LabelInstr *const target,
  12317. IR::Instr *const insertBeforeInstr)
  12318. {
  12319. return InsertBranch(opCode, false /* isUnsigned */, target, insertBeforeInstr);
  12320. }
  12321. IR::BranchInstr *Lowerer::InsertBranch(
  12322. const Js::OpCode opCode,
  12323. const bool isUnsigned,
  12324. IR::LabelInstr *const target,
  12325. IR::Instr *const insertBeforeInstr)
  12326. {
  12327. Assert(target);
  12328. Assert(insertBeforeInstr);
  12329. Func *const func = insertBeforeInstr->m_func;
  12330. IR::BranchInstr *const instr = IR::BranchInstr::New(opCode, target, func);
  12331. if(!instr->IsLowered())
  12332. {
  12333. if(opCode == Js::OpCode::Br)
  12334. {
  12335. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  12336. }
  12337. else if(isUnsigned)
  12338. {
  12339. instr->m_opcode = LowererMD::MDUnsignedBranchOpcode(opCode);
  12340. }
  12341. else
  12342. {
  12343. instr->m_opcode = LowererMD::MDBranchOpcode(opCode);
  12344. }
  12345. }
  12346. insertBeforeInstr->InsertBefore(instr);
  12347. return instr;
  12348. }
  12349. IR::Instr *Lowerer::InsertCompare(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  12350. {
  12351. Assert(src1);
  12352. Assert(!src1->IsFloat64()); // not implemented
  12353. Assert(src2);
  12354. Assert(!src2->IsFloat64()); // not implemented
  12355. Assert(!src1->IsEqual(src2));
  12356. Assert(insertBeforeInstr);
  12357. Func *const func = insertBeforeInstr->m_func;
  12358. IR::Instr *const instr = IR::Instr::New(Js::OpCode::CMP, func);
  12359. instr->SetSrc1(src1);
  12360. instr->SetSrc2(src2);
  12361. insertBeforeInstr->InsertBefore(instr);
  12362. LowererMD::Legalize(instr);
  12363. return instr;
  12364. }
  12365. IR::BranchInstr *Lowerer::InsertCompareBranch(
  12366. IR::Opnd *const compareSrc1,
  12367. IR::Opnd *const compareSrc2,
  12368. Js::OpCode branchOpCode,
  12369. IR::LabelInstr *const target,
  12370. IR::Instr *const insertBeforeInstr,
  12371. const bool ignoreNaN)
  12372. {
  12373. return InsertCompareBranch(compareSrc1, compareSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr, ignoreNaN);
  12374. }
// Inserts a compare (or test) followed by a conditional branch to target,
// before insertBeforeInstr.
//
// Float comparisons and (on x86) int64 comparisons are handed off to the
// machine-dependent lowering paths. For ordinary integer compares this
// routine applies two peepholes:
//   1. If src1 is a constant and src2 is not, the sources are swapped (and
//      the branch condition mirrored) so the constant can be folded into the
//      CMP's second operand.
//   2. A compare of a register against zero is emitted as TEST reg, reg
//      instead of CMP, when the branch condition permits it.
// Returns the inserted branch instruction.
IR::BranchInstr *Lowerer::InsertCompareBranch(
    IR::Opnd *compareSrc1,
    IR::Opnd *compareSrc2,
    Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr,
    const bool ignoreNaN)
{
    Assert(compareSrc1);
    Assert(compareSrc2);

    Func *const func = insertBeforeInstr->m_func;

    if(compareSrc1->IsFloat())
    {
        Assert(compareSrc2->IsFloat());
        Assert(!isUnsigned);
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        return LowererMD::LowerFloatCondBranch(instr, ignoreNaN);
    }
#ifdef _M_IX86
    else if (compareSrc1->IsInt64())
    {
        // On 32-bit x86, a 64-bit compare-branch is expanded by the MD lowerer.
        Assert(compareSrc2->IsInt64());
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        m_lowererMD.EmitInt64Instr(instr);
        return instr;
    }
#endif

    // For each relational opcode, the opcode to use if the sources are swapped.
    Js::OpCode swapSrcsBranchOpCode;
    switch(branchOpCode)
    {
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
            swapSrcsBranchOpCode = branchOpCode; // symmetric conditions
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrGe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLe_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrGt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLt_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrLe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGe_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrLt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGt_A;
            // fall through

        Common_BrEqNeqGeGtLeLt:
            // Check if src1 is a constant and src2 is not, and facilitate folding the constant into the Cmp instruction
            if( (
                    compareSrc1->IsIntConstOpnd() ||
                    (
                        compareSrc1->IsAddrOpnd() &&
                        Math::FitsInDWord(reinterpret_cast<size_t>(compareSrc1->AsAddrOpnd()->m_address))
                    )
                ) &&
                !compareSrc2->IsIntConstOpnd() &&
                !compareSrc2->IsAddrOpnd())
            {
                // Swap the sources and branch
                IR::Opnd *const tempSrc = compareSrc1;
                compareSrc1 = compareSrc2;
                compareSrc2 = tempSrc;
                branchOpCode = swapSrcsBranchOpCode;
            }

            // Check for compare with zero, to prefer using Test instead of Cmp
            if( !compareSrc1->IsRegOpnd() ||
                !(
                    (compareSrc2->IsIntConstOpnd() && compareSrc2->AsIntConstOpnd()->GetValue() == 0) ||
                    (compareSrc2->IsAddrOpnd() && !compareSrc2->AsAddrOpnd()->m_address)
                ) ||
                branchOpCode == Js::OpCode::BrGt_A || branchOpCode == Js::OpCode::BrLe_A)
            {
                goto Default;
            }
            if(branchOpCode == Js::OpCode::BrGe_A || branchOpCode == Js::OpCode::BrLt_A)
            {
                if(isUnsigned)
                {
                    // Unsigned >= 0 / < 0 can't be expressed via TEST's flags.
                    goto Default;
                }
                // Signed compare against zero: branch on the sign flag instead.
                branchOpCode = LowererMD::MDCompareWithZeroBranchOpcode(branchOpCode);
            }
            if(!compareSrc2->IsInUse())
            {
                // The zero operand is dropped entirely by the TEST form.
                compareSrc2->Free(func);
            }
            InsertTest(compareSrc1, compareSrc1, insertBeforeInstr);
            break;

        default:
        Default:
            InsertCompare(compareSrc1, compareSrc2, insertBeforeInstr);
            break;
    }
    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
  12473. IR::Instr *Lowerer::InsertTest(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  12474. {
  12475. Assert(src1);
  12476. Assert(!src1->IsFloat64()); // not implemented
  12477. Assert(src2);
  12478. Assert(!src2->IsFloat64()); // not implemented
  12479. #if !TARGET_64
  12480. Assert(!src1->IsInt64()); // not implemented
  12481. Assert(!src2->IsInt64()); // not implemented
  12482. #endif
  12483. Assert(insertBeforeInstr);
  12484. Func *const func = insertBeforeInstr->m_func;
  12485. IR::Instr *const instr = IR::Instr::New(LowererMD::MDTestOpcode, func);
  12486. instr->SetSrc1(src1);
  12487. instr->SetSrc2(src2);
  12488. insertBeforeInstr->InsertBefore(instr);
  12489. LowererMD::Legalize(instr);
  12490. return instr;
  12491. }
  12492. IR::BranchInstr *Lowerer::InsertTestBranch(
  12493. IR::Opnd *const testSrc1,
  12494. IR::Opnd *const testSrc2,
  12495. const Js::OpCode branchOpCode,
  12496. IR::LabelInstr *const target,
  12497. IR::Instr *const insertBeforeInstr)
  12498. {
  12499. return InsertTestBranch(testSrc1, testSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  12500. }
  12501. IR::BranchInstr *Lowerer::InsertTestBranch(
  12502. IR::Opnd *const testSrc1,
  12503. IR::Opnd *const testSrc2,
  12504. const Js::OpCode branchOpCode,
  12505. const bool isUnsigned,
  12506. IR::LabelInstr *const target,
  12507. IR::Instr *const insertBeforeInstr)
  12508. {
  12509. InsertTest(testSrc1, testSrc2, insertBeforeInstr);
  12510. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  12511. }
  12512. IR::Instr *Lowerer::InsertAdd(
  12513. const bool needFlags,
  12514. IR::Opnd *const dst,
  12515. IR::Opnd *src1,
  12516. IR::Opnd *src2,
  12517. IR::Instr *const insertBeforeInstr)
  12518. {
  12519. Assert(dst);
  12520. Assert(src1);
  12521. Assert(src2);
  12522. Assert(insertBeforeInstr);
  12523. Func *const func = insertBeforeInstr->m_func;
  12524. if(src2->IsIntConstOpnd())
  12525. {
  12526. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  12527. const IntConstType value = intConstOpnd->GetValue();
  12528. if(value < 0 && value != IntConstMin)
  12529. {
  12530. // Change (s1 = s1 + -5) into (s1 = s1 - 5)
  12531. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  12532. newSrc2->SetValue(-value);
  12533. return InsertSub(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  12534. }
  12535. }
  12536. else if(src1->IsIntConstOpnd())
  12537. {
  12538. IR::IntConstOpnd *const intConstOpnd = src1->AsIntConstOpnd();
  12539. const IntConstType value = intConstOpnd->GetValue();
  12540. if(value < 0 && value != IntConstMin)
  12541. {
  12542. // Change (s1 = -5 + s1) into (s1 = s1 - 5)
  12543. IR::Opnd *const newSrc1 = src2;
  12544. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  12545. newSrc2->SetValue(-value);
  12546. return InsertSub(needFlags, dst, newSrc1, newSrc2, insertBeforeInstr);
  12547. }
  12548. }
  12549. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Add_A, dst, src1, src2, func);
  12550. insertBeforeInstr->InsertBefore(instr);
  12551. LowererMD::ChangeToAdd(instr, needFlags);
  12552. LowererMD::Legalize(instr);
  12553. return instr;
  12554. }
  12555. IR::Instr *Lowerer::InsertSub(
  12556. const bool needFlags,
  12557. IR::Opnd *const dst,
  12558. IR::Opnd *src1,
  12559. IR::Opnd *src2,
  12560. IR::Instr *const insertBeforeInstr)
  12561. {
  12562. Assert(dst);
  12563. Assert(src1);
  12564. Assert(src2);
  12565. Assert(insertBeforeInstr);
  12566. Func *const func = insertBeforeInstr->m_func;
  12567. if(src2->IsIntConstOpnd())
  12568. {
  12569. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  12570. const IntConstType value = intConstOpnd->GetValue();
  12571. if(value < 0 && value != IntConstMin)
  12572. {
  12573. // Change (s1 = s1 - -5) into (s1 = s1 + 5)
  12574. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  12575. newSrc2->SetValue(-value);
  12576. return InsertAdd(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  12577. }
  12578. }
  12579. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Sub_A, dst, src1, src2, func);
  12580. insertBeforeInstr->InsertBefore(instr);
  12581. LowererMD::ChangeToSub(instr, needFlags);
  12582. LowererMD::Legalize(instr);
  12583. return instr;
  12584. }
  12585. IR::Instr *Lowerer::InsertLea(IR::RegOpnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr, bool postRegAlloc)
  12586. {
  12587. Assert(dst);
  12588. Assert(src);
  12589. Assert(src->IsIndirOpnd() || src->IsSymOpnd());
  12590. Assert(insertBeforeInstr);
  12591. Func *const func = insertBeforeInstr->m_func;
  12592. IR::Instr *const instr = IR::Instr::New(Js::OpCode::LEA, dst, src, func);
  12593. insertBeforeInstr->InsertBefore(instr);
  12594. return LowererMD::ChangeToLea(instr, postRegAlloc);
  12595. }
  12596. #if _M_X64
  12597. IR::Instr *Lowerer::InsertMoveBitCast(
  12598. IR::Opnd *const dst,
  12599. IR::Opnd *const src1,
  12600. IR::Instr *const insertBeforeInstr)
  12601. {
  12602. Assert(dst);
  12603. Assert(dst->GetType() == TyFloat64);
  12604. Assert(src1);
  12605. Assert(src1->GetType() == TyUint64);
  12606. Assert(insertBeforeInstr);
  12607. Func *const func = insertBeforeInstr->m_func;
  12608. IR::Instr *const instr = IR::Instr::New(LowererMD::MDMovUint64ToFloat64Opcode, dst, src1, func);
  12609. insertBeforeInstr->InsertBefore(instr);
  12610. LowererMD::Legalize(instr);
  12611. return instr;
  12612. }
  12613. #endif
  12614. IR::Instr *Lowerer::InsertXor(
  12615. IR::Opnd *const dst,
  12616. IR::Opnd *const src1,
  12617. IR::Opnd *const src2,
  12618. IR::Instr *const insertBeforeInstr)
  12619. {
  12620. Assert(dst);
  12621. Assert(src1);
  12622. Assert(src2);
  12623. Assert(insertBeforeInstr);
  12624. Func *const func = insertBeforeInstr->m_func;
  12625. IR::Instr *const instr = IR::Instr::New(LowererMD::MDXorOpcode, dst, src1, src2, func);
  12626. insertBeforeInstr->InsertBefore(instr);
  12627. LowererMD::Legalize(instr);
  12628. return instr;
  12629. }
  12630. IR::Instr *Lowerer::InsertAnd(
  12631. IR::Opnd *const dst,
  12632. IR::Opnd *const src1,
  12633. IR::Opnd *const src2,
  12634. IR::Instr *const insertBeforeInstr)
  12635. {
  12636. Assert(dst);
  12637. Assert(src1);
  12638. Assert(src2);
  12639. Assert(insertBeforeInstr);
  12640. Func *const func = insertBeforeInstr->m_func;
  12641. IR::Instr *const instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, func);
  12642. insertBeforeInstr->InsertBefore(instr);
  12643. LowererMD::Legalize(instr);
  12644. return instr;
  12645. }
  12646. IR::Instr *Lowerer::InsertOr(
  12647. IR::Opnd *const dst,
  12648. IR::Opnd *const src1,
  12649. IR::Opnd *const src2,
  12650. IR::Instr *const insertBeforeInstr)
  12651. {
  12652. Assert(dst);
  12653. Assert(src1);
  12654. Assert(src2);
  12655. Assert(insertBeforeInstr);
  12656. Func *const func = insertBeforeInstr->m_func;
  12657. IR::Instr *const instr = IR::Instr::New(LowererMD::MDOrOpcode, dst, src1, src2, func);
  12658. insertBeforeInstr->InsertBefore(instr);
  12659. LowererMD::Legalize(instr);
  12660. return instr;
  12661. }
  12662. IR::Instr *Lowerer::InsertShift(
  12663. const Js::OpCode opCode,
  12664. const bool needFlags,
  12665. IR::Opnd *const dst,
  12666. IR::Opnd *const src1,
  12667. IR::Opnd *const src2,
  12668. IR::Instr *const insertBeforeInstr)
  12669. {
  12670. Assert(dst);
  12671. Assert(!dst->IsFloat64()); // not implemented
  12672. Assert(src1);
  12673. Assert(!src1->IsFloat64()); // not implemented
  12674. Assert(src2);
  12675. Assert(!src2->IsFloat64()); // not implemented
  12676. Assert(insertBeforeInstr);
  12677. Func *const func = insertBeforeInstr->m_func;
  12678. IR::Instr *const instr = IR::Instr::New(opCode, dst, src1, src2, func);
  12679. insertBeforeInstr->InsertBefore(instr);
  12680. LowererMD::ChangeToShift(instr, needFlags);
  12681. LowererMD::Legalize(instr);
  12682. return instr;
  12683. }
  12684. IR::Instr *Lowerer::InsertShiftBranch(
  12685. const Js::OpCode shiftOpCode,
  12686. IR::Opnd *const dst,
  12687. IR::Opnd *const src1,
  12688. IR::Opnd *const src2,
  12689. const Js::OpCode branchOpCode,
  12690. IR::LabelInstr *const target,
  12691. IR::Instr *const insertBeforeInstr)
  12692. {
  12693. return InsertShiftBranch(shiftOpCode, dst, src1, src2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  12694. }
  12695. IR::Instr *Lowerer::InsertShiftBranch(
  12696. const Js::OpCode shiftOpCode,
  12697. IR::Opnd *const dst,
  12698. IR::Opnd *const src1,
  12699. IR::Opnd *const src2,
  12700. const Js::OpCode branchOpCode,
  12701. const bool isUnsigned,
  12702. IR::LabelInstr *const target,
  12703. IR::Instr *const insertBeforeInstr)
  12704. {
  12705. InsertShift(shiftOpCode, true /* needFlags */, dst, src1, src2, insertBeforeInstr);
  12706. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  12707. }
  12708. IR::Instr *Lowerer::InsertConvertFloat32ToFloat64(
  12709. IR::Opnd *const dst,
  12710. IR::Opnd *const src,
  12711. IR::Instr *const insertBeforeInstr)
  12712. {
  12713. Assert(dst);
  12714. Assert(dst->IsFloat64());
  12715. Assert(src);
  12716. Assert(src->IsFloat32());
  12717. Assert(insertBeforeInstr);
  12718. Func *const func = insertBeforeInstr->m_func;
  12719. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, src, func);
  12720. insertBeforeInstr->InsertBefore(instr);
  12721. LowererMD::Legalize(instr);
  12722. return instr;
  12723. }
  12724. IR::Instr *Lowerer::InsertConvertFloat64ToFloat32(
  12725. IR::Opnd *const dst,
  12726. IR::Opnd *const src,
  12727. IR::Instr *const insertBeforeInstr)
  12728. {
  12729. Assert(dst);
  12730. Assert(dst->IsFloat32());
  12731. Assert(src);
  12732. Assert(src->IsFloat64());
  12733. Assert(insertBeforeInstr);
  12734. Func *const func = insertBeforeInstr->m_func;
  12735. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src, func);
  12736. insertBeforeInstr->InsertBefore(instr);
  12737. LowererMD::Legalize(instr);
  12738. return instr;
  12739. }
// Emits a saturating uint32 decrement: dst = (src == 0) ? 0 : src - 1.
// The subtraction sets flags; an unsigned-borrow branch routes src == 0 to
// the overflow path, where dst is set to 0. If onOverflowInsertBeforeInstrRef
// is non-null it receives an insertion point ($continue) where the caller can
// add extra instructions that run only on the overflow path.
void Lowerer::InsertDecUInt32PreventOverflow(
    IR::Opnd *const dst,
    IR::Opnd *const src,
    IR::Instr *const insertBeforeInstr,
    IR::Instr * *const onOverflowInsertBeforeInstrRef)
{
    Assert(dst);
    Assert(dst->GetType() == TyUint32);
    Assert(src);
    Assert(src->GetType() == TyUint32);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // Generate:
    //     subs temp, src, 1
    //     bcs $overflow
    //     mov dst, temp
    //     b $continue
    // $overflow:
    //     mov dst, 0
    // $continue:

    // $overflow is created first; every instruction below is inserted before
    // it, so the emitted order matches the sketch above.
    IR::LabelInstr *const overflowLabel = Lowerer::InsertLabel(false, insertBeforeInstr);

    //     subs temp, src, 1
    IR::RegOpnd *const tempOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
    const IR::AutoReuseOpnd autoReuseTempOpnd(tempOpnd, func);
    Lowerer::InsertSub(true, tempOpnd, src, IR::IntConstOpnd::New(1, TyUint32, func, true), overflowLabel);

    //     bcs $overflow  (unsigned BrLt_A == borrow/carry-set after the sub)
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, true, overflowLabel, overflowLabel);

    //     mov dst, temp
    Lowerer::InsertMove(dst, tempOpnd, overflowLabel);

    const bool dstEqualsSrc = dst->IsEqual(src);
    if(!dstEqualsSrc || onOverflowInsertBeforeInstrRef)
    {
        //     b $continue
        // $overflow:
        //     mov dst, 0
        // $continue:
        IR::LabelInstr *const continueLabel = Lowerer::InsertLabel(false, insertBeforeInstr);
        Lowerer::InsertBranch(Js::OpCode::Br, continueLabel, overflowLabel);
        if(!dstEqualsSrc)
        {
            Lowerer::InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, func, true), continueLabel);
        }
        if(onOverflowInsertBeforeInstrRef)
        {
            // Hand the caller the $continue label as the overflow-path
            // insertion point.
            *onOverflowInsertBeforeInstrRef = continueLabel;
        }
    }
    else
    {
        // dst == src and no overflow hook: on overflow src was 0, so dst
        // already holds the saturated value and the overflow path is empty.
        // $overflow:
    }
}
// Compares a float64 src against +0.0 and branches to target either when the
// value is zero-or-NaN (branchOnZeroOrNan == true) or when it is neither
// (branchOnZeroOrNan == false). fallthroughLabel, if provided, is where
// control continues when the branch is not taken (used on ARM for the extra
// unordered check); it must differ from target.
void Lowerer::InsertFloatCheckForZeroOrNanBranch(
    IR::Opnd *const src,
    const bool branchOnZeroOrNan,
    IR::LabelInstr *const target,
    IR::LabelInstr *const fallthroughLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(src);
    Assert(src->IsFloat64());
    Assert(target);
    Assert(!fallthroughLabel || fallthroughLabel != target);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // Compare src against the canonical double 0.0 kept by the thread context.
    IR::BranchInstr *const branchOnEqualOrNotEqual =
        InsertCompareBranch(
            src,
            IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, func),
            branchOnZeroOrNan ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
            target,
            insertBeforeInstr,
            true /* ignoreNaN */);

    // x86/x64
    // When NaN is ignored, on x86 and x64, JE branches when equal or unordered since an unordered result sets the zero
    // flag, and JNE branches when not equal and not unordered. By comparing with zero, JE will branch when src is zero or
    // NaN, and JNE will branch when src is not zero and not NaN.
    //
    // ARM
    // When NaN is ignored, BEQ branches when equal and not unordered, and BNE branches when not equal or unordered. So,
    // when comparing src with zero, an unordered check needs to be added before the BEQ/BNE.
    branchOnEqualOrNotEqual; // satisfy the compiler
#ifdef _M_ARM
    // BVS: branch on the overflow flag, which ARM sets for an unordered
    // (NaN) compare — route NaN explicitly to the correct destination.
    InsertBranch(
        Js::OpCode::BVS,
        branchOnZeroOrNan
            ? target
            : fallthroughLabel ? fallthroughLabel : insertBeforeInstr->m_prev->GetOrCreateContinueLabel(),
        branchOnEqualOrNotEqual);
#endif
}
// Shared fast-path emitter for indexed element access (load when
// isStore == false, store when true). Dispatches on the index operand's
// profiled value type: likely-string indices go through the property-string
// inline-cache path, everything else through the integer-index path.
// Returns the indirect operand the caller should use for the element access,
// or null when no fast path is generated (tagged-int base, or a string index
// on a likely-typed-array base). *pIsTypedArrayElement and *pIsStringIndex
// report which path was taken; the remaining out-params/labels are
// forwarded to GenerateFastElemIIntIndexCommon.
IR::IndirOpnd *
Lowerer::GenerateFastElemICommon(
    IR::Instr * instr,
    bool isStore,
    IR::IndirOpnd * indirOpnd,
    IR::LabelInstr * labelHelper,
    IR::LabelInstr * labelCantUseArray,
    IR::LabelInstr *labelFallthrough,
    bool * pIsTypedArrayElement,
    bool * pIsStringIndex,
    bool *emitBailoutRef,
    IR::LabelInstr **pLabelSegmentLengthIncreased /*= nullptr*/,
    bool checkArrayLengthOverflow /*= true*/,
    bool forceGenerateFastPath /* = false */,
    bool returnLength,
    IR::LabelInstr *bailOutLabelInstr /* = nullptr*/,
    bool * indirOpndOverflowed /* = nullptr*/)
{
    // Initialize the out-parameters pessimistically up front.
    *pIsTypedArrayElement = false;
    *pIsStringIndex = false;
    if(pLabelSegmentLengthIncreased)
    {
        *pLabelSegmentLengthIncreased = nullptr;
    }
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    AssertMsg(baseOpnd, "This shouldn't be NULL");

    // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
    // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
    // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
    // bailouts.
    if (baseOpnd->IsTaggedInt())
    {
        // A tagged-int base can't be an array; no fast path.
        return NULL;
    }

    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (indexOpnd)
    {
        if (indexOpnd->GetValueType().IsLikelyString())
        {
            if (!baseOpnd->GetValueType().IsLikelyOptimizedTypedArray())
            {
                // If profile data says that it's a typed array - do not generate the property string fast path as the src. could be a temp and that would cause a bug.
                *pIsTypedArrayElement = false;
                *pIsStringIndex = true;
                return GenerateFastElemIStringIndexCommon(instr, isStore, indirOpnd, labelHelper);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a string value.
                return nullptr;
            }
        }
    }

    // Default: integer-index fast path.
    return
        GenerateFastElemIIntIndexCommon(
            instr,
            isStore,
            indirOpnd,
            labelHelper,
            labelCantUseArray,
            labelFallthrough,
            pIsTypedArrayElement,
            emitBailoutRef,
            pLabelSegmentLengthIncreased,
            checkArrayLengthOverflow,
            false,
            returnLength,
            bailOutLabelInstr,
            indirOpndOverflowed);
}
  12901. void
  12902. Lowerer::GenerateDynamicLoadPolymorphicInlineCacheSlot(IR::Instr * instrInsert, IR::RegOpnd * inlineCacheOpnd, IR::Opnd * objectTypeOpnd)
  12903. {
  12904. // Generates:
  12905. // MOV opndOffset, objectTypeOpnd
  12906. // SHR opndOffset, PolymorphicInlineCacheShift
  12907. // MOVZX cacheIndexOpnd, inlineCacheOpnd->size
  12908. // DEC cacheIndexOpnd
  12909. // AND opndOffset, cacheIndexOpnd
  12910. // SHL opndOffset, Math::Log2(sizeof(Js::InlineCache))
  12911. // MOV inlineCacheOpnd, inlineCacheOpnd->inlineCaches
  12912. // LEA inlineCacheOpnd, [inlineCacheOpnd + opndOffset]
  12913. IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
  12914. IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
  12915. Assert(rightShiftAmount > leftShiftAmount);
  12916. IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, m_func);
  12917. InsertShift(Js::OpCode::ShrU_A, false, opndOffset, objectTypeOpnd, IR::IntConstOpnd::New(rightShiftAmount, TyUint8, m_func, true), instrInsert);
  12918. IR::RegOpnd * cacheIndexOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  12919. InsertMove(cacheIndexOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfSize(), TyUint16, m_func), instrInsert);
  12920. InsertSub(false, cacheIndexOpnd, cacheIndexOpnd, IR::IntConstOpnd::New(1, TyMachPtr, m_func), instrInsert);
  12921. InsertAnd(opndOffset, opndOffset, cacheIndexOpnd, instrInsert);
  12922. InsertShift(Js::OpCode::Shl_A, false, opndOffset, opndOffset, IR::IntConstOpnd::New(leftShiftAmount, TyUint8, m_func), instrInsert);
  12923. InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, m_func), instrInsert);
  12924. InsertLea(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, opndOffset, TyMachPtr, m_func), instrInsert);
  12925. }
  12926. IR::IndirOpnd *
Lowerer::GenerateFastElemIStringIndexCommon(IR::Instr * instrInsert, bool isStore, IR::IndirOpnd * indirOpnd, IR::LabelInstr * labelHelper)
{
    // Emits the fast path for an indexed element access (load or store) where the index is
    // likely a string (e.g. obj["prop"]). Uses the PropertyString's per-string inline cache
    // to resolve the property slot without a helper call. All checks that fail jump to
    // labelHelper (or the bailout path wired into it by the caller).
    //
    // Returns an IndirOpnd addressing the cached slot: [slotArray + slotIndex * PtrSize].
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    Assert(baseOpnd != nullptr);
    Assert(indexOpnd->GetValueType().IsLikelyString());

    // Generates:
    //      StringTest(indexOpnd, $helper)                ; verify index is string type
    //      CMP indexOpnd, PropertyString::`vtable'       ; verify index is property string
    //      JNE $helper
    //      MOV inlineCacheOpnd, index->inlineCache
    //      GenerateObjectTest(baseOpnd, $helper)         ; verify base is an object
    //      MOV objectTypeOpnd, baseOpnd->type
    //      GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd) ; loads inline cache for given type
    //      LocalInlineCacheCheck(objectTypeOpnd, inlineCacheOpnd, $notInlineSlots) ; check for type in local inline slots, jump to $notInlineSlotsLabel on failure
    //      MOV opndSlotArray, baseOpnd
    //      JMP slotArrayLoadedLabel
    // $notInlineSlotsLabel
    //      opndTaggedType = GenerateLoadTaggedType(objectTypeOpnd) ; load objectTypeOpnd with InlineCacheAuxSlotTypeTag into opndTaggedType
    //      LocalInlineCacheCheck(opndTaggedType, inlineCacheOpnd, $helper) ; check for type in local aux slots, jump to $helper on failure
    //      MOV opndSlotArray, baseOpnd->auxSlots         ; load the aux slot array
    // $slotArrayLoadedLabel
    //      MOV opndSlotIndex, inlineCacheOpnd->u.local.slotIndex ; load the cached slot offset or index
    //      INC indexOpnd->hitRate

    // Bail to helper unless the index is a string at all.
    GenerateStringTest(indexOpnd, instrInsert, labelHelper);

    // The index must be a PropertyString (which carries the inline caches), or a
    // LiteralStringWithPropertyStringPtr that already points at a PropertyString.
    IR::LabelInstr * notPropStrLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * propStrLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    InsertCompareBranch(
        IR::IndirOpnd::New(indexOpnd, 0, TyMachPtr, m_func),
        LoadVTableValueOpnd(instrInsert, VTableValue::VtablePropertyString),
        Js::OpCode::BrNeq_A, notPropStrLabel, instrInsert);
    InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);

    instrInsert->InsertBefore(notPropStrLabel);
    InsertCompareBranch(
        IR::IndirOpnd::New(indexOpnd, 0, TyMachPtr, m_func),
        LoadVTableValueOpnd(instrInsert, VTableValue::VtableLiteralStringWithPropertyStringPtr),
        Js::OpCode::BrNeq_A, labelHelper, instrInsert);

    // The literal string's cached PropertyString pointer may still be null; helper in that case.
    IR::IndirOpnd * propStrOpnd = IR::IndirOpnd::New(indexOpnd, Js::LiteralStringWithPropertyStringPtr::GetOffsetOfPropertyString(), TyMachPtr, m_func);
    InsertCompareBranch(propStrOpnd, IR::IntConstOpnd::New(NULL, TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, instrInsert);

    // We don't really own indexOpnd, but it is fine to update it to be the PropertyString, since that is better to have anyway
    InsertMove(indexOpnd, propStrOpnd, instrInsert);

    instrInsert->InsertBefore(propStrLoadedLabel);

    // Verify the base is a (non-tagged) object, then load its type for the cache lookup.
    m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelHelper);
    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), instrInsert);

    // Pick the store vs. load element inline cache hanging off the PropertyString.
    const uint32 inlineCacheOffset = isStore ? Js::PropertyString::GetOffsetOfStElemInlineCache() : Js::PropertyString::GetOffsetOfLdElemInlineCache();
    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(indexOpnd, inlineCacheOffset, TyMachPtr, m_func), instrInsert);

    // Index into the polymorphic cache by the object's type.
    GenerateDynamicLoadPolymorphicInlineCacheSlot(instrInsert, inlineCacheOpnd, objectTypeOpnd);

    // First try a hit on the inline-slot form of the cache (slots stored inside the object);
    // on miss, retry with the aux-slot tagged type (slots stored in the auxSlots array).
    IR::LabelInstr * notInlineSlotsLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr * slotArrayLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    m_lowererMD.GenerateLocalInlineCacheCheck(instrInsert, objectTypeOpnd, inlineCacheOpnd, notInlineSlotsLabel);

    // Inline-slot hit: the slot array is the object itself.
    IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
    InsertMove(opndSlotArray, baseOpnd, instrInsert);
    InsertBranch(Js::OpCode::Br, slotArrayLoadedLabel, instrInsert);

    instrInsert->InsertBefore(notInlineSlotsLabel);

    // Aux-slot path: tag the type and re-check the cache; on miss go to helper.
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    m_lowererMD.GenerateLoadTaggedType(instrInsert, objectTypeOpnd, opndTaggedType);
    m_lowererMD.GenerateLocalInlineCacheCheck(instrInsert, opndTaggedType, inlineCacheOpnd, labelHelper);

    // Aux-slot hit: the slot array lives at baseOpnd->auxSlots.
    IR::IndirOpnd * opndIndir = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrInsert->m_func);
    InsertMove(opndSlotArray, opndIndir, instrInsert);

    instrInsert->InsertBefore(slotArrayLoadedLabel);

    // Load the cached slot index (a uint16 inside the inline cache).
    IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
    InsertMove(opndSlotIndex, IR::IndirOpnd::New(inlineCacheOpnd, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrInsert->m_func), instrInsert);

    // Bump the PropertyString's hit rate counter so the runtime knows the cache is useful.
    IR::IndirOpnd * hitRateOpnd = IR::IndirOpnd::New(indexOpnd, Js::PropertyString::GetOffsetOfHitRate(), TyInt32, m_func);
    IR::IntConstOpnd * incOpnd = IR::IntConstOpnd::New(1, TyInt32, instrInsert->m_func);
    InsertAdd(false, hitRateOpnd, hitRateOpnd, incOpnd, instrInsert);

    // return [opndSlotArray + opndSlotIndex * PtrSize]
    return IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, instrInsert->m_func);
}
  12997. IR::IndirOpnd *
  12998. Lowerer::GenerateFastElemIIntIndexCommon(
  12999. IR::Instr * instr,
  13000. bool isStore,
  13001. IR::IndirOpnd * indirOpnd,
  13002. IR::LabelInstr * labelHelper,
  13003. IR::LabelInstr * labelCantUseArray,
  13004. IR::LabelInstr *labelFallthrough,
  13005. bool * pIsTypedArrayElement,
  13006. bool *emitBailoutRef,
  13007. IR::LabelInstr **pLabelSegmentLengthIncreased,
  13008. bool checkArrayLengthOverflow /*= true*/,
  13009. bool forceGenerateFastPath /* = false */,
  13010. bool returnLength,
  13011. IR::LabelInstr *bailOutLabelInstr /* = nullptr*/,
  13012. bool * indirOpndOverflowed /* = nullptr */)
  13013. {
  13014. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13015. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  13016. Assert(!baseOpnd->IsTaggedInt() || (indexOpnd && indexOpnd->IsNotInt()));
  13017. if (indirOpndOverflowed != nullptr)
  13018. {
  13019. *indirOpndOverflowed = false;
  13020. }
  13021. BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  13022. IRType indirType = TyVar;
  13023. const ValueType baseValueType(baseOpnd->GetValueType());
  13024. // TEST base, AtomTag -- check base not tagged int
  13025. // JNE $helper
  13026. // if (base.GetValueType() != Array) {
  13027. // CMP [base], JavascriptArray::`vtable'
  13028. // JNE $helper
  13029. // }
  13030. // TEST index, 1 -- index tagged int
  13031. // JEQ $helper
  13032. // if (inputIndex is not int const) {
  13033. // MOV index, inputIndex
  13034. // SAR index, Js::VarTag_Shift -- remote atom tag
  13035. // JS $helper -- exclude negative index
  13036. // }
  13037. // MOV headSegment, [base + offset(head)]
  13038. // CMP [headSegment + offset(length)], index -- bounds check
  13039. // if (opcode == StElemI_A) {
  13040. // JA $done (for typedarray, JA $toNumberHelper)
  13041. // CMP [headSegment + offset(size)], index -- chunk has room?
  13042. // JBE $helper
  13043. // if (index is not int const) {
  13044. // LEA newLength, [index + 1]
  13045. // } else {
  13046. // newLength = index + 1
  13047. // }
  13048. // if(BailOutOnInvalidatedArrayLength) {
  13049. // CMP [base + offset(length)], newlength
  13050. // JB $helper
  13051. // }
  13052. // MOV [headSegment + offset(length)], newLength -- update length on chunk
  13053. // CMP [base + offset(length)], newLength
  13054. // JAE $done
  13055. // MOV [base + offset(length)], newLength -- update length on array
  13056. // if(length to be returned){
  13057. // SHL newLength, AtomTag
  13058. // INC newLength
  13059. // MOV dst, newLength
  13060. // }
  13061. // JMP $done
  13062. //
  13063. // $toNumberHelper: Call HelperOp_ConvNumber_Full
  13064. // JMP $done
  13065. // $done
  13066. // } else {la
  13067. // JBE $helper
  13068. // }
  13069. // return [headSegment + offset(elements) + index]
  13070. // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
  13071. // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
  13072. // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
  13073. // bailouts.
  13074. bool isIndexNotInt = false;
  13075. IntConstType value = 0;
  13076. IR::Opnd * indexValueOpnd = nullptr;
  13077. bool invertBoundCheckComparison = false;
  13078. bool checkIndexConstOverflowed = false;
  13079. if (indirOpnd->TryGetIntConstIndexValue(true, &value, &isIndexNotInt))
  13080. {
  13081. if (value >= 0)
  13082. {
  13083. indexValueOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  13084. invertBoundCheckComparison = true; // facilitate folding the constant index into the compare instruction
  13085. checkIndexConstOverflowed = true;
  13086. }
  13087. else
  13088. {
  13089. // If the index is a negative int constant we go directly to helper.
  13090. Assert(!forceGenerateFastPath);
  13091. return nullptr;
  13092. }
  13093. }
  13094. else if (isIndexNotInt)
  13095. {
  13096. // If we know the index is not an int we go directly to helper.
  13097. Assert(!forceGenerateFastPath);
  13098. return nullptr;
  13099. }
  13100. //At this point indexValueOpnd is either NULL or contains the valueOpnd
  13101. if(!forceGenerateFastPath && !ShouldGenerateArrayFastPath(baseOpnd, true, true, true))
  13102. {
  13103. return nullptr;
  13104. }
  13105. if(baseValueType.IsLikelyAnyOptimizedArray())
  13106. {
  13107. indirScale = GetArrayIndirScale(baseValueType);
  13108. indirType = GetArrayIndirType(baseValueType);
  13109. }
  13110. if (checkIndexConstOverflowed && (static_cast<uint64>(value) << indirScale) > INT32_MAX &&
  13111. indirOpndOverflowed != nullptr)
  13112. {
  13113. *indirOpndOverflowed = true;
  13114. return nullptr;
  13115. }
  13116. IRType elementType = TyIllegal;
  13117. IR::Opnd * element = nullptr;
  13118. if(instr->m_opcode == Js::OpCode::InlineArrayPush)
  13119. {
  13120. element = instr->GetSrc2();
  13121. elementType = element->GetType();
  13122. }
  13123. else if(isStore && instr->GetSrc1())
  13124. {
  13125. element = instr->GetSrc1();
  13126. elementType = element->GetType();
  13127. }
  13128. Assert(isStore || (element == nullptr && elementType == TyIllegal));
  13129. if (isStore && baseValueType.IsLikelyNativeArray() && indirType != elementType)
  13130. {
  13131. // We're trying to write a value of the wrong type, which should force a conversion of the array.
  13132. // Go to the helper for that.
  13133. return nullptr;
  13134. }
  13135. IR::RegOpnd *arrayOpnd = baseOpnd;
  13136. IR::RegOpnd *headSegmentOpnd = nullptr;
  13137. IR::Opnd *headSegmentLengthOpnd = nullptr;
  13138. IR::AutoReuseOpnd autoReuseHeadSegmentOpnd, autoReuseHeadSegmentLengthOpnd;
  13139. bool indexIsNonnegative = indexValueOpnd || indexOpnd->GetType() == TyUint32 || !checkArrayLengthOverflow;
  13140. bool indexIsLessThanHeadSegmentLength = false;
  13141. if(!baseValueType.IsAnyOptimizedArray())
  13142. {
  13143. arrayOpnd = GenerateArrayTest(baseOpnd, labelCantUseArray, labelCantUseArray, instr, true, isStore);
  13144. }
  13145. else
  13146. {
  13147. if(arrayOpnd->IsArrayRegOpnd())
  13148. {
  13149. IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
  13150. if(arrayRegOpnd->HeadSegmentSym())
  13151. {
  13152. headSegmentOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentSym(), TyMachPtr, m_func);
  13153. DebugOnly(headSegmentOpnd->FreezeSymValue());
  13154. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  13155. }
  13156. if(arrayRegOpnd->HeadSegmentLengthSym())
  13157. {
  13158. headSegmentLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentLengthSym(), TyUint32, m_func);
  13159. DebugOnly(headSegmentLengthOpnd->AsRegOpnd()->FreezeSymValue());
  13160. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  13161. }
  13162. if (arrayRegOpnd->EliminatedLowerBoundCheck())
  13163. {
  13164. indexIsNonnegative = true;
  13165. }
  13166. if(arrayRegOpnd->EliminatedUpperBoundCheck())
  13167. {
  13168. indexIsLessThanHeadSegmentLength = true;
  13169. }
  13170. }
  13171. }
  13172. IR::AutoReuseOpnd autoReuseArrayOpnd;
  13173. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  13174. {
  13175. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  13176. }
  13177. const auto EnsureObjectArrayLoaded = [&]()
  13178. {
  13179. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  13180. {
  13181. return;
  13182. }
  13183. arrayOpnd = LoadObjectArray(arrayOpnd, instr);
  13184. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  13185. };
  13186. const bool doUpperBoundCheck = checkArrayLengthOverflow && !indexIsLessThanHeadSegmentLength;
  13187. if(!indexValueOpnd)
  13188. {
  13189. indexValueOpnd =
  13190. m_lowererMD.LoadNonnegativeIndex(
  13191. indexOpnd,
  13192. (
  13193. indexIsNonnegative
  13194. #if !INT32VAR
  13195. ||
  13196. // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
  13197. doUpperBoundCheck
  13198. #endif
  13199. ),
  13200. labelCantUseArray,
  13201. labelHelper,
  13202. instr);
  13203. }
  13204. const IR::AutoReuseOpnd autoReuseIndexValueOpnd(indexValueOpnd, m_func);
  13205. if (baseValueType.IsLikelyTypedArray())
  13206. {
  13207. *pIsTypedArrayElement = true;
  13208. if(doUpperBoundCheck)
  13209. {
  13210. if(!headSegmentLengthOpnd)
  13211. {
  13212. // (headSegmentLength = [base + offset(length)])
  13213. int lengthOffset;
  13214. lengthOffset = Js::Float64Array::GetOffsetOfLength();
  13215. headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
  13216. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  13217. }
  13218. // CMP index, headSegmentLength -- upper bound check
  13219. if(!invertBoundCheckComparison)
  13220. {
  13221. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
  13222. }
  13223. else
  13224. {
  13225. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
  13226. }
  13227. }
  13228. }
  13229. else
  13230. {
  13231. *pIsTypedArrayElement = false;
  13232. if (isStore &&
  13233. baseValueType.IsLikelyNativeIntArray() &&
  13234. (!element->IsIntConstOpnd() || Js::SparseArraySegment<int32>::GetMissingItem() == element->AsIntConstOpnd()->AsInt32()))
  13235. {
  13236. Assert(instr->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr);
  13237. // Check for a write of the MissingItem value.
  13238. InsertCompareBranch(
  13239. element,
  13240. GetMissingItemOpnd(elementType, m_func),
  13241. Js::OpCode::BrEq_A,
  13242. instr->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray,
  13243. instr,
  13244. true);
  13245. }
  13246. if(!headSegmentOpnd)
  13247. {
  13248. EnsureObjectArrayLoaded();
  13249. // MOV headSegment, [base + offset(head)]
  13250. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
  13251. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  13252. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  13253. InsertMove(headSegmentOpnd, indirOpnd, instr);
  13254. }
  13255. if(doUpperBoundCheck)
  13256. {
  13257. if(!headSegmentLengthOpnd)
  13258. {
  13259. // (headSegmentLength = [headSegment + offset(length)])
  13260. headSegmentLengthOpnd =
  13261. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  13262. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  13263. }
  13264. // CMP index, headSegmentLength -- upper bound check
  13265. if(!invertBoundCheckComparison)
  13266. {
  13267. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
  13268. }
  13269. else
  13270. {
  13271. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
  13272. }
  13273. }
  13274. }
  13275. const IR::BailOutKind bailOutKind = instr->HasBailOutInfo() ? instr->GetBailOutKind() : IR::BailOutInvalid;
  13276. const bool needBailOutOnInvalidLength = !!(bailOutKind & (IR::BailOutOnInvalidatedArrayHeadSegment));
  13277. const bool needBailOutToHelper = !!(bailOutKind & (IR::BailOutOnArrayAccessHelperCall));
  13278. const bool needBailOutOnSegmentLengthCompare = needBailOutToHelper || needBailOutOnInvalidLength;
  13279. if(indexIsLessThanHeadSegmentLength || needBailOutOnSegmentLengthCompare)
  13280. {
  13281. if (needBailOutOnSegmentLengthCompare)
  13282. {
  13283. // The bailout must be pre-op because it will not have completed the operation
  13284. Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
  13285. // Verify other bailouts these can be combined with
  13286. Assert(
  13287. !(
  13288. bailOutKind &
  13289. IR::BailOutKindBits &
  13290. ~(
  13291. IR::BailOutOnArrayAccessHelperCall |
  13292. IR::BailOutOnInvalidatedArrayHeadSegment |
  13293. IR::BailOutOnInvalidatedArrayLength |
  13294. IR::BailOutConventionalNativeArrayAccessOnly |
  13295. IR::BailOutOnMissingValue |
  13296. (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
  13297. )
  13298. ));
  13299. if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  13300. {
  13301. // Omit the helper call and generate a bailout instead
  13302. Assert(emitBailoutRef);
  13303. *emitBailoutRef = true;
  13304. }
  13305. }
  13306. if (indexIsLessThanHeadSegmentLength)
  13307. {
  13308. Assert(!(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment));
  13309. }
  13310. else
  13311. {
  13312. IR::LabelInstr *bailOutLabel;
  13313. if (needBailOutOnInvalidLength)
  13314. {
  13315. Assert(isStore);
  13316. // Lower a separate (but shared) bailout for this case, and preserve the bailout kind in the instruction if the
  13317. // helper call is going to be generated, because the bailout kind needs to be lowered again and differently in the
  13318. // helper call path.
  13319. //
  13320. // Generate:
  13321. // (instr)
  13322. // jmp $continue
  13323. // $bailOut:
  13324. // Bail out with IR::BailOutOnInvalidatedArrayHeadSegment
  13325. // $continue:
  13326. LowerOneBailOutKind(
  13327. instr,
  13328. IR::BailOutOnInvalidatedArrayHeadSegment,
  13329. false,
  13330. !(bailOutKind & IR::BailOutOnArrayAccessHelperCall));
  13331. bailOutLabel = instr->GetOrCreateContinueLabel(true);
  13332. InsertBranch(Js::OpCode::Br, labelFallthrough, bailOutLabel);
  13333. }
  13334. else
  13335. {
  13336. Assert(needBailOutToHelper);
  13337. bailOutLabel = labelHelper;
  13338. }
  13339. // Bail out if the index is outside the head segment bounds
  13340. // jae $bailOut
  13341. Assert(checkArrayLengthOverflow);
  13342. InsertBranch(
  13343. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  13344. true /* isUnsigned */,
  13345. bailOutLabel,
  13346. instr);
  13347. }
  13348. }
  13349. else if (isStore && !baseValueType.IsLikelyTypedArray()) // #if (opcode == StElemI_A)
  13350. {
  13351. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  13352. IR::LabelInstr *labelSegmentLengthIncreased = nullptr;
  13353. const bool isPush = instr->m_opcode != Js::OpCode::StElemI_A && instr->m_opcode != Js::OpCode::StElemI_A_Strict;
  13354. // Put the head segment size check and length updates in a helper block since they're not the common path for StElem.
  13355. // For push, that is the common path so keep it in a non-helper block.
  13356. const bool isInHelperBlock = !isPush;
  13357. if(checkArrayLengthOverflow)
  13358. {
  13359. if(pLabelSegmentLengthIncreased &&
  13360. !(
  13361. (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()) ||
  13362. ((instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  13363. instr->IsProfiledInstr() && !instr->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
  13364. ))
  13365. {
  13366. // For arrays that are not guaranteed to have no missing values, before storing to an element where
  13367. // (index < length), the element value needs to be checked to see if it's a missing value, and if so, fall back
  13368. // to the helper. This is done to keep the missing value tracking precise in arrays. So, create a separate label
  13369. // for the case where the length was increased (index >= length), and pass it back to GenerateFastStElemI, which
  13370. // will fill in the rest.
  13371. labelSegmentLengthIncreased = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelperBlock);
  13372. *pLabelSegmentLengthIncreased = labelSegmentLengthIncreased;
  13373. }
  13374. else
  13375. {
  13376. labelSegmentLengthIncreased = labelDone;
  13377. }
  13378. // JB $done
  13379. InsertBranch(
  13380. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  13381. true /* isUnsigned */,
  13382. labelDone,
  13383. instr);
  13384. }
  13385. if(isInHelperBlock)
  13386. {
  13387. InsertLabel(true /* isHelper */, instr);
  13388. }
  13389. EnsureObjectArrayLoaded();
  13390. do // while(false);
  13391. {
  13392. if(checkArrayLengthOverflow)
  13393. {
  13394. if(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnMissingValue)
  13395. {
  13396. // Need to bail out if this store would create a missing value. The store would cause a missing value to be
  13397. // created if (index > length && index < size). If (index >= size) we would go to helper anyway, and the bailout
  13398. // handling for this is done after the helper call, so just go to helper if (index > length).
  13399. //
  13400. // jne $helper // branch for (cmp index, headSegmentLength)
  13401. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, instr);
  13402. }
  13403. else
  13404. {
  13405. // If (index < size) we will not call the helper, so the array flags must be updated to reflect that it no
  13406. // longer has no missing values.
  13407. //
  13408. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  13409. // cmp index, [headSegment + offset(size)]
  13410. // jae $helper
  13411. // jmp indexLessThanSize
  13412. // indexGreaterThanLength:
  13413. // cmp index, [headSegment + offset(size)]
  13414. // jae $helper
  13415. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  13416. // indexLessThanSize:
  13417. IR::LabelInstr *const indexGreaterThanLengthLabel = InsertLabel(true /* isHelper */, instr);
  13418. IR::LabelInstr *const indexLessThanSizeLabel = InsertLabel(isInHelperBlock, instr);
  13419. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  13420. // cmp index, [headSegment + offset(size)]
  13421. // jae $helper
  13422. // jmp indexLessThanSize
  13423. // indexGreaterThanLength:
  13424. InsertBranch(Js::OpCode::BrNeq_A, indexGreaterThanLengthLabel, indexGreaterThanLengthLabel);
  13425. InsertCompareBranch(
  13426. indexValueOpnd,
  13427. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  13428. Js::OpCode::BrGe_A,
  13429. true /* isUnsigned */,
  13430. labelHelper,
  13431. indexGreaterThanLengthLabel);
  13432. InsertBranch(Js::OpCode::Br, indexLessThanSizeLabel, indexGreaterThanLengthLabel);
  13433. // indexGreaterThanLength:
  13434. // cmp index, [headSegment + offset(size)]
  13435. // jae $helper
  13436. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  13437. // indexLessThanSize:
  13438. InsertCompareBranch(
  13439. indexValueOpnd,
  13440. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  13441. Js::OpCode::BrGe_A,
  13442. true /* isUnsigned */,
  13443. labelHelper,
  13444. indexLessThanSizeLabel);
  13445. CompileAssert(
  13446. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  13447. Js::DynamicObjectFlags::HasNoMissingValues);
  13448. InsertAnd(
  13449. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  13450. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  13451. IR::IntConstOpnd::New(
  13452. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  13453. TyUint8,
  13454. m_func,
  13455. true),
  13456. indexLessThanSizeLabel);
  13457. // indexLessThanSize:
  13458. break;
  13459. }
  13460. }
  13461. // CMP index, [headSegment + offset(size)]
  13462. // JAE $helper
  13463. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, this->m_func);
  13464. InsertCompareBranch(indexValueOpnd, indirOpnd, Js::OpCode::BrGe_A, true /* isUnsigned */, labelHelper, instr);
  13465. } while(false);
  13466. if(isPush)
  13467. {
  13468. IR::LabelInstr *const updateLengthLabel = InsertLabel(isInHelperBlock, instr);
  13469. if(!doUpperBoundCheck && !headSegmentLengthOpnd)
  13470. {
  13471. // (headSegmentLength = [headSegment + offset(length)])
  13472. headSegmentLengthOpnd =
  13473. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  13474. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  13475. }
  13476. // For push, it is guaranteed that (index >= length). We already know that (index < size), but we need to check if
  13477. // (index > length) because in that case a missing value will be created and the missing value tracking in the array
  13478. // needs to be updated.
  13479. //
  13480. // cmp index, headSegmentLength
  13481. // je $updateLength
  13482. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  13483. // updateLength:
  13484. InsertCompareBranch(
  13485. indexValueOpnd,
  13486. headSegmentLengthOpnd,
  13487. Js::OpCode::BrEq_A,
  13488. updateLengthLabel,
  13489. updateLengthLabel);
  13490. CompileAssert(
  13491. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  13492. Js::DynamicObjectFlags::HasNoMissingValues);
  13493. InsertAnd(
  13494. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  13495. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  13496. IR::IntConstOpnd::New(
  13497. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  13498. TyUint8,
  13499. m_func,
  13500. true),
  13501. updateLengthLabel);
  13502. }
  13503. if (baseValueType.IsArrayOrObjectWithArray())
  13504. {
  13505. // We didn't emit an array check, but if we are going to grow the array
  13506. // We need to go to helper if there is an ES5 array/objectarray used as prototype
  13507. GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, instr);
  13508. }
  13509. IR::Opnd *newLengthOpnd;
  13510. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  13511. if (indexValueOpnd->IsRegOpnd())
  13512. {
  13513. // LEA newLength, [index + 1]
  13514. newLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
  13515. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  13516. InsertAdd(false /* needFlags */, newLengthOpnd, indexValueOpnd, IR::IntConstOpnd::New(1, TyUint32, m_func), instr);
  13517. }
  13518. else
  13519. {
  13520. newLengthOpnd = IR::IntConstOpnd::New(value + 1, TyUint32, this->m_func);
  13521. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  13522. }
  13523. // This is a common enough case that we want to go through this path instead of the simpler one, since doing it this way is faster for preallocated but un-filled arrays.
  13524. if (!!(bailOutKind & IR::BailOutOnInvalidatedArrayLength))
  13525. {
  13526. // If we'd increase the array length, go to the helper
  13527. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  13528. InsertCompareBranch(
  13529. newLengthOpnd,
  13530. indirOpnd,
  13531. Js::OpCode::BrGt_A,
  13532. true,
  13533. labelHelper,
  13534. instr);
  13535. }
  13536. // MOV [headSegment + offset(length)], newLength
  13537. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  13538. InsertMove(indirOpnd, newLengthOpnd, instr);
  13539. if (checkArrayLengthOverflow)
  13540. {
  13541. // CMP newLength, [base + offset(length)]
  13542. // JBE $segmentLengthIncreased
  13543. Assert(labelSegmentLengthIncreased);
  13544. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  13545. InsertCompareBranch(
  13546. newLengthOpnd,
  13547. indirOpnd,
  13548. Js::OpCode::BrLe_A,
  13549. true /* isUnsigned */,
  13550. labelSegmentLengthIncreased,
  13551. instr);
  13552. if(!isInHelperBlock)
  13553. {
  13554. InsertLabel(true /* isHelper */, instr);
  13555. }
  13556. }
  13557. // MOV [base + offset(length)], newLength
  13558. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  13559. InsertMove(indirOpnd, newLengthOpnd, instr);
  13560. if(returnLength)
  13561. {
  13562. if(newLengthOpnd->GetSize() != MachPtr)
  13563. {
  13564. newLengthOpnd = newLengthOpnd->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
  13565. }
  13566. // SHL newLength, AtomTag
  13567. // INC newLength
  13568. this->m_lowererMD.GenerateInt32ToVarConversion(newLengthOpnd, instr);
  13569. // MOV dst, newLength
  13570. InsertMove(instr->GetDst(), newLengthOpnd, instr);
  13571. }
  13572. if(labelSegmentLengthIncreased && labelSegmentLengthIncreased != labelDone)
  13573. {
  13574. // labelSegmentLengthIncreased:
  13575. instr->InsertBefore(labelSegmentLengthIncreased);
  13576. }
  13577. // $done
  13578. instr->InsertBefore(labelDone);
  13579. }
  13580. else // #else
  13581. {
  13582. if (checkArrayLengthOverflow)
  13583. {
  13584. if (*pIsTypedArrayElement && isStore)
  13585. {
  13586. IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  13587. //For positive index beyond length or negative index its essentially nop for typed array store
  13588. InsertBranch(
  13589. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  13590. true /* isUnsigned */,
  13591. labelInlineSet,
  13592. instr);
  13593. // For typed array, call ToNumber before we fallThrough.
  13594. if (instr->GetSrc1()->GetType() == TyVar && !instr->GetSrc1()->GetValueType().IsPrimitive())
  13595. {
  13596. // Enter an ophelper block
  13597. IR::LabelInstr * opHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13598. instr->InsertBefore(opHelper);
  13599. IR::Instr *toNumberInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  13600. toNumberInstr->SetSrc1(instr->GetSrc1());
  13601. instr->InsertBefore(toNumberInstr);
  13602. if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
  13603. {
  13604. // Bail out if this conversion triggers implicit calls.
  13605. toNumberInstr = toNumberInstr->ConvertToBailOutInstr(instr->GetBailOutInfo(), bailOutKind);
  13606. IR::Instr * instrShare = instr->ShareBailOut();
  13607. LowerBailTarget(instrShare);
  13608. }
  13609. LowerUnaryHelperMem(toNumberInstr, IR::HelperOp_ConvNumber_Full);
  13610. }
  13611. InsertBranch(Js::OpCode::Br, labelFallthrough, instr); //Jump to fallThrough
  13612. instr->InsertBefore(labelInlineSet);
  13613. }
  13614. else
  13615. {
  13616. // JAE $helper
  13617. InsertBranch(
  13618. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  13619. true /* isUnsigned */,
  13620. labelHelper,
  13621. instr);
  13622. }
  13623. }
  13624. EnsureObjectArrayLoaded();
  13625. if (instr->m_opcode == Js::OpCode::InlineArrayPop)
  13626. {
  13627. Assert(!baseValueType.IsLikelyTypedArray());
  13628. Assert(bailOutLabelInstr);
  13629. if (indexValueOpnd->IsIntConstOpnd())
  13630. {
  13631. // indirOpnd = [headSegment + index + offset(elements)]
  13632. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  13633. // TODO: Assert(Math::FitsInDWord(offset));
  13634. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  13635. }
  13636. else
  13637. {
  13638. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  13639. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  13640. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  13641. }
  13642. IR::Opnd * tmpDst = nullptr;
  13643. IR::Opnd * dst = instr->GetDst();
  13644. //Pop might not have a dst, if not don't worry about returning the last element. But we still have to
  13645. // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
  13646. if (dst || !baseValueType.HasNoMissingValues())
  13647. {
  13648. if (!dst)
  13649. {
  13650. dst = IR::RegOpnd::New(indirType, this->m_func);
  13651. }
  13652. else if (dst->AsRegOpnd()->m_sym == arrayOpnd->m_sym)
  13653. {
  13654. tmpDst = IR::RegOpnd::New(TyVar, this->m_func);
  13655. dst = tmpDst;
  13656. }
  13657. // MOV dst, [head + offset]
  13658. InsertMove(dst, indirOpnd, instr);
  13659. //If the array has missing values, check for one
  13660. if (!baseValueType.HasNoMissingValues())
  13661. {
  13662. InsertCompareBranch(
  13663. dst,
  13664. GetMissingItemOpnd(indirType, m_func),
  13665. Js::OpCode::BrEq_A,
  13666. bailOutLabelInstr,
  13667. instr,
  13668. true);
  13669. }
  13670. }
  13671. // MOV [head + offset], missing
  13672. InsertMove(indirOpnd, GetMissingItemOpnd(indirType, m_func), instr);
  13673. IR::Opnd *newLengthOpnd;
  13674. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  13675. if (indexValueOpnd->IsRegOpnd())
  13676. {
  13677. // LEA newLength, [index]
  13678. newLengthOpnd = indexValueOpnd;
  13679. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  13680. }
  13681. else
  13682. {
  13683. newLengthOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  13684. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  13685. }
  13686. //update segment length and array length
  13687. // MOV [headSegment + offset(length)], newLength
  13688. IR::IndirOpnd *lengthIndirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  13689. InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
  13690. // MOV [base + offset(length)], newLength
  13691. lengthIndirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  13692. InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
  13693. if (tmpDst)
  13694. {
  13695. // The array opnd and the destination is the same, need to move the value in the tmp dst
  13696. // to the actual dst
  13697. InsertMove(instr->GetDst(), tmpDst, instr);
  13698. }
  13699. return indirOpnd;
  13700. }
  13701. } // #endif
  13702. if (baseValueType.IsLikelyTypedArray())
  13703. {
  13704. if(!headSegmentOpnd)
  13705. {
  13706. // MOV headSegment, [base + offset(arrayBuffer)]
  13707. int bufferOffset;
  13708. bufferOffset = Js::Float64Array::GetOffsetOfBuffer();
  13709. indirOpnd = IR::IndirOpnd::New(arrayOpnd, bufferOffset, TyMachPtr, this->m_func);
  13710. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  13711. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  13712. InsertMove(headSegmentOpnd, indirOpnd, instr);
  13713. }
  13714. // indirOpnd = [headSegment + index]
  13715. if (indexValueOpnd->IsIntConstOpnd())
  13716. {
  13717. IntConstType offset = (value << indirScale);
  13718. // TODO: Assert(Math::FitsInDWord(offset));
  13719. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  13720. }
  13721. else
  13722. {
  13723. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  13724. }
  13725. }
  13726. else if (indexValueOpnd->IsIntConstOpnd())
  13727. {
  13728. // indirOpnd = [headSegment + index + offset(elements)]
  13729. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  13730. // TODO: Assert(Math::FitsInDWord(offset));
  13731. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  13732. }
  13733. else
  13734. {
  13735. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  13736. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  13737. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  13738. }
  13739. return indirOpnd;
  13740. }
  13741. void
  13742. Lowerer::GenerateTypeIdCheck(Js::TypeId typeId, IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateObjectCheck)
  13743. {
  13744. if (generateObjectCheck && !opnd->IsNotTaggedValue())
  13745. {
  13746. m_lowererMD.GenerateObjectTest(opnd, insertBeforeInstr, labelFail);
  13747. }
  13748. // MOV r1, [opnd + offset(type)]
  13749. IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  13750. const IR::AutoReuseOpnd autoReuseR1(r1, m_func);
  13751. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(opnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  13752. InsertMove(r1, indirOpnd, insertBeforeInstr);
  13753. // CMP [r1 + offset(typeId)], typeid -- check src isString
  13754. // JNE $fail
  13755. indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  13756. InsertCompareBranch(
  13757. indirOpnd,
  13758. IR::IntConstOpnd::New(typeId, TyInt32, this->m_func),
  13759. Js::OpCode::BrNeq_A,
  13760. labelFail,
  13761. insertBeforeInstr);
  13762. }
  13763. IR::RegOpnd *
  13764. Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
  13765. {
  13766. if (!opnd->IsVar())
  13767. {
  13768. AssertMsg(opnd->GetSize() == 4, "This should be 32-bit wide");
  13769. return opnd;
  13770. }
  13771. return m_lowererMD.GenerateUntagVar(opnd, labelFail, insertBeforeInstr, generateTagCheck && !opnd->IsTaggedInt());
  13772. }
// Emit a zero test on opndSrc before insertBeforeInstr, branching to
// isZeroLabel when the operand is zero (e.g. a null pointer).
//   TEST opndSrc, opndSrc
//   JEQ $isZeroLabel
void
Lowerer::GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * isZeroLabel, IR::Instr * insertBeforeInstr)
{
    InsertTestBranch(opndSrc, opndSrc, Js::OpCode::BrEq_A, isZeroLabel, insertBeforeInstr);
}
// Emit an inline fast path for an element load whose base is (likely) a string
// and whose index is (likely) an int: verify the base is a string, bounds-check
// the index against the character length, read the character, and fetch the
// corresponding single-character string from the CharStringCache (which only
// covers 7-bit ASCII). Any condition the fast path can't handle jumps to
// labelHelper. Returns true iff the fast path was emitted.
bool
Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr * labelFallThru)
{
    IR::IndirOpnd * indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

    // don't generate the fast path if the instance is not likely string
    if (!baseOpnd->GetValueType().IsLikelyString())
    {
        return false;
    }
    Assert(!baseOpnd->IsTaggedInt());

    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // Don't generate the fast path if the index operand is not likely int
    if (indexOpnd && !indexOpnd->GetValueType().IsLikelyInt())
    {
        return false;
    }

    // Make sure the instance is a string
    Assert(!indexOpnd || !indexOpnd->IsNotInt());
    GenerateStringTest(baseOpnd, ldElem, labelHelper);

    IR::Opnd * index32CmpOpnd;
    IR::RegOpnd * bufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseBufferOpnd(bufferOpnd, m_func);
    IR::IndirOpnd * charIndirOpnd;
    if (indexOpnd)
    {
        // Untag the var and generate the indir into the string buffer
        IR::RegOpnd * index32Opnd = GenerateUntagVar(indexOpnd, labelHelper, ldElem);
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, index32Opnd, 1, TyUint16, this->m_func);
        index32CmpOpnd = index32Opnd;
    }
    else
    {
        // Just use the offset to indirect into the string buffer
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, indirOpnd->GetOffset() * sizeof(char16), TyUint16, this->m_func);
        index32CmpOpnd = IR::IntConstOpnd::New((uint32)indirOpnd->GetOffset(), TyUint32, this->m_func);
    }

    // Check if the index is in range of the string length
    //  CMP [baseOpnd + offset(length)], indexOpnd     -- string length
    //  JBE $helper                                    -- unsigned compare, and string length are at most INT_MAX - 1
    //                                                 -- so that even if we have a negative index, this will fail
    InsertCompareBranch(IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func)
        , index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);

    // Load the string buffer and make sure it is not null
    //  MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
    //  TEST bufferOpnd, bufferOpnd
    //  JEQ $labelHelper
    indirOpnd = IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(bufferOpnd, indirOpnd, ldElem);
    GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);

    // Load the character and check if it is 7bit ASCI (which we have the cache for)
    //  MOV charOpnd, [bufferOpnd + index32Opnd]
    //  CMP charOpnd, 0x80
    //  JAE $helper
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
    InsertMove(charOpnd, charIndirOpnd, ldElem);
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
        Js::OpCode::BrGe_A, true, labelHelper, ldElem);

    // Load the string from the cache
    //  MOV charStringCache, <charStringCache, address>
    //  MOV stringOpnd, [charStringCache + charOpnd * 4]
    IR::RegOpnd * cacheOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseCacheOpnd(cacheOpnd, m_func);
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheOpnd, this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueCharStringCache), ldElem);

    // Check if we have created the string or not
    //  TEST stringOpnd, stringOpnd
    //  JE $helper
    IR::RegOpnd * stringOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseStringOpnd(stringOpnd, m_func);
    InsertMove(stringOpnd, IR::IndirOpnd::New(cacheOpnd, charOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), ldElem);
    GenerateNotZeroTest(stringOpnd, labelHelper, ldElem);

    // Cache hit: the cached single-char string is the result.
    InsertMove(ldElem->GetDst(), stringOpnd, ldElem);
    InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
    return true;
}
// Lower LdElemI (and LdMethodElem) with an inline fast path where profile data
// allows it: stack-args loads, string character loads, and array/typed-array
// element loads via GenerateFastElemICommon. Also lays out the helper/bailout
// block structure after the fast path. On return, *instrIsInHelperBlockRef
// tells the caller whether the (possibly remaining) instr now sits in a helper
// block. Returns true if the caller still needs to generate the helper call
// for the instruction, false if this routine consumed it (e.g. by bailout).
bool
Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelBailOut = nullptr;
    IR::LabelInstr * labelMissingNative = nullptr;
    IR::Opnd *src1 = ldElem->GetSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on LdElementI");
    IR::IndirOpnd * indirOpnd = src1->AsIndirOpnd();

    // From FastElemICommon:
    //  TEST base, AtomTag                  -- check base not tagged int
    //  JNE $helper
    //  MOV r1, [base + offset(type)]       -- check base isArray
    //  CMP [r1 + offset(typeId)], TypeIds_Array
    //  JNE $helper
    //  TEST index, 1                       -- index tagged int
    //  JEQ $helper
    //  MOV r2, index
    //  SAR r2, Js::VarTag_Shift            -- remove atom tag
    //  JS $helper                          -- exclude negative index
    //  MOV r4, [base + offset(head)]
    //  CMP r2, [r4 + offset(length)]       -- bounds check
    //  JAE $helper
    //  MOV r3, [r4 + offset(elements)]
    // Generated here:
    //  MOV dst, [r3 + r2]
    //  TEST dst, dst
    //  JNE $fallthrough
    if(ldElem->m_opcode == Js::OpCode::LdMethodElem && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray())
    {
        // Typed arrays don't return objects, so it's not worth generating a fast path for LdMethodElem. Calling the helper also
        // generates a better error message. Skip the fast path and just generate a helper call.
        return true;
    }

    labelFallThru = ldElem->GetOrCreateContinueLabel();
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    // If we know for sure (based on flow graph) we're loading from the arguments object, then ignore the (path-based) profile info.
    bool isNativeArrayLoad = !ldElem->DoStackArgsOpt(this->m_func) && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
    bool needMissingValueCheck = true;
    bool emittedFastPath = false;
    bool emitBailout = false;

    if (ldElem->DoStackArgsOpt(this->m_func))
    {
        // Load directly from the stack-allocated arguments object.
        emittedFastPath = GenerateFastArgumentsLdElemI(ldElem, labelFallThru);
        emitBailout = true;
    }
    else if (GenerateFastStringLdElem(ldElem, labelHelper, labelFallThru))
    {
        emittedFastPath = true;
    }
    else
    {
        IR::LabelInstr * labelCantUseArray = labelHelper;
        if (isNativeArrayLoad)
        {
            if (ldElem->GetDst()->GetType() == TyVar)
            {
                // Skip the fast path and just generate a helper call
                return true;
            }

            // Specialized native array lowering for LdElem requires that it is profiled. When not profiled, GlobOpt should not
            // have specialized it.
            Assert(ldElem->IsProfiledInstr());

            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            labelCantUseArray = labelBailOut;
        }

        bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
        indirOpnd =
            GenerateFastElemICommon(
                ldElem,
                false,
                src1->AsIndirOpnd(),
                labelHelper,
                labelCantUseArray,
                labelFallThru,
                &isTypedArrayElement,
                &isStringIndex,
                &emitBailout,
                nullptr, /* pLabelSegmentLengthIncreased */
                true, /* checkArrayLengthOverflow */
                false, /* forceGenerateFastPath */
                false, /* returnLength */
                nullptr, /* bailOutLabelInstr */
                &indirOpndOverflowed);

        IR::Opnd *dst = ldElem->GetDst();
        IRType dstType = dst->AsRegOpnd()->GetType();

        // The index is negative or not int.
        if (indirOpnd == nullptr)
        {
            // could have bailout kind BailOutOnArrayAccessHelperCall if indirOpnd overflows
            Assert(!(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);

            // don't check fast path without bailout because it might not be TypedArray
            if (indirOpndOverflowed && ldElem->HasBailOutInfo())
            {
                bool bailoutForOpndOverflow = false;
                const IR::BailOutKind bailOutKind = ldElem->GetBailOutKind();

                // return undefined for typed array if load dest is var, bailout otherwise
                if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
                {
                    if (dst->IsVar())
                    {
                        // returns undefined in case of indirOpnd overflow which is consistent with behavior of interpreter
                        IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueUndefined);
                        InsertMove(dst, undefinedOpnd, ldElem);
                        ldElem->FreeSrc1();
                        ldElem->FreeDst();
                        ldElem->Remove();
                        emittedFastPath = true;
                    }
                    else
                    {
                        bailoutForOpndOverflow = true;
                    }
                }

                if (bailoutForOpndOverflow || (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
                {
                    // Populate the type-specialized dst with a dummy value to satisfy
                    // downstream code, then bail out unconditionally.
                    IR::Opnd * constOpnd = nullptr;
                    if (dst->IsFloat())
                    {
                        constOpnd = IR::FloatConstOpnd::New(Js::JavascriptNumber::NaN, TyFloat64, m_func);
                    }
                    else
                    {
                        constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
                    }
                    InsertMove(dst, constOpnd, ldElem);
                    ldElem->FreeSrc1();
                    ldElem->FreeDst();
                    GenerateBailOut(ldElem, nullptr, nullptr);
                    emittedFastPath = true;
                }
                return !emittedFastPath;
            }
            // The global optimizer should never type specialize a LdElem for which the index is not int or an integer constant
            // with a negative value. This would force an unconditional bail out on the main code path.
            else if (dst->IsVar())
            {
                if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
                        this->m_func->GetJITFunctionBody()->GetDisplayName(),
                        this->m_func->GetDebugNumberSet(debugStringBuffer),
                        Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode));
                    Output::Flush();
                }

                // We must be dealing with some unconventional index value. Don't emit fast path, but go directly to helper.
                emittedFastPath = false;
                return true;
            }
            else
            {
                AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
                Assert(dst->IsRegOpnd());

                // If global optimizer failed to notice the unconventional index and type specialized the dst,
                // there is nothing to do but bail out. This could happen if global optimizer's information based
                // on value tracking fails to recognize a non-integer index or a constant int index that is negative.
                // The bailout below ensures that we behave correctly in retail builds even under
                // these (unlikely) conditions. To satisfy the downstream code we must populate the type specialized operand
                // with some made up values, even though we will unconditionally bail out here and the values will never be
                // used.
                IR::IntConstOpnd *constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
                InsertMove(dst, constOpnd, ldElem);
                ldElem->FreeSrc1();
                ldElem->FreeDst();
                GenerateBailOut(ldElem, nullptr, nullptr);
                return false;
            }
        }

        const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
        const ValueType baseValueType(src1->AsIndirOpnd()->GetBaseOpnd()->GetValueType());

        if ((ldElem->HasBailOutInfo() &&
             ldElem->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
             ldElem->GetBailOutInfo()->bailOutOffset <= ldElem->GetByteCodeOffset() &&
             dst->IsEqual(src1->AsIndirOpnd()->GetBaseOpnd())) ||
            (src1->AsIndirOpnd()->GetIndexOpnd() && dst->IsEqual(src1->AsIndirOpnd()->GetIndexOpnd())))
        {
            // This is a pre-op bailout where the dst is the same as one of the srcs. The dst may be trashed before bailing out,
            // but since the operation will be processed again in the interpreter, src values need to be kept intact. Use a
            // temporary dst until after the operation is complete.
            IR::Instr *instrSink = ldElem->SinkDst(Js::OpCode::Ld_A);

            // The sink instruction needs to be on the fall-through path
            instrSink->Unlink();
            labelFallThru->InsertAfter(instrSink);

            LowererMD::ChangeToAssign(instrSink);
            dst = ldElem->GetDst();
        }

        if (isTypedArrayElement)
        {
            // For typedArrays, convert the loaded element to the appropriate type
            IR::RegOpnd *reg;
            IR::AutoReuseOpnd autoReuseReg;
            Assert(dst->IsRegOpnd());

            if(indirOpnd->IsFloat())
            {
                AssertMsg((dstType == TyFloat64) || (dstType == TyVar), "For Float32Array LdElemI's dst should be specialized to TyFloat64 or not at all.");

                if(indirOpnd->IsFloat32())
                {
                    // MOVSS reg32.f32, indirOpnd.f32
                    IR::RegOpnd *reg32 = IR::RegOpnd::New(TyFloat32, this->m_func);
                    const IR::AutoReuseOpnd autoReuseReg32(reg32, m_func);
                    InsertMove(reg32, indirOpnd, ldElem);

                    // CVTPS2PD dst/reg.f64, reg32.f64
                    reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
                    autoReuseReg.Initialize(reg, m_func);
                    InsertConvertFloat32ToFloat64(reg, reg32, ldElem);
                }
                else
                {
                    Assert(indirOpnd->IsFloat64());

                    // MOVSD dst/reg.f64, indirOpnd.f64
                    reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
                    autoReuseReg.Initialize(reg, m_func);
                    InsertMove(reg, indirOpnd, ldElem);
                }

                if (dstType != TyFloat64)
                {
                    // Convert reg.f64 to var
                    m_lowererMD.SaveDoubleToVar(dst->AsRegOpnd(), reg, ldElem, ldElem);
                }

#if FLOATVAR
                // For NaNs, go to the helper to guarantee we don't have an illegal NaN
                // UCOMISD reg, reg
                {
                    IR::Instr *const instr = IR::Instr::New(Js::OpCode::UCOMISD, this->m_func);
                    instr->SetSrc1(reg);
                    instr->SetSrc2(reg);
                    ldElem->InsertBefore(instr);
                }

                // JP $helper
                {
                    IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::JP, labelHelper, this->m_func);
                    ldElem->InsertBefore(instr);
                }
#endif

                if(dstType == TyFloat64)
                {
                    emitBailout = true;
                }
            }
            else
            {
                AssertMsg((dstType == TyInt32) || (dstType == TyVar), "For Int/UintArray LdElemI's dst should be specialized to TyInt32 or not at all.");

                reg = dstType == TyInt32 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyInt32, this->m_func);
                autoReuseReg.Initialize(reg, m_func);

                // Int32 and Uint32 arrays could overflow an int31, but the others can't
                if (indirOpnd->GetType() != TyUint32
#if !INT32VAR
                    && indirOpnd->GetType() != TyInt32
#endif
                    )
                {
                    reg->SetValueType(ValueType::GetTaggedInt()); // Fits as a tagged-int
                }

                // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
                IR::Instr *const instrMov = InsertMove(reg, indirOpnd, ldElem);

                if (dstType == TyInt32)
                {
                    instrMov->dstIsTempNumber = ldElem->dstIsTempNumber;
                    instrMov->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
                    if (indirOpnd->GetType() == TyUint32)
                    {
                        // A Uint32 value above INT32_MAX can't be represented in an int32 dst:
                        // TEST dst, dst
                        // JSB $helper (bailout)
                        InsertCompareBranch(
                            reg,
                            IR::IntConstOpnd::New(0, TyUint32, this->m_func, /* dontEncode = */ true),
                            Js::OpCode::BrLt_A,
                            labelHelper,
                            ldElem);
                    }
                    emitBailout = true;
                }
                else
                {
                    // MOV dst, reg
                    IR::Instr *const instr = IR::Instr::New(Js::OpCode::ToVar, dst, reg, this->m_func);
                    instr->dstIsTempNumber = ldElem->dstIsTempNumber;
                    instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
                    ldElem->InsertBefore(instr);

                    // Convert dst to var
                    m_lowererMD.EmitLoadVar(instr, /* isFromUint32 = */ (indirOpnd->GetType() == TyUint32));
                }
            }

            // JMP $fallthrough
            InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

            emittedFastPath = true;

            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
            {
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(),
                    this->m_func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode),
                    baseValueTypeStr,
                    (!dst->IsVar() ? _u("specialized") : _u("not specialized")));
                Output::Print(_u("\n"));
                Output::Flush();
            }
        }
        else
        {
            // MOV dst, indirOpnd
            InsertMove(dst, indirOpnd, ldElem);

            // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
            // we don't need to do any further checks in that case

            // For LdMethodElem, if the loaded value is a tagged number, the error message generated by the helper call is
            // better than if we were to just try to call the number. Also, the call arguments need to be evaluated before
            // throwing the error, so just test whether it's an object and jump to helper if it's not.
            const bool needObjectTest = !isStringIndex && !isNativeArrayLoad && ldElem->m_opcode == Js::OpCode::LdMethodElem;
            needMissingValueCheck =
                !isStringIndex && !(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues());
            if(needMissingValueCheck)
            {
                // TEST dst, dst
                // JEQ $helper | JNE $fallthrough
                InsertCompareBranch(
                    dst,
                    GetMissingItemOpnd(dst->GetType(), m_func),
                    needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
                    needObjectTest ? labelHelper : labelFallThru,
                    ldElem,
                    true);

                if (isNativeArrayLoad)
                {
                    Assert(!needObjectTest);
                    Assert(labelHelper != labelBailOut);
                    if(ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined())
                    {
                        // We're going to bail out trying to load "missing value" into a type-spec'd opnd.
                        // Branch to a point where we'll convert the array so that we don't keep bailing here.
                        // (Gappy arrays are not well-suited to nativeness.)
                        labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                        InsertBranch(Js::OpCode::Br, labelMissingNative, ldElem);
                    }
                    else
                    {
                        // If the value has not been profiled to be undefined at some point, jump directly to bail out
                        InsertBranch(Js::OpCode::Br, labelBailOut, ldElem);
                    }
                }
            }
            if(needObjectTest)
            {
                // GenerateObjectTest(dst)
                // JIsObject $fallthrough
                m_lowererMD.GenerateObjectTest(dst, ldElem, labelFallThru, true);
            }
            else if(!needMissingValueCheck)
            {
                // JMP $fallthrough
                InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
            }
            emittedFastPath = true;
        }
    }

    // $helper:
    //      bailout or caller generated helper call
    // $fallthru:
    if (!emittedFastPath)
    {
        labelHelper->isOpHelper = false;
    }
    ldElem->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;

    if (isNativeArrayLoad)
    {
        Assert(ldElem->HasBailOutInfo());
        Assert(labelHelper != labelBailOut);

        // Transform the original instr:
        //
        // $helper:
        // dst = LdElemI_A src (BailOut)
        // $fallthrough:
        //
        // to:
        //
        //       b $fallthru <--- we get here if we loaded a valid element directly
        // $helper:
        // dst = LdElemI_A src
        //       cmp dst, MissingItem
        //       bne $fallthrough
        // $bailout:
        //       BailOut
        // $fallthrough:
        LowerOneBailOutKind(ldElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
        IR::Instr *const insertBeforeInstr = ldElem->m_next;

        // Do missing value check on value returned from helper so that we don't have to check the index against
        // array length. (We already checked it above against the segment length.)
        bool hasBeenUndefined = ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined();
        if (hasBeenUndefined)
        {
            if(!emitBailout)
            {
                if (labelMissingNative == nullptr)
                {
                    labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                }
                InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr, true);
            }
            InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);

            if(labelMissingNative)
            {
                // We're going to bail out on a load from a gap, but convert the array to Var first, so we don't just
                // bail here over and over. Gappy arrays are not well suited to nativeness.
                // NOTE: only emit this call if the profile tells us that this has happened before ("hasBeenUndefined").
                // Emitting this in Navier-Stokes brutalizes the score.
                insertBeforeInstr->InsertBefore(labelMissingNative);

                IR::JnHelperMethod helperMethod;
                indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
                if (indirOpnd->GetBaseOpnd()->GetValueType().HasIntElements())
                {
                    helperMethod = IR::HelperIntArr_ToVarArray;
                }
                else
                {
                    Assert(indirOpnd->GetBaseOpnd()->GetValueType().HasFloatElements());
                    helperMethod = IR::HelperFloatArr_ToVarArray;
                }
                m_lowererMD.LoadHelperArgument(insertBeforeInstr, indirOpnd->GetBaseOpnd());
                IR::Instr *instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
                instrHelper->SetSrc1(IR::HelperCallOpnd::New(helperMethod, m_func));
                insertBeforeInstr->InsertBefore(instrHelper);
                m_lowererMD.LowerCall(instrHelper, 0);
            }
        }
        else
        {
            if(!emitBailout)
            {
                InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr, true);
            }
            InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
        }

        insertBeforeInstr->InsertBefore(labelBailOut);
    }

    if (emitBailout)
    {
        // The fast path handled the value transfer; the remaining instr becomes
        // a pure bailout point.
        ldElem->UnlinkSrc1();
        ldElem->UnlinkDst();
        GenerateBailOut(ldElem, nullptr, nullptr);
    }

    return !emitBailout;
}
  14305. IR::Opnd *
  14306. Lowerer::GetMissingItemOpnd(IRType type, Func *func)
  14307. {
  14308. if (type == TyVar)
  14309. {
  14310. return IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, func, true);
  14311. }
  14312. if (type == TyInt32)
  14313. {
  14314. return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true);
  14315. }
  14316. Assert(type == TyFloat64);
  14317. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyFloat64, func);
  14318. }
// GenerateFastStElemI
//
// Emits the inline fast path for StElemI (indexed element store). The
// base/index/bounds checks are produced by GenerateFastElemICommon; this
// function emits the store itself, specialized on the destination:
//   - typed arrays: direct native stores, converting the source as needed
//     (float64->float32, var->float, var->int32, and Uint8Clamped clamping)
//   - regular JS arrays: an optional missing-value check followed by a
//     write-barriered store
//
// Returns true when the caller must still generate the helper call after the
// $helper label; returns false when this function fully consumed the
// instruction by emitting a bailout (emitBailout path).
// *instrIsInHelperBlockRef is set to true once inserted code is past $helper,
// so the caller knows subsequent code lives in a helper block.
bool
Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelSegmentLengthIncreased;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelBailOut = nullptr;

    IR::Opnd *dst = stElem->GetDst();
    IR::IndirOpnd * indirOpnd = dst->AsIndirOpnd();

    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");

    // From FastElemICommon:
    //  TEST base, AtomTag                  -- check base not tagged int
    //  JNE $helper
    //  MOV r1, [base + offset(type)]       -- check base isArray
    //  CMP [r1 + offset(typeId)], TypeIds_Array
    //  JNE $helper
    //  TEST index, 1                       -- index tagged int
    //  JEQ $helper
    //  MOV r2, index
    //  SAR r2, Js::VarTag_Shift            -- remote atom tag
    //  JS $helper                          -- exclude negative index
    //  MOV r4, [base + offset(head)]
    //  CMP r2, [r4 + offset(length)]       -- bounds check
    //  JB $done
    //  CMP r2, [r4 + offset(size)]         -- chunk has room?
    //  JAE $helper
    //  LEA r5, [r2 + 1]
    //  MOV [r4 + offset(length)], r5       -- update length on chunk
    //  CMP r5, [base + offset(length)]
    //  JBE $done
    //  MOV [base + offset(length)], r5     -- update length on array
    // $done
    //  LEA r3, [r4 + offset(elements)]
    //
    // Generated here:
    //  MOV [r3 + r2], src

    labelFallThru = stElem->GetOrCreateContinueLabel();
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    bool emitBailout = false;

    bool isNativeArrayStore = indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
    IR::LabelInstr * labelCantUseArray = labelHelper;
    if (isNativeArrayStore)
    {
        if (stElem->GetSrc1()->GetType() != GetArrayIndirType(indirOpnd->GetBaseOpnd()->GetValueType()))
        {
            // Skip the fast path and just generate a helper call
            return true;
        }

        if(stElem->HasBailOutInfo())
        {
            const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
            if (bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
            {
                // Conventional native-array access failures branch to a dedicated
                // bailout label instead of the shared helper label.
                labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                labelCantUseArray = labelBailOut;
            }
        }
    }

    // Note: only indirOpndOverflowed is initialized here; the other two flags are
    // unconditionally written by GenerateFastElemICommon below.
    bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
    indirOpnd =
        GenerateFastElemICommon(
            stElem,
            true,
            indirOpnd,
            labelHelper,
            labelCantUseArray,
            labelFallThru,
            &isTypedArrayElement,
            &isStringIndex,
            &emitBailout,
            &labelSegmentLengthIncreased,
            true, /* checkArrayLengthOverflow */
            false, /* forceGenerateFastPath */
            false, /* returnLength */
            nullptr, /* bailOutLabelInstr */
            &indirOpndOverflowed);

    IR::Opnd *src = stElem->GetSrc1();
    const IR::AutoReuseOpnd autoReuseSrc(src, m_func);

    // The index is negative or not int.
    if (indirOpnd == nullptr)
    {
        Assert(!(stElem->HasBailOutInfo() && stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);

        if (indirOpndOverflowed && stElem->HasBailOutInfo())
        {
            bool emittedFastPath = false;
            const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
            // ignore StElemI in case of indirOpnd overflow only for typed array which is consistent with behavior of interpreter
            if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
            {
                stElem->FreeSrc1();
                stElem->FreeDst();
                stElem->Remove();
                emittedFastPath = true;
            }
            if (!emittedFastPath && (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
            {
                stElem->FreeSrc1();
                stElem->FreeDst();
                GenerateBailOut(stElem, nullptr, nullptr);
                emittedFastPath = true;
            }
            return !emittedFastPath;
        }
        // The global optimizer should never type specialize a StElem for which we know the index is not int or is a negative
        // int constant. This would result in an unconditional bailout on the main code path.
        else if (src->IsVar())
        {
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(),
                    this->m_func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode));
                Output::Flush();
            }

            // We must be dealing with some atypical index value. Don't emit fast path, but go directly to helper.
            return true;
        }
        else
        {
            // If global optimizer failed to notice the unconventional index and type specialized the src,
            // there is nothing to do but bail out. We should never hit this code path, unless the global optimizer's conditions
            // for not specializing the instruction don't match the lowerer's conditions for not emitting the array checks (see above).
            // This could happen if global optimizer's information based on value tracking fails to recognize a non-integer index or
            // a constant int index that is negative. The bailout below ensures that we behave correctly in retail builds even under
            // these (unlikely) conditions.
            AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");

            stElem->FreeSrc1();
            stElem->FreeDst();
            GenerateBailOut(stElem, nullptr, nullptr);
            return false;
        }
    }

    const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
    const ValueType baseValueType(dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType());

    if (isTypedArrayElement)
    {
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
        {
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
                this->m_func->GetJITFunctionBody()->GetDisplayName(),
                this->m_func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode),
                baseValueTypeStr,
                (!src->IsVar() ? _u("specialized") : _u("not specialized")));
            Output::Print(_u("\n"));
            Output::Flush();
        }

        ObjectType objectType = baseValueType.GetObjectType();

        if(indirOpnd->IsFloat())
        {
            // Destination is a float32/float64 typed-array element.
            if (src->GetType() == TyFloat64)
            {
                IR::RegOpnd *const regSrc = src->AsRegOpnd();

                if (indirOpnd->IsFloat32())
                {
                    // CVTSD2SS reg.f32, regSrc.f64    -- Convert regSrc from f64 to f32
                    IR::RegOpnd *const reg = IR::RegOpnd::New(TyFloat32, this->m_func);
                    const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                    InsertConvertFloat64ToFloat32(reg, regSrc, stElem);

                    // MOVSS indirOpnd, reg
                    InsertMove(indirOpnd, reg, stElem, false);
                }
                else
                {
                    // MOVSD indirOpnd, regSrc
                    InsertMove(indirOpnd, regSrc, stElem, false);
                }
                emitBailout = true;
            }
            else
            {
                Assert(src->GetType() == TyVar);

                // MOV reg, src
                IR::RegOpnd *const reg = IR::RegOpnd::New(TyVar, this->m_func);
                const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                InsertMove(reg, src, stElem);

                // Convert to float, and assign to indirOpnd
                if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
                {
                    IR::RegOpnd* dstReg = IR::RegOpnd::New(indirOpnd->GetType(), this->m_func);
                    m_lowererMD.EmitLoadFloat(dstReg, reg, stElem, stElem, labelHelper);
                    InsertMove(indirOpnd, dstReg, stElem);
                }
                else
                {
                    m_lowererMD.EmitLoadFloat(indirOpnd, reg, stElem, stElem, labelHelper);
                }
            }
        }
        else if (objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray)
        {
            // Uint8ClampedArray stores clamp the value into [0, 255] rather than
            // truncating like the other integer typed arrays.
            Assert(indirOpnd->GetType() == TyUint8);

            IR::RegOpnd *regSrc;
            IR::AutoReuseOpnd autoReuseRegSrc;
            if(src->IsRegOpnd())
            {
                regSrc = src->AsRegOpnd();
            }
            else
            {
                regSrc = IR::RegOpnd::New(StackSym::New(src->GetType(), m_func), src->GetType(), m_func);
                autoReuseRegSrc.Initialize(regSrc, m_func);

                InsertMove(regSrc, src, stElem);
            }

            IR::Opnd *bitMaskOpnd;
            IRType srcType = regSrc->GetType();

            if ((srcType == TyFloat64) || (srcType == TyInt32))
            {
                // if (srcType == TyInt32) {
                //     TEST regSrc, ~255
                //     JE $storeValue
                //     JSB $handleNegative
                //     MOV indirOpnd, 255
                //     JMP $fallThru
                //   $handleNegative [isHelper = false]
                //     MOV indirOpnd, 0
                //     JMP $fallThru
                //   $storeValue
                //     MOV indirOpnd, regSrc
                // }
                // else {
                //     MOVSD regTmp, regSrc
                //     ADDSD regTmp, 0.5
                //     CVTTSD2SI regOpnd, regTmp
                //     TEST regOpnd, ~255
                //     JE $storeValue
                //   $handleOutOfBounds [isHelper = true]
                //     COMISD regSrc, [&FloatZero]
                //     JB $handleNegative
                //     MOV regOpnd, 255
                //     JMP $storeValue
                //   $handleNegative [isHelper = true]
                //     MOV regOpnd, 0
                //   $storeValue
                //     MOV indirOpnd, regOpnd
                // }
                // $fallThru

                IR::RegOpnd *regOpnd;
                IR::AutoReuseOpnd autoReuseRegOpnd;
                if (srcType == TyInt32)
                {
                    // When srcType == TyInt32 we will never call the helper and we will never
                    // modify the regOpnd. Therefore, it's okay to use regSrc directly, and it
                    // reduces register pressure.
                    regOpnd = regSrc;
                }
                else
                {
#ifdef _M_IX86
                    AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GlobOpt shouldn't have specialized Uint8ClampedArray StElem to float64 if SSE2 is unavailable.");
#endif
                    regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
                    autoReuseRegOpnd.Initialize(regOpnd, m_func);

                    Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray);
                    // Uint8ClampedArray follows IEEE 754 rounding rules for ties which round up
                    // odd integers and round down even integers. Both ties result in the nearest
                    // even integer value.
                    //
                    // CVTSD2SI regOpnd, regSrc
                    LowererMD::InsertConvertFloat64ToInt32(RoundModeHalfToEven, regOpnd, regSrc, stElem);
                }

                IR::LabelInstr *labelStoreValue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
#ifndef _M_ARM
                // TEST regOpnd, ~255
                // JE $storeValue
                bitMaskOpnd = IR::IntConstOpnd::New(~255, TyInt32, this->m_func, true);
                InsertTestBranch(regOpnd, bitMaskOpnd, Js::OpCode::BrEq_A, labelStoreValue, stElem);
#else // ARM
                // Special case for ARM, a shift may be better
                //
                // ASRS tempReg, src, 8
                // BEQ $inlineSet
                InsertShiftBranch(
                    Js::OpCode::Shr_A,
                    IR::RegOpnd::New(TyInt32, this->m_func),
                    regOpnd,
                    IR::IntConstOpnd::New(8, TyInt8, this->m_func),
                    Js::OpCode::BrEq_A,
                    labelStoreValue,
                    stElem);
#endif

                IR::LabelInstr *labelHandleNegative = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, srcType == TyFloat64);
                if (srcType == TyInt32)
                {
                    // JSB $handleNegativeOrOverflow
                    InsertBranch(
                        LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A),
                        labelHandleNegative,
                        stElem);

                    // MOV IndirOpnd.u8, 255
                    InsertMove(indirOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);

                    // JMP $fallThru
                    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);

                    // $handleNegative [isHelper = false]
                    stElem->InsertBefore(labelHandleNegative);

                    // MOV IndirOpnd.u8, 0
                    InsertMove(indirOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);

                    // JMP $fallThru
                    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
                }
                else
                {
                    Assert(regOpnd != regSrc);

                    // This label is just to ensure the following code is moved to the helper block.
                    // $handleOutOfBounds [isHelper = true]
                    IR::LabelInstr *labelHandleOutOfBounds = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                    stElem->InsertBefore(labelHandleOutOfBounds);

                    // COMISD regSrc, FloatZero
                    // JB labelHandleNegative
                    IR::MemRefOpnd * zeroOpnd = IR::MemRefOpnd::New(this->m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyMachDouble, this->m_func);
                    InsertCompareBranch(regSrc, zeroOpnd, Js::OpCode::BrNotGe_A, labelHandleNegative, stElem);

                    // MOV regOpnd, 255
                    InsertMove(regOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);

                    // JMP $storeValue
                    InsertBranch(Js::OpCode::Br, labelStoreValue, stElem);

                    // $handleNegative [isHelper = true]
                    stElem->InsertBefore(labelHandleNegative);

                    // MOV regOpnd, 0
                    InsertMove(regOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
                }

                // $storeValue
                stElem->InsertBefore(labelStoreValue);

                // MOV IndirOpnd.u8, regOpnd.u8
                InsertMove(indirOpnd, regOpnd, stElem);

                emitBailout = true;
            }
            else
            {
                Assert(srcType == TyVar);

#if INT32VAR
                bitMaskOpnd = IR::AddrOpnd::New((Js::Var)~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), IR::AddrOpndKindConstantVar, this->m_func, true);
#else
                bitMaskOpnd = IR::IntConstOpnd::New(~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), TyMachReg, this->m_func, true);
#endif
                // Note: We are assuming that if no bits other than ~(TaggedInt(255)) are 1, that we have a tagged
                //       int value between 0 - 255.
                // #if INT32VAR
                // This works for pointers because tagged int bit can't be on, and first 64k are not valid addresses
                // This works for floats because a valid float would have one of the upper 13 bits on.
                // #else
                // Any pointer is larger than 512 because first 64k memory is reserved by the OS
                // #endif

                IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
#ifndef _M_ARM
                // TEST src, ~(TaggedInt(255))    -- Check for tagged int >= 255 and <= 0
                // JEQ $inlineSet
                InsertTestBranch(regSrc, bitMaskOpnd, Js::OpCode::BrEq_A, labelInlineSet, stElem);
#else // ARM
                // Special case for ARM, a shift may be better
                //
                // ASRS tempReg, src, 8
                // BEQ $inlineSet
                InsertShiftBranch(
                    Js::OpCode::Shr_A,
                    IR::RegOpnd::New(TyInt32, this->m_func),
                    regSrc,
                    IR::IntConstOpnd::New(8, TyInt8, this->m_func),
                    Js::OpCode::BrEq_A,
                    labelInlineSet,
                    stElem);
#endif

                // Uint8ClampedArray::DirectSetItem(array, index, value);
                // Inserting a helper call. Make sure it observes the main instructions's requirements regarding implicit calls.
                if (!instrIsInHelperBlock)
                {
                    stElem->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, m_func, true));
                }

                if (stElem->HasBailOutInfo() && (stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
                {
                    // Bail out instead of doing the helper call.
                    Assert(labelHelper);
                    this->InsertBranch(Js::OpCode::Br, labelHelper, stElem);
                }
                else
                {
                    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
                    stElem->InsertBefore(instr);

                    if (stElem->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(stElem->GetBailOutKind()))
                    {
                        // Bail out if this helper triggers implicit calls.
                        instr = instr->ConvertToBailOutInstr(stElem->GetBailOutInfo(), stElem->GetBailOutKind());
                        if (stElem->GetBailOutInfo()->bailOutInstr == stElem)
                        {
                            // The bailout info is still anchored on stElem; share it so both
                            // instructions can use the same bailout target.
                            IR::Instr * instrShare = stElem->ShareBailOut();
                            LowerBailTarget(instrShare);
                        }
                    }

                    m_lowererMD.LoadHelperArgument(instr, regSrc);

                    IR::Opnd *indexOpnd = indirOpnd->GetIndexOpnd();
                    if (indexOpnd == nullptr)
                    {
                        // Constant index: pass the indir's offset as the index argument.
                        indexOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyInt32, this->m_func);
                    }
                    else
                    {
                        Assert(indirOpnd->GetOffset() == 0);
                    }
                    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
                    m_lowererMD.LoadHelperArgument(instr, stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd());

                    Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedMixedArray || objectType == ObjectType::Uint8ClampedVirtualArray);
                    m_lowererMD.ChangeToHelperCall(instr, IR::JnHelperMethod::HelperUint8ClampedArraySetItem);

                    // JMP $fallThrough
                    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
                }

                //$inlineSet
                stElem->InsertBefore(labelInlineSet);

                IR::RegOpnd *regOpnd;
                IR::AutoReuseOpnd autoReuseRegOpnd;
#if INT32VAR
                regOpnd = regSrc;
#else
                // MOV r1, src
                // SAR r1, 1
                regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
                autoReuseRegOpnd.Initialize(regOpnd, m_func);
                InsertShift(
                    Js::OpCode::Shr_A,
                    false /* needFlags */,
                    regOpnd,
                    regSrc,
                    IR::IntConstOpnd::New(1, TyInt8, this->m_func),
                    stElem);
#endif

                // MOV IndirOpnd.u8, reg.u8
                InsertMove(indirOpnd, regOpnd, stElem);
            }
        }
        else
        {
            // Non-clamped integer typed-array element.
            if (src->IsInt32())
            {
                // MOV indirOpnd, src
                InsertMove(indirOpnd, src, stElem);
                emitBailout = true;
            }
            else if (src->IsFloat64())
            {
                AssertMsg(indirOpnd->GetType() == TyUint32, "Only StElemI to Uint32Array could be specialized to float64.");
#ifdef _M_IX86
                AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GloOpt shouldn't have specialized Uint32Array StElemI to float64 if SSE2 is unavailable.");
#endif

                bool bailOutOnHelperCall = stElem->HasBailOutInfo() ? !!(stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) : false;
                if (bailOutOnHelperCall)
                {
                    if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
                    {
                        // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                        // because it won't help and the same thing will happen again. Just abort jitting this function.
                        if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                        {
                            Output::Print(_u("    Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                            Output::Flush();
                        }
                        throw Js::OperationAbortedException();
                    }

                    throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
                }

                IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
                const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                m_lowererMD.EmitFloatToInt(reg, src, stElem, stElem, labelHelper);

                // MOV indirOpnd, reg
                InsertMove(indirOpnd, reg, stElem);

                emitBailout = true;
            }
            else
            {
                Assert(src->IsVar());

                if(src->IsAddrOpnd())
                {
                    // Constant tagged-int source: store its unboxed int32 value directly.
                    IR::AddrOpnd *const addrSrc = src->AsAddrOpnd();
                    Assert(addrSrc->IsVar());
                    Assert(Js::TaggedInt::Is(addrSrc->m_address));

                    // MOV indirOpnd, intValue
                    InsertMove(
                        indirOpnd,
                        IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(addrSrc->m_address), TyInt32, m_func),
                        stElem);
                }
                else
                {
                    IR::RegOpnd *const regSrc = src->AsRegOpnd();

                    // FromVar reg, Src
                    IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
                    const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                    IR::Instr * instr = IR::Instr::New(Js::OpCode::FromVar, reg, regSrc, stElem->m_func);
                    stElem->InsertBefore(instr);

                    // Convert reg to int32
                    // Note: ToUint32 is implemented as (uint32)ToInt32()
                    IR::BailOutKind bailOutKind = stElem->HasBailOutInfo() ? stElem->GetBailOutKind() : IR::BailOutInvalid;
                    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
                    {
                        instr = instr->ConvertToBailOutInstr(stElem->GetBailOutInfo(), bailOutKind);
                        if (stElem->GetBailOutInfo()->bailOutInstr == stElem)
                        {
                            IR::Instr * instrShare = stElem->ShareBailOut();
                            LowerBailTarget(instrShare);
                        }
                    }
                    bool bailOutOnHelperCall = !!(bailOutKind & IR::BailOutOnArrayAccessHelperCall);
                    m_lowererMD.EmitLoadInt32(instr, true /*conversionFromObjectAllowed*/, bailOutOnHelperCall, labelHelper);

                    // MOV indirOpnd, reg
                    InsertMove(indirOpnd, reg, stElem);
                }
            }
        }
    }
    else
    {
        // Regular (non-typed) JS array store.
        if(labelSegmentLengthIncreased)
        {
            // When the segment length was grown for this store, the slot is known
            // to be a fresh element, so no missing-value check is needed on that path.
            IR::Instr *const insertBeforeInstr = labelSegmentLengthIncreased->m_next;

            // labelSegmentLengthIncreased:
            //     mov  [segment + index], src
            //     jmp  $fallThru
            InsertMove(indirOpnd, src, insertBeforeInstr);
            InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
        }

        if (!(isStringIndex || (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())))
        {
            if(!stElem->IsProfiledInstr() || stElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
            {
                // Check whether the store is filling a missing value. If so, fall back to the helper so that it can check whether
                // this store is filling the last missing value in the array. This is necessary to keep the missing value tracking
                // in arrays precise. The check is omitted when profile data says that the store is likely to create missing values.
                //
                //     cmp  [segment + index], Js::SparseArraySegment::MissingValue
                //     je   $helper
                InsertCompareBranch(
                    indirOpnd,
                    GetMissingItemOpnd(src->GetType(), m_func),
                    Js::OpCode::BrEq_A,
                    labelHelper,
                    stElem,
                    true);
            }
            else
            {
                GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, stElem);
            }
        }

        // MOV [r3 + r2], src
        InsertMoveWithBarrier(indirOpnd, src, stElem);
    }

    // JMP $fallThru
    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);

    // $helper:
    //      bailout or caller generated helper call
    // $fallThru:
    stElem->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;

    if (isNativeArrayStore && !isStringIndex)
    {
        Assert(stElem->HasBailOutInfo());
        Assert(labelHelper != labelBailOut);

        // Transform the original instr:
        //
        // $helper:
        //     dst = LdElemI_A src (BailOut)
        // $fallthrough:
        //
        // to:
        //
        // $helper:
        //     dst = LdElemI_A src
        //     b $fallthrough
        // $bailout:
        //     BailOut
        // $fallthrough:
        LowerOneBailOutKind(stElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
        IR::Instr *const insertBeforeInstr = stElem->m_next;
        InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(labelBailOut);
    }

    if (emitBailout)
    {
        // The fast path covered all non-bailout cases; replace the remaining
        // instruction with a bailout.
        stElem->FreeSrc1();
        stElem->FreeDst();
        GenerateBailOut(stElem, nullptr, nullptr);
    }

    return !emitBailout;
}
// GenerateFastLdLen
//
// Emits the inline fast path for LdLen (load of the "length" property),
// covering arrays (per-value-type length offset) and strings (m_charLength).
// The raw uint32 length is range-checked and then converted to the
// destination representation: either a tagged var (INT32VAR tagging or
// shift+tag), or a raw int32 when the instruction carries
// BailOutOnIrregularLength (in which case the irregular-length path bails out
// and the instruction is consumed here).
//
// Returns true when the caller must still generate the helper call after
// $helper; false when the instruction was fully handled here (stack-args opt
// or the BailOutOnIrregularLength bailout). *instrIsInHelperBlockRef is set
// to true once inserted code is past the $helper label.
bool
Lowerer::GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    //     TEST src, AtomTag                  -- check src not tagged int
    //     JNE $helper
    //     CMP [src], JavascriptArray::`vtable' -- check base isArray
    //     JNE $string
    //     MOV length, [src + offset(length)]  -- Load array length
    //     JMP $tovar
    // $string:
    //     CMP [src + offset(type)], static_string_type -- check src isString
    //     JNE $helper
    //     MOV length, [src + offset(length)]  -- Load string length
    // $toVar:
    //     TEST length, 0xC0000000             -- test for overflow of SHL, or negative
    //     JNE $helper
    //     SHL length, Js::VarTag_Shift        -- restore the var tag on the result
    //     INC length
    //     MOV dst, length
    //     JMP $fallthru
    // $helper:
    //     CALL GetProperty(src, length_property_id, scriptContext)
    // $fallthru:

    // NOTE(review): opnd is already an IR::RegOpnd here, so the AsRegOpnd() on
    // the next-but-one line is a redundant no-op cast, and the AssertMsg below
    // follows the cast rather than preceding it.
    IR::RegOpnd * opnd = ldLen->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * dst = ldLen->GetDst()->AsRegOpnd();
    IR::RegOpnd * src = opnd->AsRegOpnd();
    const ValueType srcValueType(src->GetValueType());

    AssertMsg(src->IsRegOpnd(), "Expected regOpnd on LdLen");

    IR::LabelInstr *const labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (ldLen->DoStackArgsOpt(this->m_func))
    {
        // arguments.length with the stack-args optimization: handled entirely
        // by the dedicated fast path; the LdLen instruction is removed.
        GenerateFastArgumentsLdLen(ldLen, ldLen->GetOrCreateContinueLabel());
        ldLen->Remove();
        return false;
    }
    else
    {
        const bool arrayFastPath = ShouldGenerateArrayFastPath(src, false, true, false);

        // HasBeenString instead of IsLikelyString because it could be a merge between StringObject and String, and this
        // information about whether it's a StringObject or some other object is not available in the profile data
        const bool stringFastPath = srcValueType.IsUninitialized() || srcValueType.HasBeenString();

        if(!(arrayFastPath || stringFastPath))
        {
            return true;
        }

        const int32 arrayOffsetOfLength =
            srcValueType.IsLikelyAnyOptimizedArray()
                ? GetArrayOffsetOfLength(srcValueType)
                : Js::JavascriptArray::GetOffsetOfLength();
        IR::LabelInstr *labelString = nullptr;
        IR::RegOpnd *arrayOpnd = src;
        IR::RegOpnd *arrayLengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
        if(arrayFastPath)
        {
            if(!srcValueType.IsAnyOptimizedArray())
            {
                if(stringFastPath)
                {
                    // If we don't have info about the src value type or its object type, the array and string fast paths are
                    // generated
                    labelString = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                }
                arrayOpnd = GenerateArrayTest(src, labelHelper, stringFastPath ? labelString : labelHelper, ldLen, false);
            }
            else if(src->IsArrayRegOpnd())
            {
                // The length is already available in a hoisted sym; reuse it.
                IR::ArrayRegOpnd *const arrayRegOpnd = src->AsArrayRegOpnd();
                if(arrayRegOpnd->LengthSym())
                {
                    arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), TyUint32, m_func);
                    DebugOnly(arrayLengthOpnd->FreezeSymValue());
                    autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
                }
            }
        }
        const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

        IR::RegOpnd *lengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseLengthOpnd;
        // Lazily allocate the uint32 length register; either fast path may be
        // the one that first materializes it.
        const auto EnsureLengthOpnd = [&]()
        {
            if(lengthOpnd)
            {
                return;
            }
            lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
            autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
        };

        if(arrayFastPath)
        {
            if(arrayLengthOpnd)
            {
                lengthOpnd = arrayLengthOpnd;
                autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
                Assert(!stringFastPath);
            }
            else
            {
                // MOV length, [array + offset(length)]     -- Load array length
                EnsureLengthOpnd();
                IR::IndirOpnd *const indirOpnd = IR::IndirOpnd::New(arrayOpnd, arrayOffsetOfLength, TyUint32, this->m_func);
                InsertMove(lengthOpnd, indirOpnd, ldLen);
            }
        }
        if(stringFastPath)
        {
            IR::LabelInstr *labelToVar = nullptr;
            if(arrayFastPath)
            {
                // JMP $tovar
                labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                InsertBranch(Js::OpCode::Br, labelToVar, ldLen);

                // $string:
                ldLen->InsertBefore(labelString);
            }

            // CMP [src + offset(type)], static_stringtype -- check src isString
            // JNE $helper
            GenerateStringTest(src, ldLen, labelHelper, nullptr, !arrayFastPath);

            // MOV length, [src + offset(length)]     -- Load string length
            EnsureLengthOpnd();
            IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
            InsertMove(lengthOpnd, indirOpnd, ldLen);

            if(arrayFastPath)
            {
                // $toVar:
                ldLen->InsertBefore(labelToVar);
            }
        }
        Assert(lengthOpnd);

        if(ldLen->HasBailOutInfo() && (ldLen->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnIrregularLength)
        {
            // Int-specialized dst: store the raw length and bail out if it does
            // not fit in a non-negative int32.
            Assert(ldLen->GetBailOutKind() == IR::BailOutOnIrregularLength);
            Assert(dst->IsInt32());

            // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
            //     test length, length
            //     js $helper
            //     mov dst, length
            //     jmp $fallthrough
            InsertCompareBranch(
                lengthOpnd,
                IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
                Js::OpCode::BrLt_A,
                labelHelper,
                ldLen);
            InsertMove(dst, lengthOpnd, ldLen);
            InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);

            // $helper:
            //     (Bail out with IR::BailOutOnIrregularLength)
            ldLen->InsertBefore(labelHelper);
            instrIsInHelperBlock = true;
            ldLen->FreeDst();
            ldLen->FreeSrc1();
            GenerateBailOut(ldLen);

            return false;
        }

#if INT32VAR
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
        //     test length, length
        //     js $helper
        InsertCompareBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
            Js::OpCode::BrLt_A,
            labelHelper,
            ldLen);
#else
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative.
        // Additionally, verify that the signed value's width is not greater than 31 bits, since it needs to be tagged.
        //     test length, 0xC0000000
        //     jne $helper
        InsertTestBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0xC0000000, TyUint32, this->m_func, true),
            Js::OpCode::BrNeq_A,
            labelHelper,
            ldLen);
#endif

#if INT32VAR
        //
        // dst_32 = MOV length
        // dst_64 = OR dst_64, Js::AtomTag_IntPtr
        //
        Assert(dst->GetType() == TyVar);
        IR::Opnd *dst32 = dst->Copy(this->m_func);
        dst32->SetType(TyInt32);

        // This will clear the top bits.
        InsertMove(dst32, lengthOpnd, ldLen);

        m_lowererMD.GenerateInt32ToVarConversion(dst, ldLen);
#else
        // dst = SHL length, Js::VarTag_Shift  -- restore the var tag on the result
        InsertShift(
            Js::OpCode::Shl_A,
            false /* needFlags */,
            dst,
            lengthOpnd,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            ldLen);

        // dst = ADD dst, AtomTag
        InsertAdd(
            false /* needFlags */,
            dst,
            dst,
            IR::IntConstOpnd::New(Js::AtomTag_Int32, TyUint32, m_func, true),
            ldLen);
#endif

        // JMP $fallthrough
        InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
    }

    // $helper:
    //     (caller generates helper call)
    ldLen->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;

    return true; // fast path was generated, helper call will be in a helper block
}
void
Lowerer::GenerateFastInlineStringCodePointAt(IR::Instr* lastInstr, Func* func, IR::Opnd *strLength, IR::Opnd *srcIndex, IR::RegOpnd *lowerChar, IR::RegOpnd *strPtr)
{
    // Emits the surrogate-pair portion of the String.prototype.codePointAt fast
    // path. On entry, lowerChar already holds the UTF-16 code unit at srcIndex;
    // this code checks whether that unit is a lead surrogate followed by a trail
    // surrogate, and if so combines the two into the full code point in
    // lowerChar. Every failed check branches to the local $charCodeAt label,
    // leaving lowerChar holding the plain char code (charCodeAt semantics).
    //
    //// Required State:
    // strLength - UInt32
    // srcIndex - TyVar if not Address
    // lowerChar - TyMachReg
    // strPtr - Addr
    //// Instructions
    // CMP [strLength], srcIndex + 1
    // JBE charCodeAt
    // CMP lowerChar 0xDC00
    // JGE charCodeAt
    // CMP lowerChar 0xD7FF
    // JLE charCodeAt
    // upperChar = MOVZX [strPtr + srcIndex + 1]
    // CMP upperChar 0xE000
    // JGE charCodeAt
    // CMP lowerChar 0xDBFF
    // JLE charCodeAt
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    // :charCodeAt
    // :done

    // Asserts
    // Arm should change to Uint32 for the strLength
    Assert(strLength->GetType() == TyUint32 || strLength->GetType() == TyMachReg);
    Assert(srcIndex->GetType() == TyVar || srcIndex->IsAddrOpnd());
    Assert(lowerChar->GetType() == TyMachReg || lowerChar->GetType() == TyUint32);
    Assert(strPtr->IsRegOpnd());

    IR::RegOpnd *tempReg = IR::RegOpnd::New(TyMachReg, func);
    IR::LabelInstr *labelCharCodeAt = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::IndirOpnd *tempIndirOpnd;

    if (srcIndex->IsAddrOpnd())
    {
        // Index is a JIT-time constant (tagged int address operand): fold
        // index + 1 now, both for the length check and for the address of the
        // following code unit.
        uint32 length = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) + 1U;
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(length, TyUint32, func), Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, (length) * sizeof(char16), TyUint16, func);
    }
    else
    {
        // Index is a runtime var: untag it into tempReg, compute index + 1,
        // then bounds-check (unsigned compare) against the string length.
        InsertMove(tempReg, srcIndex, lastInstr);
#if INT32VAR
        // On INT32VAR builds the low 32 bits of the var hold the int value;
        // moving through a 32-bit view of the register drops the tag bits.
        IR::Opnd * reg32Bit = tempReg->UseWithNewType(TyInt32, func);
        InsertMove(tempReg, reg32Bit, lastInstr);
        tempReg = reg32Bit->AsRegOpnd();
#else
        // Tagged-shift builds: shift out the var tag to recover the integer.
        InsertShift(Js::OpCode::Shr_A, false, tempReg, tempReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func), lastInstr);
#endif
        InsertAdd(false, tempReg, tempReg, IR::IntConstOpnd::New(1, TyInt32, func), lastInstr);
        InsertCompareBranch(strLength, tempReg, Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        // Widen the index register back to pointer size before using it in an
        // address computation.
        if(tempReg->GetSize() != MachPtr)
        {
            tempReg = tempReg->UseWithNewType(TyMachPtr, func)->AsRegOpnd();
        }
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, tempReg, 1, TyUint16, func);
    }
    // By this point, we have added instructions before labelCharCodeAt to check for extra length required for the surrogate pair
    // The branching for that is already handled, all we have to do now is to check for correct values.

    // Validate char is in range [D800, DBFF]; otherwise just get a charCodeAt
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xDC00, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xD7FF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // upperChar = MOVZX r3, [r1 + r3 * 2] -- this is the value of the upper surrogate pair char
    IR::RegOpnd *upperChar = IR::RegOpnd::New(TyInt32, func);
    InsertMove(upperChar, tempIndirOpnd, lastInstr);

    // Validate upper is in range [DC00, DFFF]; otherwise just get a charCodeAt
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xE000, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xDBFF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // Combine the pair:
    //   (lower - 0xD800) << 10 + upper - 0xDC00 + 0x10000
    // where the last two constants fold together: 0x10000 - 0xDC00 = 0x2400.
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    InsertSub(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0xD800, TyUint32, func), lastInstr);
    InsertShift(Js::OpCode::Shl_A, false, lowerChar, lowerChar, IR::IntConstOpnd::New(10, TyUint32, func), lastInstr);
    InsertAdd(false, lowerChar, lowerChar, upperChar, lastInstr);
    InsertAdd(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0x2400, TyUint32, func), lastInstr);

    lastInstr->InsertBefore(labelCharCodeAt);
}
bool
Lowerer::GenerateFastInlineStringFromCodePoint(IR::Instr* instr)
{
    // Emits an inline fast path for String.fromCodePoint(x) when the single
    // argument is likely a number: untag it and probe the char string cache via
    // GenerateGetSingleCharString. All failure cases fall through to the
    // original CallDirect, which is relocated under the helper label.
    // Always returns true.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence
    // s8.var = StartCall 2 (0x2).i32 #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCodePoint
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect String_FromCodePoint.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyNumber())
    {
        //Trying to generate this code
        // MOV resultOpnd, dst
        // MOV fromCodePointIntArgOpnd, src1
        // SAR fromCodePointIntArgOpnd, Js::VarTag_Shift
        // JAE $Helper
        // CMP fromCodePointIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        // JAE $labelWCharStringCheck <
        // MOV resultOpnd, GetCharStringCache[fromCodePointIntArgOpnd]
        // TST resultOpnd, resultOpnd //Check for null
        // JEQ $helper
        // JMP $Done
        //
        //$labelWCharStringCheck:
        // resultOpnd = Call HelperGetStringForCharCodePoint
        // JMP $Done
        //$helper:
        IR::RegOpnd * resultOpnd = nullptr;
        // Use a temporary result register when dst can't serve as the
        // accumulator: either it isn't a register, or it aliases src1, which
        // the fast path still needs to read.
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }

        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        IR::RegOpnd * fromCodePointIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCodePointIntArgOpnd(fromCodePointIntArgOpnd, instr->m_func);
        InsertMove(fromCodePointIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version.
        fromCodePointIntArgOpnd = GenerateUntagVar(fromCodePointIntArgOpnd, labelHelper, instr);
        GenerateGetSingleCharString(fromCodePointIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, true /* isCodePoint */);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the original CallDirect (and its arg setup) onto the helper path.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }

    return true;
}
bool
Lowerer::GenerateFastInlineStringFromCharCode(IR::Instr* instr)
{
    // Emits an inline fast path for String.fromCharCode(x) when the single
    // argument is likely a number: untag it and probe the char string cache via
    // GenerateGetSingleCharString. All failure cases fall through to the
    // original CallDirect, which is relocated under the helper label.
    // Always returns true.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence
    // s8.var = StartCall 2 (0x2).i32 #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect String_FromCharCode.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyNumber())
    {
        //Trying to generate this code
        // MOV resultOpnd, dst
        // MOV fromCharCodeIntArgOpnd, src1
        // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
        // JAE $Helper
        // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        // JAE $labelWCharStringCheck <
        // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
        // TST resultOpnd, resultOpnd //Check for null
        // JEQ $helper
        // JMP $Done
        //
        //$labelWCharStringCheck:
        // resultOpnd = Call HelperGetStringForCharW
        // JMP $Done
        //$helper:
        IR::RegOpnd * resultOpnd = nullptr;
        // Use a temporary result register when dst can't serve as the
        // accumulator: either it isn't a register, or it aliases src1, which
        // the fast path still needs to read.
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }

        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        IR::RegOpnd * fromCharCodeIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCharCodeIntArgOpnd(fromCharCodeIntArgOpnd, instr->m_func);
        InsertMove(fromCharCodeIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version.
        fromCharCodeIntArgOpnd = GenerateUntagVar(fromCharCodeIntArgOpnd, labelHelper, instr);

        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        GenerateGetSingleCharString(fromCharCodeIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, false /* isCodePoint */);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the original CallDirect (and its arg setup) onto the helper path.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }

    return true;
}
void
Lowerer::GenerateGetSingleCharString(IR::RegOpnd * charCodeOpnd, IR::Opnd * resultOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * doneLabel, IR::Instr * instr, bool isCodePoint)
{
    // Shared tail of the String.fromCharCode / String.fromCodePoint fast paths.
    // charCodeOpnd holds the untagged char code (or code point). First probe the
    // direct char string cache array: below the cache-size limit, a non-null
    // entry is the result and a null entry jumps to labelHelper. At or above the
    // limit, call the runtime helper that services the wide-char / code point
    // caches. Both success paths store to instr's dst and jump to doneLabel;
    // the caller is responsible for inserting labelHelper and doneLabel.
    //
    // MOV cacheReg, CharStringCache
    // CMP charCodeOpnd, Js::ScriptContext::CharStringCacheSize
    // JAE $labelWCharStringCheck <
    // MOV resultOpnd, cacheReg[charCodeOpnd]
    // TST resultOpnd, resultOpnd //Check for null
    // JEQ $helper
    // JMP $Done
    //
    //$labelWCharStringCheck:
    // Arg1 = charCodeOpnd
    // Arg0 = cacheReg
    // resultOpnd = Call HelperGetStringForCharW/CodePoint
    // JMP $Done
    //$helper:
    IR::LabelInstr *labelWCharStringCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    //Try to load from in CharStringCacheA
    IR::RegOpnd *cacheRegOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
    IR::AutoReuseOpnd autoReuseCacheRegOpnd(cacheRegOpnd, instr->m_func);
    // The direct (A) cache must sit at the start of the CharStringCache so the
    // same base pointer works for both the indexed probe and the helper call.
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheRegOpnd, this->LoadLibraryValueOpnd(instr, LibraryValue::ValueCharStringCache), instr);

    // Unsigned compare: out-of-range (including "negative") codes go to the
    // wide-char/code-point helper path.
    InsertCompareBranch(charCodeOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint32, this->m_func), Js::OpCode::BrGe_A, true, labelWCharStringCheck, instr);
    InsertMove(resultOpnd, IR::IndirOpnd::New(cacheRegOpnd, charCodeOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, instr->m_func), instr);
    // A null cache entry means the string hasn't been created yet; defer to
    // the caller's helper path.
    InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrEq_A, labelHelper, instr);
    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);

    instr->InsertBefore(labelWCharStringCheck);
    IR::JnHelperMethod helperMethod;
    if (isCodePoint)
    {
        helperMethod = IR::HelperGetStringForCharCodePoint;
    }
    else
    {
        // The char helper takes a 16-bit char code; truncate before the call.
        InsertMove(charCodeOpnd, charCodeOpnd->UseWithNewType(TyUint16, instr->m_func), instr);
        helperMethod = IR::HelperGetStringForChar;
    }

    //Try to load from in CharStringCacheW or CharStringCacheCodePoint, this is a helper call.
    this->m_lowererMD.LoadHelperArgument(instr, charCodeOpnd);
    this->m_lowererMD.LoadHelperArgument(instr, cacheRegOpnd);

    IR::Instr* helperCallInstr = IR::Instr::New(Js::OpCode::Call, resultOpnd, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    instr->InsertBefore(helperCallInstr);
    this->m_lowererMD.LowerCall(helperCallInstr, 0);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
}
bool
Lowerer::GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr)
{
    // Emits an inline fast path for the global parseInt(x): when x is likely a
    // number and proves to be a tagged int at runtime, parseInt is the
    // identity, so the argument is copied straight to the dst. Any other case
    // falls back to the CallDirect helper path. Always returns true.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence
    // s8.var = StartCall 2 (0x2).i32 #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, global object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to parseInt
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect GlobalObject_ParseInt.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *parseIntArgOpnd = argInstr->GetSrc1();

    if (parseIntArgOpnd->GetValueType().IsLikelyNumber())
    {
        //If likely int check for tagged int and set the dst
        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        if (!parseIntArgOpnd->IsTaggedInt())
        {
            // Not provably a tagged int at JIT time: emit a runtime tagged-int
            // test that branches to the helper on failure.
            this->m_lowererMD.GenerateSmIntTest(parseIntArgOpnd, instr, labelHelper);
        }
        if (instr->GetDst())
        {
            this->m_lowererMD.CreateAssign(instr->GetDst(), parseIntArgOpnd, instr);
        }
        InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the original CallDirect (and its arg setup) onto the helper path.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }

    return true;
}
void
Lowerer::GenerateFastInlineArrayPop(IR::Instr * instr)
{
    // Lowers InlineArrayPop: emit the inline pop fast path when profitable,
    // followed by the helper/bailout tail. Failure cases (length == 0, array
    // test failure) bail out for likely-native arrays -- the runtime helper
    // assumes a Var array and can't handle them -- but simply jump to the
    // shared helper for Var arrays.
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPop);

    IR::Opnd *arrayOpnd = instr->GetSrc1();

    IR::LabelInstr *bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    bool isLikelyNativeArray = arrayOpnd->GetValueType().IsLikelyNativeArray();

    if (ShouldGenerateArrayFastPath(arrayOpnd, false, false, false))
    {
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(isLikelyNativeArray)
        {
            //We bailOut on cases like length == 0, Array Test failing cases (Runtime helper cannot handle these cases)
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We jump to helper on cases like length == 0, Array Test failing cases
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, labelHelper);
        }
        instr->InsertBefore(labelHelper);

        ///JMP to $doneLabel
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    else
    {
        //We assume here that the array will be a Var array. - Runtime Helper calls assume this.
        Assert(!isLikelyNativeArray);
    }

    instr->InsertAfter(doneLabel);
    if(isLikelyNativeArray)
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
    }

    // Emit the runtime-helper call that completes the pop on the slow path.
    GenerateHelperToArrayPopFastPath(instr, doneLabel, bailOutLabelHelper);
}
void
Lowerer::GenerateFastInlineIsArray(IR::Instr * instr)
{
    // Emits an inline fast path for Array.isArray(x): load x's typeId and test
    // it against the [TypeIds_ArrayFirst, TypeIds_ArrayLastWithES5] range,
    // storing the library True/False into dst. Proxy and HostDispatch objects
    // may wrap arrays, so those typeIds escape to the CallDirect helper path.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    IR::Opnd * dst = instr->GetDst();
    Assert(dst);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[1]);

    IR::LabelInstr *helperLabel = InsertLabel(true, instr);
    IR::Instr * insertInstr = helperLabel;
    IR::LabelInstr *doneLabel = InsertLabel(false, instr->m_next);

    // argsOpnd[1] is the value being tested (argsOpnd[0] is the implicit this).
    IR::RegOpnd * src;
    ValueType valueType = argsOpnd[1]->GetValueType();
    if (argsOpnd[1]->IsRegOpnd())
    {
        src = argsOpnd[1]->AsRegOpnd();
    }
    else
    {
        src = IR::RegOpnd::New(argsOpnd[1]->GetType(), m_func);
        InsertMove(src, argsOpnd[1], insertInstr);
    }

    // When profile data says "likely array", the not-array labels are cold and
    // marked as helper labels.
    IR::LabelInstr *checkNotArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());
    IR::LabelInstr *notArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());

    // Tagged values (ints/floats) are never arrays: skip straight to False.
    if (!src->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(src, insertInstr, notArrayLabel);
    }

    // MOV typeOpnd, [opnd + offset(type)]
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeOpnd(typeOpnd, m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func);
    InsertMove(typeOpnd, indirOpnd, insertInstr);

    // MOV typeIdOpnd, [typeOpnd + offset(typeId)]
    IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeIdOpnd(typeIdOpnd, m_func);
    indirOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func);
    InsertMove(typeIdOpnd, indirOpnd, insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayFirst
    // JLT $notArray
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayFirst, TyInt32, m_func),
        Js::OpCode::BrLt_A,
        checkNotArrayLabel,
        insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayLastWithES5
    // JGT $notArray
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayLastWithES5, TyInt32, m_func),
        Js::OpCode::BrGt_A,
        notArrayLabel,
        insertInstr);

    // MOV dst, True
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);

    // JMP $done
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // $checkNotArray:
    // TypeIds below ArrayFirst land here; Proxy and HostDispatch can still
    // report true, so they must take the helper.
    insertInstr->InsertBefore(checkNotArrayLabel);

    // CMP typeIdOpnd, TypeIds_Proxy
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    // The "below ArrayFirst" pre-filter is only valid if Proxy sorts below it.
    CompileAssert(Js::TypeIds_Proxy < Js::TypeIds_ArrayFirst);

    // CMP typeIdOpnd, TypeIds_HostDispatch
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    CompileAssert(Js::TypeIds_HostDispatch < Js::TypeIds_ArrayFirst);

    // $notObjectLabel:
    insertInstr->InsertBefore(notArrayLabel);

    // MOV dst, False
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    RelocateCallDirectToHelperPath(tmpInstr, helperLabel);
}
void
Lowerer::GenerateFastInlineHasOwnProperty(IR::Instr * instr)
{
    // Emits an inline fast path for obj.hasOwnProperty(prop) when prop is a
    // PropertyString: probe the PropertyString's ldElem inline cache against
    // the object's type (inline slots, then aux slots). If prop is a string
    // produced by a for-in loop, a miss additionally checks the for-in
    // enumerator's cached type to answer own-vs-prototype without the helper.
    // Bails out of fast-path generation entirely (plain CallDirect) when the
    // operands can't be strings/objects in registers.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[0] && argsOpnd[1]);

    // No fast path when either operand is disqualified at JIT time.
    if (argsOpnd[1]->GetValueType().IsNotString()
        || argsOpnd[0]->GetValueType().IsNotObject()
        || !argsOpnd[0]->IsRegOpnd()
        || !argsOpnd[1]->IsRegOpnd())
    {
        return;
    }

    IR::RegOpnd * thisObj = argsOpnd[0]->AsRegOpnd();
    IR::RegOpnd * propOpnd = argsOpnd[1]->AsRegOpnd();

    // fast path case where hasOwnProperty is being called using a property name loaded via a for-in loop
    bool generateForInFastpath = propOpnd->GetValueType().IsString()
        && propOpnd->m_sym->m_isSingleDef
        && (propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnEmpty
            || propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnNotEmpty);

    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::LabelInstr * labelHelper = InsertLabel(true, instr);
    // With the for-in fast path, an inline-cache miss gets a second chance at
    // cacheMissLabel before the helper; otherwise a miss goes straight to helper.
    IR::LabelInstr * cacheMissLabel = generateForInFastpath ? IR::LabelInstr::New(Js::OpCode::Label, m_func, true) : labelHelper;
    IR::Instr * insertInstr = labelHelper;

    // GenerateObjectTest(propOpnd, $labelHelper)
    // CMP indexOpnd, PropertyString::`vtable'
    // JNE $helper
    // GenerateObjectTest(thisObj, $labelHelper)
    // MOV inlineCacheOpnd, propOpnd->lsElemInlineCache
    // MOV objectTypeOpnd, thisObj->type
    // GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd) ; loads inline cache for given type
    // GenerateLocalInlineCacheCheck(objectTypeOpnd, inlineCacheOpnd, $notInlineSlotsLabel) ; check for type in inline slots, jump to $notInlineSlotsLabel on failure
    // MOV dst, ValueTrue
    // JMP $done
    // $notInlineSlotsLabel:
    // GenerateLoadTaggedType(objectTypeOpnd, opndTaggedType)
    // GenerateLocalInlineCacheCheck(opndTaggedType, inlineCacheOpnd, $cacheMissLabel) ; check for type in aux slot, jump to $cacheMissLabel on failure
    // MOV dst, ValueTrue
    // JMP $done
    m_lowererMD.GenerateObjectTest(propOpnd, insertInstr, labelHelper);

    // Vtable check: only PropertyString carries the ldElem inline cache used below.
    InsertCompareBranch(IR::IndirOpnd::New(propOpnd, 0, TyMachPtr, m_func), LoadVTableValueOpnd(insertInstr, VTableValue::VtablePropertyString), Js::OpCode::BrNeq_A, labelHelper, insertInstr);

    m_lowererMD.GenerateObjectTest(thisObj, insertInstr, labelHelper);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(propOpnd, Js::PropertyString::GetOffsetOfLdElemInlineCache(), TyMachPtr, m_func), insertInstr);

    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(thisObj, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), insertInstr);

    GenerateDynamicLoadPolymorphicInlineCacheSlot(insertInstr, inlineCacheOpnd, objectTypeOpnd);

    IR::LabelInstr * notInlineSlotsLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // Inline-slot hit: property is an own property.
    m_lowererMD.GenerateLocalInlineCacheCheck(insertInstr, objectTypeOpnd, inlineCacheOpnd, notInlineSlotsLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    insertInstr->InsertBefore(notInlineSlotsLabel);

    // Aux-slot hit (cache stores the tagged type for aux-slot properties).
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, m_func);
    m_lowererMD.GenerateLoadTaggedType(insertInstr, objectTypeOpnd, opndTaggedType);
    m_lowererMD.GenerateLocalInlineCacheCheck(insertInstr, opndTaggedType, inlineCacheOpnd, cacheMissLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    if (!generateForInFastpath)
    {
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
        return;
    }

    insertInstr->InsertBefore(cacheMissLabel);

    // CMP forInEnumeratorOpnd->canUseJitFastPath, 0
    // JEQ $labelHelper
    // MOV cachedDataTypeOpnd, forInEnumeratorOpnd->enumeratorInitialType
    // CMP thisObj->type, cachedDataTypeOpnd
    // JNE $labelHelper
    // CMP forInEnumeratorOpnd->enumeratingPrototype, 0
    // JNE $falseLabel
    // MOV dst, True
    // JMP $doneLabel
    // $falseLabel: [helper]
    // MOV dst, False
    // JMP $doneLabel
    // $labelHelper: [helper]
    // CallDirect code
    // ...
    // $doneLabel:

    // propOpnd was defined by BrOnEmpty/BrOnNotEmpty; its src1 is the for-in enumerator.
    IR::Opnd * forInEnumeratorOpnd = argsOpnd[1]->AsRegOpnd()->m_sym->m_instrDef->GetSrc1();

    // go to helper if we can't use JIT fastpath
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrEq_A, labelHelper, insertInstr);

    // go to helper if initial type is not same as the object we are querying
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataTypeOpnd, GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), insertInstr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(thisObj, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, insertInstr);

    // if we haven't yet gone to helper, then we can check if we are enumerating the prototype to know if property is an own property
    IR::LabelInstr *falseLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd * enumeratingPrototype = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratingPrototype(), TyInt8);
    InsertCompareBranch(enumeratingPrototype, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrNeq_A, falseLabel, insertInstr);

    // assume true is the main path
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // load false on helper path
    insertInstr->InsertBefore(falseLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
  15653. bool
  15654. Lowerer::ShouldGenerateStringReplaceFastPath(IR::Instr * callInstr, IntConstType argCount)
  15655. {
  15656. // a.replace(b,c)
  15657. // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
  15658. //
  15659. // argout sequence:
  15660. // arg1(s12)<0>.var = ArgOut_A s2.var, s11.var #0014 <---- a
  15661. // arg2(s13)<4>.var = ArgOut_A s3.var, arg1(s12)<0>.var #0018 <---- b
  15662. // arg3(s14)<8>.var = ArgOut_A s4.var, arg2(s13)<4>.var #001c <---- c
  15663. // s0[LikelyString].var = CallI s5[ffunc].var, arg3(s14)<8>.var #0020
  15664. IR::Opnd *linkOpnd = callInstr->GetSrc2();
  15665. Assert(argCount == 2);
  15666. while(linkOpnd->IsSymOpnd())
  15667. {
  15668. IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
  15669. StackSym *sym = src2->m_sym->AsStackSym();
  15670. Assert(sym->m_isSingleDef);
  15671. IR::Instr *argInstr = sym->m_instrDef;
  15672. Assert(argCount >= 0);
  15673. // check to see if 'a' and 'c' are likely strings
  15674. if((argCount == 2 || argCount == 0) && (!argInstr->GetSrc1()->GetValueType().IsLikelyString()))
  15675. {
  15676. return false;
  15677. }
  15678. // we want 'b' to be regex. Don't generate fastpath if it is a tagged int
  15679. if((argCount == 1) && (argInstr->GetSrc1()->IsTaggedInt()))
  15680. {
  15681. return false;
  15682. }
  15683. argCount--;
  15684. linkOpnd = argInstr->GetSrc2();
  15685. }
  15686. return true;
  15687. }
bool
Lowerer::GenerateFastReplace(IR::Opnd* strOpnd, IR::Opnd* src1, IR::Opnd* src2, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // a.replace(b,c)
    // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
    //
    // strOpnd --> a    (the 'this' string)
    // src1    --> b    (the pattern, expected to be a JavascriptRegExp)
    // src2    --> c    (the replacement, expected to be a string)
    //
    // All runtime type checks emitted here jump to labelHelper (the slow path)
    // on failure; on success the arguments are pushed and the call is converted
    // into a direct helper call.

    IR::Opnd * callDst = callInstr->GetDst();

    Assert(strOpnd->GetValueType().IsLikelyString() && src2->GetValueType().IsLikelyString());

    if(!strOpnd->GetValueType().IsString())
    {
        // 'a' is only *likely* a string: emit a runtime string check.
        if(!strOpnd->IsRegOpnd())
        {
            // GenerateStringTest needs a register operand; copy into one.
            IR::RegOpnd *strOpndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(strOpndReg, strOpnd, insertInstr);
            strOpnd = strOpndReg;
        }
        this->GenerateStringTest(strOpnd->AsRegOpnd(), insertInstr, labelHelper);
    }

    if(!src1->IsNotTaggedValue())
    {
        // 'b' could be a tagged value; only a real object pointer can be a regex.
        m_lowererMD.GenerateObjectTest(src1, insertInstr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptRegExp);

    // Verify 'b' is a JavascriptRegExp by comparing its vtable:
    //   cmp [regex], vtableAddress
    //   jne $labelHelper
    if(!src1->IsRegOpnd())
    {
        IR::RegOpnd *src1Reg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(src1Reg, src1, insertInstr);
        src1 = src1Reg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(src1->AsRegOpnd(), 0, TyMachPtr, insertInstr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertInstr);

    if(!src2->GetValueType().IsString())
    {
        // 'c' is only *likely* a string: emit a runtime string check as well.
        if(!src2->IsRegOpnd())
        {
            IR::RegOpnd *src2Reg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(src2Reg, src2, insertInstr);
            src2 = src2Reg;
        }
        this->GenerateStringTest(src2->AsRegOpnd(), insertInstr, labelHelper);
    }

    //scriptContext, pRegEx, pThis, pReplace (to be pushed in reverse order)

    // pReplace, pThis, pRegEx
    this->m_lowererMD.LoadHelperArgument(insertInstr, src2);
    this->m_lowererMD.LoadHelperArgument(insertInstr, strOpnd);
    this->m_lowererMD.LoadHelperArgument(insertInstr, src1);

    // script context
    LoadScriptContext(insertInstr);

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, insertInstr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    insertInstr->InsertBefore(helperCallInstr);

    // Pick the helper variant based on whether the call's result is consumed.
    if(callDst)
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultUsed);
    }
    else
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultNotUsed);
    }

    return true;
}
  15761. ///----
void
Lowerer::GenerateFastInlineStringSplitMatch(IR::Instr * instr)
{
    // a.split(b,c (optional) )
    // We want to emit the fast path when
    //   1. c is not present, and
    //   2. 'a' is a string and 'b' is a regex.
    //
    // a.match(b)
    // We want to emit the fast path when 'a' is a string and 'b' is a regex.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    IR::Opnd * callDst = instr->GetDst();

    //helperCallOpnd
    IR::Opnd * src1 = instr->GetSrc1();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if(!instr->FetchOperands(argsOpnd, 2))
    {
        // Could not pattern-match exactly two arguments (e.g. an explicit split
        // limit was passed) -- no fast path.
        return;
    }

    if(!argsOpnd[0]->GetValueType().IsLikelyString() || argsOpnd[1]->IsTaggedInt())
    {
        // 'a' must at least be likely a string, and a tagged int can never be a regex.
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!argsOpnd[0]->GetValueType().IsString())
    {
        // 'a' is only *likely* a string: emit a runtime string check.
        if(!argsOpnd[0]->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, argsOpnd[0], instr);
            argsOpnd[0] = opndReg;
        }
        this->GenerateStringTest(argsOpnd[0]->AsRegOpnd(), instr, labelHelper);
    }

    if(!argsOpnd[1]->IsNotTaggedValue())
    {
        // 'b' could be a tagged value; only a real object pointer can be a regex.
        m_lowererMD.GenerateObjectTest(argsOpnd[1], instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // Verify 'b' is a JavascriptRegExp by comparing its vtable:
    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!argsOpnd[1]->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, argsOpnd[1], instr);
        argsOpnd[1] = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(argsOpnd[1]->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    // [stackAllocationPointer, ]scriptcontext, regexp, input[, limit] (to be pushed in reverse order)

    if(src1->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperString_Split)
    {
        //limit
        //As we are optimizing only for two operands, make limit UINT_MAX
        IR::Opnd* limit = IR::IntConstOpnd::New(UINT_MAX, TyUint32, instr->m_func);
        this->m_lowererMD.LoadHelperArgument(instr, limit);
    }

    //input, regexp
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[0]);
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[1]);

    // script context
    LoadScriptContext(instr);

    IR::JnHelperMethod helperMethod = IR::JnHelperMethod::HelperInvalid;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst && instr->dstIsTempObject)
    {
        // Result is used and may be a temp object: pass a stack-allocated result
        // array to the ...MayBeTemp helper variant.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod = IR::JnHelperMethod::HelperRegExp_SplitResultUsedAndMayBeTemp;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod = IR::JnHelperMethod::HelperRegExp_MatchResultUsedAndMayBeTemp;
                break;

            default:
                Assert(false);
                __assume(false);
        }

        // Allocate some space on the stack for the result array
        IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
        autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
        stackAllocationOpnd->SetValueType(callDst->GetValueType());
        GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
        m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
    }
    else
    {
        // Otherwise choose the helper by which builtin this is and whether the
        // result is consumed at all.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_SplitResultUsed
                        : IR::JnHelperMethod::HelperRegExp_SplitResultNotUsed;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_MatchResultUsed
                        : IR::JnHelperMethod::HelperRegExp_MatchResultNotUsed;
                break;

            default:
                Assert(false);
                __assume(false);
        }
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    // Stitch the fast path around the original CallDirect: fast path branches
    // over the relocated slow-path call sequence to doneLabel.
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
void
Lowerer::GenerateFastInlineRegExpExec(IR::Instr * instr)
{
    // a.exec(b)
    // We want to emit the fast path when 'a' is a regex and 'b' is a string
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    IR::Opnd * callDst = instr->GetDst();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if (!instr->FetchOperands(argsOpnd, 2))
    {
        // Could not pattern-match exactly two operands -- no fast path.
        return;
    }

    IR::Opnd *opndString = argsOpnd[1];
    if(!opndString->GetValueType().IsLikelyString() || argsOpnd[0]->IsTaggedInt())
    {
        // 'b' must at least be likely a string, and a tagged int can never be a regex.
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!opndString->GetValueType().IsString())
    {
        // 'b' is only *likely* a string: emit a runtime string check.
        if(!opndString->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, opndString, instr);
            opndString = opndReg;
        }
        this->GenerateStringTest(opndString->AsRegOpnd(), instr, labelHelper);
    }

    IR::Opnd *opndRegex = argsOpnd[0];
    if(!opndRegex->IsNotTaggedValue())
    {
        // 'a' could be a tagged value; only a real object pointer can be a regex.
        m_lowererMD.GenerateObjectTest(opndRegex, instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // Verify 'a' is a JavascriptRegExp by comparing its vtable:
    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!opndRegex->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, opndRegex, instr);
        opndRegex = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRegex->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    if (!PHASE_OFF(Js::ExecBOIFastPathPhase, m_func))
    {
        // Begin-of-input (BOI) literal fast path: when the regex program is a
        // BOI 2-char literal and the regex is not global, a mismatch can be
        // detected inline by comparing the input's first two characters against
        // the literal, skipping the regex engine entirely on the no-match path.

        // Load pattern from regex operand
        IR::RegOpnd *opndPattern = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndPattern,
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, m_func),
            instr);

        // Load program from pattern
        IR::RegOpnd *opndProgram = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndProgram,
            IR::IndirOpnd::New(opndPattern, offsetof(UnifiedRegex::RegexPattern, rep) + offsetof(UnifiedRegex::RegexPattern::UnifiedRep, program), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelFastHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // We want the program's tag to be BOILiteral2Tag
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)UnifiedRegex::Program::GetOffsetOfTag(), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::Program::GetBOILiteral2Tag(), TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        // Test the program's flags for "global"; global regexes take the normal helper path.
        InsertTestBranch(
            IR::IndirOpnd::New(opndProgram, offsetof(UnifiedRegex::Program, flags), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::GlobalRegexFlag, TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        IR::LabelInstr *labelNoMatch = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // If string length < 2...
        InsertCompareBranch(
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
            IR::IntConstOpnd::New(2, TyUint32, m_func),
            Js::OpCode::BrLt_A,
            labelNoMatch,
            instr);

        // ...or the DWORD doesn't match the pattern...
        IR::RegOpnd *opndBuffer = IR::RegOpnd::New(TyMachReg, m_func);
        LowererMD::CreateAssign(
            opndBuffer,
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelGotString = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // m_pszValue may be null; call String_GetSz to obtain the flat buffer in that case.
        InsertTestBranch(opndBuffer, opndBuffer, Js::OpCode::BrNeq_A, labelGotString, instr);

        m_lowererMD.LoadHelperArgument(instr, opndString);
        IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, opndBuffer, IR::HelperCallOpnd::New(IR::HelperString_GetSz, m_func), m_func);
        instr->InsertBefore(instrCall);
        m_lowererMD.LowerCall(instrCall, 0);

        instr->InsertBefore(labelGotString);

        // Compare the first two characters (one DWORD) of the input with the literal.
        IR::RegOpnd *opndBufferDWORD = IR::RegOpnd::New(TyUint32, m_func);
        LowererMD::CreateAssign(
            opndBufferDWORD,
            IR::IndirOpnd::New(opndBuffer, 0, TyUint32, m_func),
            instr);

        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)(UnifiedRegex::Program::GetOffsetOfRep() + UnifiedRegex::Program::GetOffsetOfBOILiteral2Literal()), TyUint32, m_func),
            opndBufferDWORD,
            Js::OpCode::BrEq_A,
            labelFastHelper,
            instr);

        // ...then set the last index to 0...
        instr->InsertBefore(labelNoMatch);

        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, m_func),
            IR::AddrOpnd::NewNull(m_func),
            instr);
        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, m_func),
            IR::IntConstOpnd::New(0, TyUint32, m_func),
            instr);

        // ...and set the dst to null...
        if (callDst)
        {
            LowererMD::CreateAssign(
                callDst,
                LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
                instr);
        }

        // ...and we're done.
        this->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelFastHelper);
    }

    // [stackAllocationPointer, ]scriptcontext, regexp, string (to be pushed in reverse order)

    //string, regexp
    this->m_lowererMD.LoadHelperArgument(instr, opndString);
    this->m_lowererMD.LoadHelperArgument(instr, opndRegex);

    // script context
    LoadScriptContext(instr);

    // Choose the helper variant by result usage / temp-object status.
    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst)
    {
        if(instr->dstIsTempObject)
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsedAndMayBeTemp;

            // Allocate some space on the stack for the result array
            IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
            autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
            stackAllocationOpnd->SetValueType(callDst->GetValueType());
            GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
            m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
        }
        else
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsed;
        }
    }
    else
    {
        helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultNotUsed;
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    // Stitch the fast path around the original CallDirect slow path.
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
  16062. void Lowerer::GenerateTruncWithCheck(IR::Instr* instr)
  16063. {
  16064. Assert(instr->GetSrc1()->IsFloat());
  16065. if (instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32())
  16066. {
  16067. m_lowererMD.GenerateTruncWithCheck(instr);
  16068. }
  16069. else
  16070. {
  16071. Assert(instr->GetDst()->IsInt64());
  16072. LoadScriptContext(instr);
  16073. if (instr->GetSrc1()->IsFloat32())
  16074. {
  16075. m_lowererMD.LoadFloatHelperArgument(instr, instr->GetSrc1());
  16076. }
  16077. else
  16078. {
  16079. m_lowererMD.LoadDoubleHelperArgument(instr, instr->GetSrc1());
  16080. }
  16081. IR::JnHelperMethod helperList[2][2] = { IR::HelperF32TOI64, IR::HelperF32TOU64, IR::HelperF64TOI64 ,IR::HelperF64TOU64 };
  16082. IR::JnHelperMethod helper = helperList[instr->GetSrc1()->GetType() != TyFloat32][instr->GetDst()->GetType() == TyUint64];
  16083. instr->UnlinkSrc1();
  16084. this->m_lowererMD.ChangeToHelperCall(instr, helper);
  16085. }
  16086. }
  16087. void
  16088. Lowerer::RelocateCallDirectToHelperPath(IR::Instr* argoutInlineSpecialized, IR::LabelInstr* labelHelper)
  16089. {
  16090. IR::Opnd *linkOpnd = argoutInlineSpecialized->GetSrc2(); //ArgOut_A_InlineSpecialized src2; link to actual argouts.
  16091. argoutInlineSpecialized->Unlink();
  16092. labelHelper->InsertAfter(argoutInlineSpecialized);
  16093. while(linkOpnd->IsSymOpnd())
  16094. {
  16095. IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
  16096. StackSym *sym = src2->m_sym->AsStackSym();
  16097. Assert(sym->m_isSingleDef);
  16098. IR::Instr *argInstr = sym->m_instrDef;
  16099. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  16100. argInstr->Unlink();
  16101. labelHelper->InsertAfter(argInstr);
  16102. linkOpnd = argInstr->GetSrc2();
  16103. }
  16104. // Move startcall
  16105. Assert(linkOpnd->IsRegOpnd());
  16106. StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
  16107. Assert(sym->m_isSingleDef);
  16108. IR::Instr *startCall = sym->m_instrDef;
  16109. Assert(startCall->m_opcode == Js::OpCode::StartCall);
  16110. startCall->Unlink();
  16111. labelHelper->InsertAfter(startCall);
  16112. }
  16113. bool
  16114. Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFunction index)
  16115. {
  16116. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  16117. //CallDirect src2
  16118. IR::Opnd * linkOpnd = instr->GetSrc2();
  16119. //ArgOut_A_InlineSpecialized
  16120. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  16121. IR::Opnd * argsOpnd[2] = {0};
  16122. bool result = instr->FetchOperands(argsOpnd, 2);
  16123. Assert(result);
  16124. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16125. instr->InsertAfter(doneLabel);
  16126. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  16127. bool success = this->m_lowererMD.GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
  16128. instr, instr, labelHelper, doneLabel);
  16129. instr->InsertBefore(labelHelper);
  16130. if (!success)
  16131. {
  16132. return false;
  16133. }
  16134. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  16135. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  16136. return true;
  16137. }
  16138. void
  16139. Lowerer::GenerateCtz(IR::Instr* instr)
  16140. {
  16141. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
  16142. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
  16143. m_lowererMD.GenerateCtz(instr);
  16144. }
  16145. void
  16146. Lowerer::GeneratePopCnt(IR::Instr* instr)
  16147. {
  16148. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  16149. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
  16150. m_lowererMD.GeneratePopCnt(instr);
  16151. }
  16152. void
  16153. Lowerer::GenerateFastInlineMathClz(IR::Instr* instr)
  16154. {
  16155. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
  16156. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
  16157. m_lowererMD.GenerateClz(instr);
  16158. }
  16159. void
  16160. Lowerer::GenerateFastInlineMathImul(IR::Instr* instr)
  16161. {
  16162. IR::Opnd* src1 = instr->GetSrc1();
  16163. IR::Opnd* src2 = instr->GetSrc2();
  16164. IR::Opnd* dst = instr->GetDst();
  16165. Assert(dst->IsInt32());
  16166. Assert(src1->IsInt32());
  16167. Assert(src2->IsInt32());
  16168. IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, dst, src1, src2, instr->m_func);
  16169. instr->InsertBefore(imul);
  16170. LowererMD::Legalize(imul);
  16171. instr->Remove();
  16172. }
  16173. void
  16174. Lowerer::LowerReinterpretPrimitive(IR::Instr* instr)
  16175. {
  16176. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  16177. IR::Opnd* src1 = instr->GetSrc1();
  16178. IR::Opnd* dst = instr->GetDst();
  16179. Assert(dst->GetSize() == src1->GetSize());
  16180. Assert((dst->IsFloat32() && src1->IsInt32()) ||
  16181. (dst->IsInt32() && src1->IsFloat32()) ||
  16182. (dst->IsInt64() && src1->IsFloat64()) ||
  16183. (dst->IsFloat64() && src1->IsInt64()) );
  16184. m_lowererMD.EmitReinterpretPrimitive(dst, src1, instr);
  16185. instr->Remove();
  16186. }
  16187. void
  16188. Lowerer::GenerateFastInlineMathFround(IR::Instr* instr)
  16189. {
  16190. IR::Opnd* src1 = instr->GetSrc1();
  16191. IR::Opnd* dst = instr->GetDst();
  16192. Assert(dst->IsFloat());
  16193. Assert(src1->IsFloat());
  16194. IR::Instr* fcvt64to32 = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src1, instr->m_func);
  16195. instr->InsertBefore(fcvt64to32);
  16196. LowererMD::Legalize(fcvt64to32);
  16197. if (dst->IsFloat64())
  16198. {
  16199. IR::Instr* fcvt32to64 = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, dst, instr->m_func);
  16200. instr->InsertBefore(fcvt32to64);
  16201. LowererMD::Legalize(fcvt32to64);
  16202. }
  16203. instr->Remove();
  16204. return;
  16205. }
  16206. bool
  16207. Lowerer::GenerateFastInlineStringReplace(IR::Instr * instr)
  16208. {
  16209. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  16210. //CallDirect src2
  16211. IR::Opnd * linkOpnd = instr->GetSrc2();
  16212. //ArgOut_A_InlineSpecialized
  16213. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  16214. IR::Opnd * argsOpnd[3] = {0};
  16215. bool result = instr->FetchOperands(argsOpnd, 3);
  16216. Assert(result);
  16217. AnalysisAssert(argsOpnd[0] && argsOpnd[1] && argsOpnd[2]);
  16218. if (!argsOpnd[0]->GetValueType().IsLikelyString()
  16219. || argsOpnd[1]->GetValueType().IsNotObject()
  16220. || !argsOpnd[2]->GetValueType().IsLikelyString())
  16221. {
  16222. return false;
  16223. }
  16224. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16225. instr->InsertAfter(doneLabel);
  16226. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  16227. bool success = this->GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2],
  16228. instr, instr, labelHelper, doneLabel);
  16229. instr->InsertBefore(labelHelper);
  16230. if (!success)
  16231. {
  16232. return false;
  16233. }
  16234. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  16235. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  16236. return true;
  16237. }
  16238. #ifdef ENABLE_DOM_FAST_PATH
  16239. /*
  16240. Lower the DOMFastPathGetter opcode
  16241. We have inliner generated bytecode:
  16242. (dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
  16243. (dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
  16244. method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
  16245. We'll convert it to a JavascriptFunction entry method call:
  16246. CALL Helper funcObject CallInfo(CallFlags_Value, 3) thisObj
  16247. */
void
Lowerer::LowerFastInlineDOMFastPathGetter(IR::Instr* instr)
{
    IR::Opnd* helperOpnd = instr->UnlinkSrc1();
    Assert(helperOpnd->IsHelperCallOpnd());

    // src2 links to the second ExtendArg_A (funcObject); its src2 links back
    // to the first ExtendArg_A (thisObject), whose src2 is null.
    IR::Opnd *linkOpnd = instr->UnlinkSrc2();
    Assert(linkOpnd->IsRegOpnd());
    IR::Instr* prevInstr = linkOpnd->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);

    IR::Opnd* funcObj = prevInstr->GetSrc1();
    Assert(funcObj->IsRegOpnd());

    // If the Extended_arg was CSE's across a loop or hoisted out of a loop,
    // adding a new reference down here might cause funcObj to now be liveOnBackEdge.
    // Use the addToLiveOnBackEdgeSyms bit vector to add it to a loop if we encounter one.
    // We'll clear it once we reach the Extended arg.
    this->addToLiveOnBackEdgeSyms->Set(funcObj->AsRegOpnd()->m_sym->m_id);

    Assert(prevInstr->GetSrc2() != nullptr);
    prevInstr = prevInstr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* thisObj = prevInstr->GetSrc1();
    Assert(prevInstr->GetSrc2() == nullptr);
    Assert(thisObj->IsRegOpnd());
    // thisObj needs the same live-on-back-edge treatment as funcObj above.
    this->addToLiveOnBackEdgeSyms->Set(thisObj->AsRegOpnd()->m_sym->m_id);

    // Build the constant CallInfo for the getter invocation.
    const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_Value, 1, m_func);

    // Push helper arguments: thisObj, callInfo, funcObj.
    m_lowererMD.LoadHelperArgument(instr, thisObj);
    m_lowererMD.LoadHelperArgument(instr, info);
    m_lowererMD.LoadHelperArgument(instr, funcObj);

    // Convert the DOMFastPathGetter instruction into a direct call to the
    // helper named by the original HelperCallOpnd, with 3 arguments.
    instr->m_opcode = Js::OpCode::Call;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperOpnd->AsHelperCallOpnd()->m_fnHelper, 3, m_func);
    instr->SetSrc1(helperCallOpnd);

    m_lowererMD.LowerCall(instr, 3); // we have funcobj, callInfo, and this.
}
  16280. #endif
void
Lowerer::GenerateFastInlineArrayPush(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush);

    IR::Opnd * baseOpnd = instr->GetSrc1();   // the array being pushed onto
    IR::Opnd * srcOpnd = instr->GetSrc2();    // the element being pushed

    // push() returns the new length; only produce it when the result is consumed.
    bool returnLength = false;
    if(instr->GetDst())
    {
        returnLength = true;
    }

    IR::LabelInstr * bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    //Don't Generate fast path according to ShouldGenerateArrayFastPath()
    //AND, Don't Generate fast path if the array is LikelyNative and the element is not specialized
    if(ShouldGenerateArrayFastPath(baseOpnd, false, false, false) &&
        !(baseOpnd->GetValueType().IsLikelyNativeArray() && srcOpnd->IsVar()))
    {
        GenerateFastPush(baseOpnd, srcOpnd, instr, instr, labelHelper, doneLabel, bailOutLabelHelper, returnLength);

        // Fast path succeeded: branch over the helper path.
        instr->InsertBefore(labelHelper);
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }

    if(baseOpnd->GetValueType().IsLikelyNativeArray())
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
        InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabelHelper);
    }

    // Lower the remaining instr into the runtime array-push helper call.
    GenerateHelperToArrayPushFastPath(instr, bailOutLabelHelper);
}
bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //     TEST baseOpnd, AtomTag                     -- check baseOpnd not tagged int
    //     JNE $helper
    //     CMP [baseOpnd], JavascriptArray::`vtable'  -- check baseOpnd isArray
    //     JNE $helper
    //     MOV r2, [baseOpnd + offset(length)]        -- Load array length

    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);

    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Array-ness not proven: emit the runtime array test (failure goes to bailout).
        arrayOpnd = GenerateArrayTest(baseOpnd, bailOutLabelHelper, bailOutLabelHelper, callInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse an already-available length sym from the array reg opnd, if any.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    IR::AutoReuseOpnd autoReuseMutableArrayLengthOpnd;
    {
        // Copy the length into a register that may be mutated (decremented below).
        IR::RegOpnd *const mutableArrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseMutableArrayLengthOpnd.Initialize(mutableArrayLengthOpnd, m_func);
        if(arrayLengthOpnd)
        {
            // mov mutableArrayLength, arrayLength
            InsertMove(mutableArrayLengthOpnd, arrayLengthOpnd, callInstr);
        }
        else
        {
            // MOV mutableArrayLength, [array + offset(length)] -- Load array length
            // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
            InsertMove(
                mutableArrayLengthOpnd,
                IR::IndirOpnd::New(
                    arrayOpnd,
                    Js::JavascriptArray::GetOffsetOfLength(),
                    mutableArrayLengthOpnd->GetType(),
                    this->m_func),
                callInstr);
        }
        arrayLengthOpnd = mutableArrayLengthOpnd;
    }

    // Empty array: a pop cannot be handled here; go to the bailout path.
    InsertCompareBranch(arrayLengthOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), Js::OpCode::BrEq_A, true, bailOutLabelHelper, callInstr);

    // length - 1 is the index of the element to pop.
    InsertSub(false, arrayLengthOpnd, arrayLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func),callInstr);

    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            callInstr,
            false,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            true /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            false/* = returnLength */,
            bailOutLabelHelper /* = bailOutLabelInstr*/);
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);
    return true;
}
// Emits the inline fast path for Array.prototype.push on a JavascriptArray:
// verifies the base is an array when its value type is not already proven,
// loads the current length, and stores 'src' at [array + length] through the
// common fast-element helper (which updates length/segment length).
// Any case the fast path cannot handle branches to labelHelper; bailout cases
// use bailOutLabelHelper. 'returnLength' asks the helper to also produce the
// post-push length into insertInstr's dst. Always returns true.
bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper, bool returnLength)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //  TEST baseOpnd, AtomTag                     -- check baseOpnd not tagged int
    //  JNE $helper
    //  CMP [baseOpnd], JavascriptArray::`vtable'  -- check baseOpnd isArray
    //  JNE $helper
    //  MOV r2, [baseOpnd + offset(length)]        -- Load array length
    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);

    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not proven to be an array: emit a runtime array test, then treat the
        // result as a definite object array that may have missing values.
        arrayOpnd = GenerateArrayTest(baseOpnd, labelHelper, labelHelper, insertInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse a length sym already tracked on the array operand, if any,
        // to avoid re-loading the length from memory.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    if(!arrayLengthOpnd)
    {
        // MOV arrayLength, [array + offset(length)] -- Load array length
        // We know this index is safe, so mark it as UInt32 to avoid unnecessary conversion/checks
        arrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        InsertMove(
            arrayLengthOpnd,
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), arrayLengthOpnd->GetType(), this->m_func),
            insertInstr);
    }

    // [array + length] is the slot the pushed value will be stored into.
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    if (returnLength && src->IsEqual(insertInstr->GetDst()))
    {
        //If the dst is same as the src, then dst is going to be overridden by GenerateFastElemICommon in process of updating the length.
        //Save it in a temp register.
        IR::RegOpnd *opnd = IR::RegOpnd::New(src->GetType(), this->m_func);
        InsertMove(opnd, src, insertInstr);
        src = opnd;
    }

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            insertInstr,
            true,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            false /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            returnLength,
            bailOutLabelHelper);

    Assert(!isTypedArrayElement);
    Assert(indirOpnd);

    //  MOV [r3 + r2], src
    InsertMoveWithBarrier(indirOpnd, src, insertInstr);

    return true;
}
// Emits ArgOut_A_Dynamic instructions that forward the current inlinee's
// stack arguments (excluding 'this') to an outgoing call. Loads the address
// of the inlinee's first real argument slot, then emits the actuals from last
// to first into fixed outgoing positions. Returns an IntConstOpnd holding the
// full actual count (including 'this') for callinfo generation.
IR::Opnd*
Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    Assert(callInstr->m_func->IsInlinee());
    Func *func = callInstr->m_func;
    uint32 actualCount = func->actualCount - 1; // don't count this pointer
    Assert(actualCount < Js::InlineeCallInfo::MaxInlineeArgoutCount);

    // Point past 'this' by bumping the argv slot's offset by one machine pointer.
    const auto firstRealArgStackSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, func);

    // argInOpnd = &firstRealArg
    IR::RegOpnd* argInOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseArgInOpnd(argInOpnd, func);
    InsertLea(argInOpnd, firstArg, callInstr);

    IR::IndirOpnd *argIndirOpnd = nullptr;
    IR::Instr* argout = nullptr;

#if defined(_M_IX86)
    // Maintain alignment: push a dummy slot when the outgoing count is even.
    if ((actualCount & 1) == 0)
    {
        IR::Instr *alignPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        alignPush->SetSrc1(IR::IntConstOpnd::New(1, TyInt32, this->m_func));
        callInstr->InsertBefore(alignPush);
    }
#endif

    // Emit the actuals from last to first.
    for(uint i = actualCount; i > 0; i--)
    {
        argIndirOpnd = IR::IndirOpnd::New(argInOpnd, (i - 1) * MachPtr, TyMachReg, func);
        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(argIndirOpnd);
        callInstr->InsertBefore(argout);
        // i represents the ith argument from the actuals, which is i + 3
        // counting this, callInfo and the function object.
        this->m_lowererMD.LoadDynamicArgument(argout, i + 3);
    }
    return IR::IntConstOpnd::New(func->actualCount, TyMachReg, func);
}
// For AMD64 and ARM only.
// Emits a runtime loop that argouts the elements of a spread array for an
// inline spread call. indexOpnd holds the element count on entry and is
// decremented in the loop; arrayElementsStartOpnd points at the head
// segment's elements. The loop handles positions > 1; the final element is
// emitted separately at fixed outgoing position 4 (after function object,
// callinfo and 'this').
void
Lowerer::LowerInlineSpreadArgOutLoopUsingRegisters(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;

    // Skip the loop entirely when only one element remains.
    IR::LabelInstr *oneArgLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrEq_A, true, oneArgLabel, callInstr);

    IR::LabelInstr *startLoopLabel = InsertLoopTopLabel(callInstr);
    Loop * loop = startLoopLabel->GetLoop();
    // Both the index and the elements base are read on the back edge.
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);

    InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);

    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);

    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);

    // X64 requires a reg opnd
    IR::RegOpnd *elemRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    LowererMD::CreateAssign(elemRegOpnd, elemPtrOpnd, callInstr);
    argout->SetSrc1(elemRegOpnd);
    argout->SetSrc2(indexOpnd);
    callInstr->InsertBefore(argout);
    // Position of the argout is derived from the current index (src2).
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    // Loop until the index reaches 1.
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrNeq_A, true, startLoopLabel, callInstr);

    // Emit final argument into register 4 on AMD64 and ARM.
    // NOTE(review): elemPtrOpnd is reused here with whatever value indexOpnd
    // holds at this point — relies on LoadDynamicArgument/the MD lowering for
    // the exact element addressed; verify against LowererMD.
    callInstr->InsertBefore(oneArgLabel);
    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object
}
// Lowers a CallIDynamicSpread: a call whose arguments come from a single
// spread array (plus an explicit 'this'). Walks the arg chain
// (ArgOut_A_SpreadArg -> this ArgOut -> StartCall), removes those
// instructions, emits a runtime loop that argouts each array element, and
// finally lowers the call as a CallIDynamic.
// Not supported inside inlinees — throws a rejit exception.
IR::Instr *
Lowerer::LowerCallIDynamicSpread(IR::Instr *callInstr, ushort callFlags)
{
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamicSpread);

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    Func *const func = callInstr->m_func;

    if (func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // src2 of the call links to the ArgOut_A_SpreadArg holding the array.
    IR::Instr *spreadArrayInstr = callInstr;
    IR::SymOpnd *argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);
    spreadArrayInstr = argLinkSym->m_instrDef;
    Assert(spreadArrayInstr->m_opcode == Js::OpCode::ArgOut_A_SpreadArg);

    // Make sure the spread array lives in a register.
    IR::RegOpnd *arrayOpnd = nullptr;
    IR::Opnd *arraySrcOpnd = spreadArrayInstr->UnlinkSrc1();
    if (!arraySrcOpnd->IsRegOpnd())
    {
        arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, spreadArrayInstr);
    }
    else
    {
        arrayOpnd = arraySrcOpnd->AsRegOpnd();
    }

    argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();

    // Walk the arg chain and find the start call
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Nothing to be done for the function object, emit as normal
    IR::Instr *thisInstr = argLinkSym->m_instrDef;
    IR::RegOpnd *thisOpnd = thisInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = thisOpnd->m_sym->AsStackSym();
    thisInstr->Unlink();
    thisInstr->FreeDst();

    // Remove the array ArgOut instr and StartCall, they are no longer needed
    spreadArrayInstr->Unlink();
    spreadArrayInstr->FreeDst();

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = startCallInstr->GetNextRealInstr();
    startCallInstr->Remove();

    // argsLength = array.length
    IR::RegOpnd *argsLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::IndirOpnd *arrayLengthPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    LowererMD::CreateAssign(argsLengthOpnd, arrayLengthPtrOpnd, callInstr);

    // Don't bother expanding args if there are zero
    IR::LabelInstr *zeroArgsLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(argsLengthOpnd, IR::IntConstOpnd::New(0, TyInt8, func), Js::OpCode::BrEq_A, true, zeroArgsLabel, callInstr);

    // Running index for the element loop, starting at the length.
    IR::RegOpnd *indexOpnd = IR::RegOpnd::New(TyUint32, func);
    LowererMD::CreateAssign(indexOpnd, argsLengthOpnd, callInstr);

    // Get the array head segment and compute the address of its elements.
    IR::IndirOpnd *arrayHeadPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, func);
    IR::RegOpnd *arrayElementsStartOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertAdd(false, arrayElementsStartOpnd, arrayHeadPtrOpnd, IR::IntConstOpnd::New(offsetof(Js::SparseArraySegment<Js::Var>, elements), TyUint8, func), callInstr);

    // Platform-specific loop that argouts each element.
    this->m_lowererMD.LowerInlineSpreadArgOutLoop(callInstr, indexOpnd, arrayElementsStartOpnd);

    // Resume if we have zero args
    callInstr->InsertBefore(zeroArgsLabel);

    // Lower call
    callInstr->m_opcode = Js::OpCode::CallIDynamic;
    callInstr = m_lowererMD.LowerCallIDynamic(callInstr, thisInstr, argsLengthOpnd, callFlags, insertBeforeInstrForCFG);

    return callInstr;
}
// Lowers a CallIDynamic (e.g. f.apply(this, arguments)): walks the arg chain
// (this ArgOut_A_Dynamic -> ArgOut_A_FromStackArgs -> StartCall), expands the
// stack arguments into real argouts, removes the now-redundant chain
// instructions, and hands off to the MD lowering.
// Requires the function to have stack args; otherwise throws a rejit exception.
IR::Instr *
Lowerer::LowerCallIDynamic(IR::Instr * callInstr, ushort callFlags)
{
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::SymOpnd * argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // First link: the ArgOut for 'this'; kept aside for the MD lowering.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::Instr* saveThisArgOutInstr = argInstr;
    saveThisArgOutInstr->Unlink();
    saveThisArgOutInstr->FreeDst();

    argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Second link: the stack-args ArgOut; expand it into real argouts.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    IR::Opnd* argsLength = m_lowererMD.GenerateArgOutForStackArgs(callInstr, argInstr);

    IR::RegOpnd* startCallDstOpnd = argInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = startCallDstOpnd->m_sym->AsStackSym();
    startCallDstOpnd->Free(this->m_func);
    argInstr->Remove();// Remove ArgOut_A_FromStackArgs

    // Final link: the StartCall; remember the CFG insertion point, then drop it.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = argInstr->GetNextRealInstr();
    argInstr->Remove(); //Remove start call

    return m_lowererMD.LowerCallIDynamic(callInstr, saveThisArgOutInstr, argsLength, callFlags, insertBeforeInstrForCFG);
}
//This is only for x64 & ARM.
// Expands an ArgOut_A_FromStackArgs into a runtime loop that argouts each of
// the current function's stack arguments. Loads the args length, loops from
// the last argument down to index 1, then emits the remaining argument at
// fixed outgoing position 4. Delegates to
// GenerateArgOutForInlineeStackArgs when the caller is an inlinee.
// Returns the operand holding the argument count (saved before the loop
// mutates the length register), used for callinfo and stack allocation.
IR::Opnd*
Lowerer::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    //     s25.var       =  LdLen_A          s4.var
    //     s26.var       =  Ld_A             s25.var
    //                      BrNeq_I4         $L3, s25.var,0
    // $L2:
    //                      BrNeq_I4         $L4, s25.var,1
    //     s25.var       =  SUB_I4           s25.var, 0x1
    //     s10.var       =  LdElemI_A        [s4.var+s25.var].var
    //                      ArgOut_A_Dynamic s10.var, s25.var
    //                      Br $L2
    // $L4:
    //     s10.var       =  LdElemI_A        [s4.var].var
    //                      ArgOut_A_Dynamic s10.var, 4
    // $L3
#if defined(_M_IX86)
    Assert(false);
#endif

    Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);

    this->m_lowererMD.GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);

    if (callInstr->m_func->IsInlinee())
    {
        return this->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
    }
    Func *func = callInstr->m_func;
    IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();

    // ldLenDst = arguments.length (fast path for real stack args)
    IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyMachReg, func);
    IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd ,stackArgs, func);
    ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); /*LdLen_A works only on stack arguments*/
    callInstr->InsertBefore(ldLen);
    GenerateFastRealStackArgumentsLdLen(ldLen);

    // Save the original length; the loop below destroys ldLenDstOpnd.
    IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyMachReg, func), ldLenDstOpnd, func);
    saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
    callInstr->InsertBefore(saveLenInstr);

    // Zero args: skip everything.
    IR::LabelInstr* doneArgs = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::Instr* branchDoneArgs = IR::BranchInstr::New(Js::OpCode::BrEq_I4, doneArgs, ldLenDstOpnd, IR::IntConstOpnd::New(0, TyInt8, func),func);
    callInstr->InsertBefore(branchDoneArgs);
    this->m_lowererMD.EmitInt4Instr(branchDoneArgs);

    IR::LabelInstr* startLoop = InsertLoopTopLabel(callInstr);
    Loop * loop = startLoop->GetLoop();
    IR::LabelInstr* endLoop = IR::LabelInstr::New(Js::OpCode::Label, func);

    // Exit the loop when a single argument remains.
    IR::Instr* branchOutOfLoop = IR::BranchInstr::New(Js::OpCode::BrEq_I4, endLoop, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
    callInstr->InsertBefore(branchOutOfLoop);
    this->m_lowererMD.EmitInt4Instr(branchOutOfLoop);

    // len-- ; argout arguments[len] at position derived from len.
    IR::Instr* subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyMachReg, func),func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
    IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    argout->SetSrc2(ldLenDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    IR::BranchInstr *tailBranch = IR::BranchInstr::New(Js::OpCode::Br, startLoop, func);
    callInstr->InsertBefore(tailBranch);
    callInstr->InsertBefore(endLoop);
    this->m_lowererMD.LowerUncondBranch(tailBranch);

    // The length register is read on the back edge.
    loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);

    // Post-loop: len-- (now 0), then argout arguments[0] at fixed position 4.
    subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyMachReg, func),func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
    ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object

    callInstr->InsertBefore(doneArgs);

    /*return the length which will be used for callInfo generations & stack allocation*/
    return saveLenInstr->GetDst()->AsRegOpnd();
}
// Loads the actual argument at the (runtime) index in indexOpnd from the
// caller's stack frame into dst: dst = [ebp + (formalParamOffset + offset
// + index) * sizeof(Var)]. 'offset' lets callers skip leading slots
// (e.g. +1 to skip 'this'). Assumes the index is known to be in range.
void
Lowerer::GenerateLoadStackArgumentByIndex(IR::Opnd *dst, IR::RegOpnd *indexOpnd, IR::Instr *instr, int32 offset, Func *func)
{
    // Load argument set dst = [ebp + index].
    IR::RegOpnd *ebpOpnd = IR::Opnd::CreateFramePointerOpnd(func);
    IR::IndirOpnd *argIndirOpnd = nullptr;

    // The stack looks like this:
    //     [new.target or FrameDisplay] <== EBP + formalParamOffset (4) + callInfo.Count - 1
    //     arguments[n]                 <== EBP + formalParamOffset (4) + n
    //     ...
    //     arguments[1]                 <== EBP + formalParamOffset (4) + 2
    //     arguments[0]                 <== EBP + formalParamOffset (4) + 1
    //     this or new.target           <== EBP + formalParamOffset (4)
    //     callinfo
    //     function object
    //     return addr
    // EBP->EBP chain

    //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
    int32 actualOffset = GetFormalParamOffset() + offset;
    Assert(GetFormalParamOffset() == 4);
    const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();

    argIndirOpnd = IR::IndirOpnd::New(ebpOpnd, indexOpnd, indirScale, TyMachReg, this->m_func);
    // Scale the slot offset to bytes to match the scaled index.
    argIndirOpnd->SetOffset(actualOffset << indirScale);
    LowererMD::CreateAssign(dst, argIndirOpnd, instr);
}
//This function assumes there is a stackargs bailout and the index is always in range.
// Replaces an LdElemI_A on the arguments object with a direct load from the
// stack frame (or the inlinee's argv slots). Removes the original ldElem.
// Returns false (no helper path was emitted).
bool
Lowerer::GenerateFastStackArgumentsLdElemI(IR::Instr* ldElem)
{
    //  MOV dst, ebp [(valueOpnd + 5) *4]  // 5 for the stack layout
    //
    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();

    // Now load the index and check if it is an integer.
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    Assert (indexOpnd && indexOpnd->IsTaggedInt());

    if(ldElem->m_func->IsInlinee())
    {
        // Inlinee: args live in the inlinee's argv slots, not at [ebp + ...].
        IR::IndirOpnd *argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, indexOpnd);
        LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);
    }
    else
    {
        GenerateLoadStackArgumentByIndex(ldElem->GetDst(), indexOpnd, ldElem, indirOpnd->GetOffset() + 1, m_func); // +1 to offset 'this'
    }

    ldElem->Remove();
    return false;
}
// Builds an indirect operand addressing the inlinee's actual argument at
// 'valueOpnd' (constant or register index), based at the inlinee's first
// real argument slot (i.e. after 'this'). Emits an LEA before ldElem to
// materialize that base address.
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForInlinee(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    Assert(ldElem->m_func->IsInlinee());
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // Address of argument after 'this'
    const auto firstRealArgStackSym = ldElem->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, m_func);

    // baseOpnd = &firstRealArg
    IR::RegOpnd *const baseOpnd = IR::RegOpnd::New(TyMachReg, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    InsertLea(baseOpnd, firstArg, ldElem);

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold it into the displacement.
        IntConstType offset = valueOpnd->AsIntConstOpnd()->GetValue() * MachPtr;

        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, ldElem->m_func);
    }
    else
    {
        // Register index: use scaled-index addressing.
        Assert(valueOpnd->IsRegOpnd());
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, valueOpnd->AsRegOpnd(), indirScale, TyMachReg, ldElem->m_func);
    }
    return argIndirOpnd;
}
// Builds an indirect operand addressing the top-level function's actual
// argument at 'valueOpnd' (constant or register index). The base is the
// frame pointer — or the generator's args pointer for coroutines, where the
// layout differs (offset 1 instead of formalParamOffset + 1).
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForTopFunction(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    // Load argument set dst = [ebp + index] (or grab from the generator object if m_func is a generator function).
    IR::RegOpnd *baseOpnd = m_func->GetJITFunctionBody()->IsCoroutine() ? LoadGeneratorArgsPtr(ldElem) : IR::Opnd::CreateFramePointerOpnd(m_func);
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // The stack looks like this:
    //     ...
    //     arguments[1]
    //     arguments[0]
    //     this
    //     callinfo
    //     function object
    //     return addr
    // EBP->EBP chain

    //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
    uint16 actualOffset = m_func->GetJITFunctionBody()->IsCoroutine() ? 1 : GetFormalParamOffset() + 1; //5
    Assert(actualOffset == 5 || m_func->GetJITFunctionBody()->IsGenerator());

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold base offset and index into one displacement.
        IntConstType offset = (valueOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr;

        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
    }
    else
    {
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), valueOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);

        // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
        // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
        // valueOpnd represents is not changed.
        argIndirOpnd->SetOffset(actualOffset << indirScale);
    }
    return argIndirOpnd;
}
// Emits a bounds check "index vs. actual argument count" before ldElem and a
// conditional branch (with the caller-chosen condition 'opcode') to the
// heap-arguments fallback when the check fails.
void
Lowerer::GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode opcode)
{
    // Check if index < nr_actuals.
    InsertCompare(actualParamOpnd, valueOpnd, ldElem);

    // Jump to helper if index >= nr_actuals.
    // Do an unsigned check here so that a negative index will also fail.
    // (GenerateLdValueFromCheckedIndexOpnd does not guarantee positive index on x86.)
    InsertBranch(opcode, true, labelCreateHeapArgs, ldElem);
}
// Emits the fast path for arguments[i] when the stack-args optimization is
// on: range-checks the index against the actual count and loads the value
// directly from the stack frame (or inlinee argv), falling back to
// labelCreateHeapArgs (bailout to create the heap Arguments object)
// otherwise. Throws a rejit exception when no fast path can be emitted.
bool
Lowerer::GenerateFastArgumentsLdElemI(IR::Instr* ldElem, IR::LabelInstr *labelFallThru)
{
    //  ---GenerateSmIntTest
    //  ---GenerateLdValueFromCheckedIndexOpnd
    //  ---LoadInputParamCount
    //  CMP actualParamOpnd, valueOpnd //Compare between the actual count & the index count (say i in arguments[i])
    //  JLE $labelCreateHeapArgs
    //  MOV dst, ebp [(valueOpnd + 5) *4]  // 5 for the stack layout
    //  JMP $fallthrough
    //
    //labelCreateHeapArgs:
    //  ---Bail out to create Heap Arguments object

    Assert(ldElem->DoStackArgsOpt(this->m_func));

    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    bool isInlinee = ldElem->m_func->IsInlinee();
    Func *func = ldElem->m_func;

    IR::LabelInstr *labelCreateHeapArgs = IR::LabelInstr::New(Js::OpCode::Label, func, true);

    // Now load the index and check if it is an integer.
    bool emittedFastPath = false;
    bool isNotInt = false;
    IntConstType value = 0;
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::Opnd *valueOpnd = nullptr;
    IR::Opnd *actualParamOpnd = nullptr;

    bool hasIntConstIndex = indirOpnd->TryGetIntConstIndexValue(true, &value, &isNotInt);

    if (isInlinee && hasIntConstIndex && value >= (ldElem->m_func->actualCount - 1))
    {
        //Outside the range of actuals, skip
    }
    else if (labelFallThru != nullptr && !(hasIntConstIndex && value < 0)) //if index is not a negative int constant
    {
        if (isInlinee)
        {
            // Inlinee actual count is a JIT-time constant (minus 'this').
            actualParamOpnd = IR::IntConstOpnd::New(ldElem->m_func->actualCount - 1, TyInt32, func);
        }
        else
        {
            // Load actuals count, LoadHeapArguments will reuse the generated instructions here
            IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldElem, -1 /* don't include 'this' while counting actuals. */);
            actualParamOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
        }

        if (hasIntConstIndex)
        {
            //Constant index
            valueOpnd = IR::IntConstOpnd::New(value, TyInt32, func);
        }
        else
        {
            //Load valueOpnd from the index
            valueOpnd =
                m_lowererMD.LoadNonnegativeIndex(
                    indexOpnd,
                    (
#if INT32VAR
                        indexOpnd->GetType() == TyUint32
#else
                        // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
                        true
#endif
                    ),
                    labelCreateHeapArgs,
                    labelCreateHeapArgs,
                    ldElem);
        }

        if (isInlinee)
        {
            if (!hasIntConstIndex)
            {
                //Runtime check to make sure the index is within the arguments.length range.
                // Note: operands are swapped relative to the non-inlinee call,
                // with the condition flipped accordingly.
                GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, valueOpnd, actualParamOpnd, Js::OpCode::BrGe_A);
            }
        }
        else
        {
            GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, actualParamOpnd, valueOpnd, Js::OpCode::BrLe_A);
        }

        IR::Opnd *argIndirOpnd = nullptr;
        if (isInlinee)
        {
            argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, valueOpnd);
        }
        else
        {
            argIndirOpnd = GetArgsIndirOpndForTopFunction(ldElem, valueOpnd);
        }

        LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);

        // JMP $done
        InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

        // $labelCreateHeapArgs:
        ldElem->InsertBefore(labelCreateHeapArgs);
        emittedFastPath = true;
    }

    if (!emittedFastPath)
    {
        throw Js::RejitException(RejitReason::DisableStackArgOpt);
    }

    return emittedFastPath;
}
  16937. bool
  16938. Lowerer::GenerateFastRealStackArgumentsLdLen(IR::Instr *ldLen)
  16939. {
  16940. if(ldLen->m_func->IsInlinee())
  16941. {
  16942. //Get the length of the arguments
  16943. LowererMD::CreateAssign(ldLen->GetDst(),
  16944. IR::IntConstOpnd::New(ldLen->m_func->actualCount - 1, TyUint32, ldLen->m_func),
  16945. ldLen);
  16946. }
  16947. else
  16948. {
  16949. IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
  16950. IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
  16951. LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
  16952. }
  16953. ldLen->Remove();
  16954. return false;
  16955. }
// Emits the fast path for arguments.length under the stack-args
// optimization: stores the actual count (excluding 'this') into ldLen's dst
// as a tagged integer Var — a JIT-time constant for inlinees, or
// LoadInputParamCount + int-to-Var conversion otherwise. Returns true.
bool
Lowerer::GenerateFastArgumentsLdLen(IR::Instr *ldLen, IR::LabelInstr* labelFallThru)
{
    //  TEST argslot, argslot  //Test if the arguments slot is zero
    //  JNE $helper
    //  actualCountOpnd <- LoadInputParamCount fastpath
    //  SHL actualCountOpnd, actualCountOpnd, 1  // Left shift for tagging
    //  INC actualCountOpnd                      // Tagging
    //  MOV dst, actualCountOpnd
    //  JMP $fallthrough
    //$helper:

    Assert(ldLen->DoStackArgsOpt(this->m_func));

    if(ldLen->m_func->IsInlinee())
    {
        //Get the length of the arguments (known at JIT time), pre-tagged as a Var.
        LowererMD::CreateAssign(ldLen->GetDst(),
            IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(ldLen->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, ldLen->m_func), // -1 to exclude this pointer
            ldLen);
    }
    else
    {
        IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
        IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();

        // Tag the raw count as a Var before storing it.
        this->m_lowererMD.GenerateInt32ToVarConversion(actualCountOpnd, ldLen);
        LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
    }
    return true;
}
  16984. IR::RegOpnd*
  16985. Lowerer::GenerateFunctionTypeFromFixedFunctionObject(IR::Instr *insertInstrPt, IR::Opnd* functionObjOpnd)
  16986. {
  16987. IR::RegOpnd * functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  16988. IR::Opnd *functionTypeOpnd = nullptr;
  16989. if(functionObjOpnd->IsAddrOpnd())
  16990. {
  16991. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  16992. // functionTypeRegOpnd = MOV [fixed function address + type offset]
  16993. functionObjAddrOpnd->m_address;
  16994. functionTypeOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::RecyclableObject::GetOffsetOfType()), TyMachPtr, this->m_func,
  16995. IR::AddrOpndKindDynamicObjectTypeRef);
  16996. }
  16997. else
  16998. {
  16999. functionTypeOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, this->m_func);
  17000. }
  17001. LowererMD::CreateAssign(functionTypeRegOpnd, functionTypeOpnd, insertInstrPt);
  17002. return functionTypeRegOpnd;
  17003. }
  17004. void
  17005. Lowerer::FinalLower()
  17006. {
  17007. this->m_lowererMD.FinalLower();
  17008. // ensure that the StartLabel and EndLabel are inserted
  17009. // before the prolog and after the epilog respectively
  17010. IR::LabelInstr * startLabel = m_func->GetFuncStartLabel();
  17011. if (startLabel != nullptr)
  17012. {
  17013. m_func->m_headInstr->InsertAfter(startLabel);
  17014. }
  17015. IR::LabelInstr * endLabel = m_func->GetFuncEndLabel();
  17016. if (endLabel != nullptr)
  17017. {
  17018. m_func->m_tailInstr->GetPrevRealInstr()->InsertBefore(endLabel);
  17019. }
  17020. }
void
Lowerer::EHBailoutPatchUp()
{
    // Post-layout pass that wires up exception-handling regions for bailouts.
    Assert(this->m_func->isPostLayout);
    // 1. Insert return thunks for all the regions.
    // 2. Set the hasBailedOut bit to true on all bailout paths in EH regions.
    // 3. Insert code after every bailout in a try or catch region to save the return value on the stack, and jump to the return thunk (See Region.h) of that region.
    // 4. Insert code right before the epilog, to restore the return value (saved in 2.) from a bailout into eax.
    IR::LabelInstr * restoreReturnValueFromBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr * epilogLabel;
    IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (exitPrevInstr->IsLabelInstr())
    {
        // A label already sits right before the exit; reuse it as the epilog label.
        epilogLabel = exitPrevInstr->AsLabelInstr();
    }
    else
    {
        // Otherwise create one and insert it just before the exit instruction.
        epilogLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(epilogLabel);
    }
    IR::Instr * tmpInstr = nullptr;
    // The restore-return-value stub only needs to be emitted once for the function.
    bool restoreReturnFromBailoutEmitted = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->m_func)
    {
        // Labels carry the region annotation; track the current region as we walk.
        if (instr->IsLabelInstr())
        {
            this->currentRegion = instr->AsLabelInstr()->GetRegion();
        }
        // Consider (radua): Assert(this->currentRegion) here?
        if (this->currentRegion)
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch || currentRegionType == RegionTypeFinally)
            {
                if (this->currentRegion->IsNonExceptingFinally())
                {
                    // Walk out of nested non-excepting finallys; if the nearest
                    // enclosing "real" region is the root, no thunk is needed here.
                    Region * parent = this->currentRegion->GetParent();
                    while (parent->IsNonExceptingFinally())
                    {
                        parent = parent->GetParent();
                    }
                    if (parent->GetType() == RegionTypeRoot)
                    {
                        continue;
                    }
                }
                this->InsertReturnThunkForRegion(this->currentRegion, restoreReturnValueFromBailoutLabel);
                if (instr->HasBailOutInfo())
                {
                    // Mark this bailout path (step 2) and patch its exit sequence (step 3).
                    this->SetHasBailedOut(instr);
                    tmpInstr = this->EmitEHBailoutStackRestore(instr);
                    this->EmitSaveEHBailoutReturnValueAndJumpToRetThunk(tmpInstr);
                    if (!restoreReturnFromBailoutEmitted)
                    {
                        // Step 4: emit the shared restore stub before the epilog, once.
                        this->EmitRestoreReturnValueFromEHBailout(restoreReturnValueFromBailoutLabel, epilogLabel);
                        restoreReturnFromBailoutEmitted = true;
                    }
                }
            }
        }
    }
    NEXT_INSTR_IN_FUNC_EDITING
}
bool
Lowerer::GenerateFastLdFld(IR::Instr * const instrLdFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod,
    IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd, bool* pIsHelper, IR::LabelInstr** pLabelHelper)
{
    // Emits the inline-cache fast path(s) for a field load. Always returns false:
    // the original LdFld instruction is left in place for the caller to lower as
    // the helper call; *pIsHelper is set to true and *pLabelHelper receives (or
    // keeps) the helper label the fast paths branch to on a cache miss.
    // NOTE(review): helperMethod, polymorphicHelperMethod and labelBailOut are not
    // referenced in this function body — presumably consumed by the caller.
    //
    // Generates:
    //
    // r1 = object->type
    // if (r1 is taggedInt) goto helper
    // Load inline cache
    // if monomorphic
    // r2 = address of the monomorphic inline cache
    // if polymorphic
    // r2 = address of the polymorphic inline cache array
    // r3 = (type >> PIC shift amount) & (PIC size - 1)
    // r2 = r2 + r3
    // Try load property using proto cache (if protoFirst)
    // Try load property using local cache
    // Try loading property using proto cache (if !protoFirst)
    // Try loading property using flags cache
    //
    // Loading property using local cache:
    // if (r1 == r2->u.local.type)
    // result = load inline slot r2->u.local.slotIndex from r1
    // goto fallthru
    // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    // result = load aux slot r2->u.local.slotIndex from r1
    // goto fallthru
    //
    // Loading property using proto cache:
    // if (r1 == r2->u.proto.type)
    // r3 = r2->u.proto.prototypeObject
    // result = load inline slot r2->u.proto.slotIndex from r3
    // goto fallthru
    // if (r1 | InlineCacheAuxSlotTypeTag) == r2.u.proto.type)
    // r3 = r2->u.proto.prototypeObject
    // result = load aux slot r2->u.proto.slotIndex from r3
    // goto fallthru
    //
    // Loading property using flags cache:
    // if (r2->u.accessor.flags & (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) == 0)
    // if (r1 == r2->u.accessor.type)
    // result = load inline slot r2->u.accessor.slotIndex from r1
    // goto fallthru
    // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.accessor.type)
    // result = load aux slot r2->u.accessor.slotIndex from r1
    // goto fallthru
    //
    // Loading an inline slot:
    // result = [r1 + slotIndex * sizeof(Var)]
    //
    // Loading an aux slot:
    // slotArray = r1->auxSlots
    // result = [slotArray + slotIndex * sizeof(Var)]
    //
    // We only emit the code block for a type of cache (local/proto/flags) if the profile data
    // indicates that type of cache was used to load the property in the past.
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // load the property from an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // load the property from an inline slot before.
    IR::Opnd * opndSrc = instrLdFld->GetSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as src of LdFld");
    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));
    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: false\n"),
        Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);
    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;
    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;
    // Method-load opcodes are profiled to usually hit the prototype chain, so the
    // proto cache is both enabled and checked first for them.
    bool doLocal = true;
    bool doProto = instrLdFld->m_opcode == Js::OpCode::LdMethodFld
        || instrLdFld->m_opcode == Js::OpCode::LdRootMethodFld
        || instrLdFld->m_opcode == Js::OpCode::ScopedLdMethodFld;
    bool doProtoFirst = doProto;
    bool doInlineSlots = true;
    bool doAuxSlots = true;
    // Narrow the emitted fast paths using the instruction's field-info profile.
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrLdFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrLdFld = instrLdFld->AsProfiledInstr();
        if (profiledInstrLdFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            doProto = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromProto);
            doLocal = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromLocal);
            if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrLdFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field load.
            doLocal = false;
            doProto = false;
        }
    }
    if (!doLocal && !doProto)
    {
        // No fast path to emit; caller lowers the instruction as a plain helper call.
        return false;
    }
    IR::LabelInstr * labelFallThru = instrLdFld->GetOrCreateContinueLabel();
    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    }
    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;
    // opndInlineCache = address of the (mono or poly) inline cache to probe.
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrLdFld);
    }
    else
    {
        LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd, isHelper), instrLdFld);
    }
    if (typeOpnd == nullptr)
    {
        // Caller didn't supply the object's type; emit the tagged-value test and type load.
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, typeOpnd, labelHelper);
    }
    if (usePolymorphicInlineCache)
    {
        // Index into the polymorphic cache array by hashing the type.
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrLdFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }
    // Each emitted check branches to labelNext on miss; the last one is re-pointed
    // at labelHelper below, and the final (unused) labelNext is removed.
    IR::LabelInstr * labelNext = nullptr;
    IR::Opnd * opndDst = instrLdFld->GetDst();
    IR::RegOpnd * opndTaggedType = nullptr;
    IR::BranchInstr * labelNextBranchToPatch = nullptr;
    if (doProto && doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                // Lazily build type | InlineCacheAuxSlotTypeTag for the aux-slot checks.
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }
    if (doLocal)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }
    if (doProto && !doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }
    // The miss branch of the last emitted check should fall into the helper, not
    // into a dead label; retarget it and delete the now-unreferenced label.
    Assert(labelNextBranchToPatch);
    labelNextBranchToPatch->SetTarget(labelHelper);
    labelNext->Remove();
    // $helper:
    // dst = CALL Helper(inlineCache, base, field, scriptContext)
    // $fallthru:
    isHelper = true;
    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
  17303. void
  17304. Lowerer::GenerateAuxSlotAdjustmentRequiredCheck(
  17305. IR::Instr * instrToInsertBefore,
  17306. IR::RegOpnd * opndInlineCache,
  17307. IR::LabelInstr * labelHelper)
  17308. {
  17309. // regSlotCap = MOV [&(inlineCache->u.local.rawUInt16)] // sized to 16 bits
  17310. IR::RegOpnd * regSlotCap = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  17311. IR::IndirOpnd * memSlotCap = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.rawUInt16), TyUint16, instrToInsertBefore->m_func);
  17312. InsertMove(regSlotCap, memSlotCap, instrToInsertBefore);
  17313. // SAR regSlotCap, Js::InlineCache::CacheLayoutSelectorBitCount
  17314. IR::IntConstOpnd * constSelectorBitCount = IR::IntConstOpnd::New(Js::InlineCache::CacheLayoutSelectorBitCount, TyUint16, instrToInsertBefore->m_func, /* dontEncode = */ true);
  17315. InsertShiftBranch(Js::OpCode::Shr_A, regSlotCap, regSlotCap, constSelectorBitCount, Js::OpCode::BrNeq_A, true, labelHelper, instrToInsertBefore);
  17316. }
  17317. void
  17318. Lowerer::GenerateSetObjectTypeFromInlineCache(
  17319. IR::Instr * instrToInsertBefore,
  17320. IR::RegOpnd * opndBase,
  17321. IR::RegOpnd * opndInlineCache,
  17322. bool isTypeTagged)
  17323. {
  17324. // regNewType = MOV [&(inlineCache->u.local.type)]
  17325. IR::RegOpnd * regNewType = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  17326. IR::IndirOpnd * memNewType = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrToInsertBefore->m_func);
  17327. InsertMove(regNewType, memNewType, instrToInsertBefore);
  17328. // AND regNewType, ~InlineCacheAuxSlotTypeTag
  17329. if (isTypeTagged)
  17330. {
  17331. // On 64-bit platforms IntConstOpnd isn't big enough to hold TyMachReg values.
  17332. IR::IntConstOpnd * constTypeTagComplement = IR::IntConstOpnd::New(~InlineCacheAuxSlotTypeTag, TyMachReg, instrToInsertBefore->m_func, /* dontEncode = */ true);
  17333. InsertAnd(regNewType, regNewType, constTypeTagComplement, instrToInsertBefore);
  17334. }
  17335. // MOV base->type, regNewType
  17336. IR::IndirOpnd * memObjType = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrToInsertBefore->m_func);
  17337. InsertMove(memObjType, regNewType, instrToInsertBefore);
  17338. }
bool
Lowerer::GenerateFastStFld(IR::Instr * const instrStFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod, IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd,
    bool* pIsHelper, IR::LabelInstr** pLabelHelper, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    // Emits inline-cache fast path(s) for a field store (and, when profiled, the
    // add-property path that also updates the object's type from the cache).
    // Always returns false when fast paths were emitted: the StFld itself is left
    // for the caller to lower as the helper call; *pIsHelper becomes true and
    // *pLabelHelper receives (or keeps) the miss/helper label.
    // NOTE(review): helperMethod, polymorphicHelperMethod, labelBailOut,
    // withPutFlags and flags are not referenced in this function body —
    // presumably consumed by the caller.
    //
    // Generates:
    //
    // r1 = object->type
    // if (r1 is taggedInt) goto helper
    // Load inline cache
    // if monomorphic
    // r2 = address of the monomorphic inline cache
    // if polymorphic
    // r2 = address of the polymorphic inline cache array
    // r3 = (type >> PIC shift amount) & (PIC size - 1)
    // r2 = r2 + r3
    // Try store property using local cache
    //
    // Loading property using local cache:
    // if (r1 == r2->u.local.type)
    // store value to inline slot r2->u.local.slotIndex on r1
    // goto fallthru
    // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    // store value to aux slot r2->u.local.slotIndex on r1
    // goto fallthru
    //
    // Storing to an inline slot:
    // [r1 + slotIndex * sizeof(Var)] = value
    //
    // Storing to an aux slot:
    // slotArray = r1->auxSlots
    // [slotArray + slotIndex * sizeof(Var)] = value
    //
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // store the property to an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // store the property to an inline slot before.
    IR::Opnd * opndSrc = instrStFld->GetSrc1();
    IR::Opnd * opndDst = instrStFld->GetDst();
    AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as dst of StFld");
    IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field store: %s, property ID: %u, func: %s, cache ID: %d, cloned cache: false\n"),
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);
    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;
    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;
    // doStore: emit the store-to-existing-property path.
    // doAdd: emit the add-new-property path (type transition from the cache).
    bool doStore = true;
    bool doAdd = false;
    bool doInlineSlots = true;
    bool doAuxSlots = true;
    // Narrow the emitted fast paths using the instruction's field-info profile.
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrStFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrStFld = instrStFld->AsProfiledInstr();
        if (profiledInstrStFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            if (!(profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)))
            {
                // Profile says the store never hit a local slot; no fast path at all.
                return false;
            }
            if (!PHASE_OFF(Js::AddFldFastPathPhase, this->m_func))
            {
                // We always try to do the store field fast path, unless the profile specifically says we never set, but always add a property here.
                if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)) == Js::FldInfo_FromLocalWithoutProperty)
                {
                    doStore = false;
                }
                // On the other hand, we only emit the add field fast path, if the profile explicitly says we do add properties here.
                if (!!(profiledInstrStFld->u.FldInfo().flags & Js::FldInfo_FromLocalWithoutProperty))
                {
                    doAdd = true;
                }
            }
            else
            {
#if ENABLE_DEBUG_CONFIG_OPTIONS
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
                PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
                    _u("AddFldFastPath: function: %s(%s) property ID: %u no fast path, because the phase is off.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    propertySym->m_propertyId);
            }
            if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrStFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field store.
            return false;
        }
    }
    Assert(doStore || doAdd);
    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    IR::LabelInstr * labelFallThru = instrStFld->GetOrCreateContinueLabel();
    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;
    if (doAdd)
    {
#if ENABLE_DEBUG_CONFIG_OPTIONS
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
        PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
            _u("AddFldFastPath: function: %s(%s) property ID: %d %s fast path for %s.\n"),
            this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
            propertySym->m_propertyId,
            usePolymorphicInlineCache ? _u("poly") : _u("mono"), doStore ? _u("store and add") : _u("add only"));
    }
    // opndInlineCache = address of the (mono or poly) inline cache to probe.
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrStFld);
    }
    else
    {
        LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrStFld, propertySymOpnd, isHelper), instrStFld);
    }
    if (typeOpnd == nullptr)
    {
        // Caller didn't supply the object's type; emit the tagged-value test and type load.
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrStFld, opndBase, typeOpnd, labelHelper);
    }
    if (usePolymorphicInlineCache)
    {
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrStFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }
    // Each emitted check branches to labelNext on miss; the last one is re-pointed
    // at labelHelper below, and the final (unused) labelNext is removed.
    IR::LabelInstr * labelNext = nullptr;
    IR::RegOpnd * opndTaggedType = nullptr;
    IR::BranchInstr * lastBranchToNext = nullptr;
    if (doStore)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                // Lazily build type | InlineCacheAuxSlotTypeTag for the aux-slot checks.
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }
    if (doAdd)
    {
        if (doInlineSlots)
        {
            // Add path: on a typeWithoutProperty hit, transition the object's type
            // from the cache, then store into the new slot.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext, true);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, false);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }
            // NOTE(review): unlike the sibling labelNext creations above, this one
            // does not pass isHelper — confirm whether that is intentional.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext, true);
            // Adding into an aux slot may require growing the slot array; punt to
            // the helper when the cache says an adjustment is needed.
            GenerateAuxSlotAdjustmentRequiredCheck(instrStFld, opndInlineCache, labelHelper);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, true);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }
    // Retarget the miss branch of the last emitted check at the helper label and
    // delete the now-unreferenced trailing label.
    Assert(lastBranchToNext);
    lastBranchToNext->SetTarget(labelHelper);
    labelNext->Remove();
    // $helper:
    // CALL Helper(inlineCache, base, field, src, scriptContext)
    // $fallthru:
    isHelper = true;
    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
  17543. bool Lowerer::GenerateFastStFldForCustomProperty(IR::Instr *const instr, IR::LabelInstr * *const labelHelperRef)
  17544. {
  17545. Assert(instr);
  17546. Assert(labelHelperRef);
  17547. Assert(!*labelHelperRef);
  17548. switch(instr->m_opcode)
  17549. {
  17550. case Js::OpCode::StFld:
  17551. case Js::OpCode::StFldStrict:
  17552. break;
  17553. default:
  17554. return false;
  17555. }
  17556. IR::SymOpnd *const symOpnd = instr->GetDst()->AsSymOpnd();
  17557. PropertySym *const propertySym = symOpnd->m_sym->AsPropertySym();
  17558. if(propertySym->m_propertyId != Js::PropertyIds::lastIndex || !symOpnd->IsPropertySymOpnd())
  17559. {
  17560. return false;
  17561. }
  17562. const ValueType objectValueType(symOpnd->GetPropertyOwnerValueType());
  17563. if(!objectValueType.IsLikelyRegExp())
  17564. {
  17565. return false;
  17566. }
  17567. if(instr->HasBailOutInfo())
  17568. {
  17569. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  17570. if(!BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) || bailOutKind & IR::BailOutKindBits)
  17571. {
  17572. // Other bailout kinds will likely need bailout checks that would not be generated here. In particular, if a type
  17573. // check is necessary here to guard against downstream property accesses on the same object, the type check will
  17574. // fail and cause a bailout if the object is a RegExp object since the "lastIndex" property accesses are not cached.
  17575. return false;
  17576. }
  17577. }
  17578. Func *const func = instr->m_func;
  17579. IR::RegOpnd *const objectOpnd = symOpnd->CreatePropertyOwnerOpnd(func);
  17580. const IR::AutoReuseOpnd autoReuseObjectOpnd(objectOpnd, func);
  17581. IR::LabelInstr *labelHelper = nullptr;
  17582. if(!objectOpnd->IsNotTaggedValue())
  17583. {
  17584. // test object, 1
  17585. // jnz $helper
  17586. if(!labelHelper)
  17587. {
  17588. *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  17589. }
  17590. m_lowererMD.GenerateObjectTest(objectOpnd, instr, labelHelper);
  17591. }
  17592. if(!objectValueType.IsObject())
  17593. {
  17594. // cmp [object], Js::JavascriptRegExp::vtable
  17595. // jne $helper
  17596. if(!labelHelper)
  17597. {
  17598. *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  17599. }
  17600. InsertCompareBranch(
  17601. IR::IndirOpnd::New(objectOpnd, 0, TyMachPtr, func),
  17602. LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp),
  17603. Js::OpCode::BrNeq_A,
  17604. labelHelper,
  17605. instr);
  17606. objectOpnd->SetValueType(objectValueType.ToDefiniteObject());
  17607. }
  17608. // mov [object + offset(lastIndexVar)], src
  17609. // mov [object + offset(lastIndexOrFlag)], Js::JavascriptRegExp::NotCachedValue
  17610. // jmp $done
  17611. InsertMove(
  17612. IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, func),
  17613. instr->GetSrc1(),
  17614. instr);
  17615. InsertMove(
  17616. IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, func),
  17617. IR::IntConstOpnd::New(Js::JavascriptRegExp::NotCachedValue, TyUint32, func, true),
  17618. instr);
  17619. InsertBranch(Js::OpCode::Br, instr->GetOrCreateContinueLabel(), instr);
  17620. return true;
  17621. }
  17622. IR::RegOpnd *
  17623. Lowerer::GenerateIsBuiltinRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject, IR::LabelInstr *labelContinue, bool isInHelper)
  17624. {
  17625. // CMP [srcReg], Js::DynamicObject::`vtable'
  17626. // JEQ $fallThough
  17627. // MOV r1, [src1 + offset(type)] -- get the type id
  17628. // MOV r1, [r1 + offset(typeId)]
  17629. // ADD r1, ~TypeIds_LastStaticType -- if (typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
  17630. // CMP r1, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
  17631. // JA $helper
  17632. //fallThrough:
  17633. IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
  17634. if (checkObjectAndDynamicObject)
  17635. {
  17636. if (!regOpnd->IsNotTaggedValue())
  17637. {
  17638. m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
  17639. }
  17640. m_lowererMD.GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
  17641. }
  17642. IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  17643. IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  17644. IR::IndirOpnd *indirOpnd;
  17645. // MOV typeRegOpnd, [src1 + offset(type)]
  17646. indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  17647. m_lowererMD.CreateAssign(typeRegOpnd, indirOpnd, insertInstr);
  17648. // MOV typeIdRegOpnd, [typeRegOpnd + offset(typeId)]
  17649. indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  17650. m_lowererMD.CreateAssign(typeIdRegOpnd, indirOpnd, insertInstr);
  17651. // ADD typeIdRegOpnd, ~TypeIds_LastStaticType
  17652. InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd,
  17653. IR::IntConstOpnd::New(~Js::TypeIds_LastStaticType, TyInt32, this->m_func, true), insertInstr);
  17654. // CMP typeIdRegOpnd, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
  17655. InsertCompare(
  17656. typeIdRegOpnd,
  17657. IR::IntConstOpnd::New(Js::TypeIds_LastBuiltinDynamicObject - Js::TypeIds_LastStaticType - 1, TyInt32, this->m_func),
  17658. insertInstr);
  17659. if (labelContinue)
  17660. {
  17661. // On success, go to continuation label.
  17662. InsertBranch(Js::OpCode::BrLe_A, true, labelContinue, insertInstr);
  17663. }
  17664. else
  17665. {
  17666. // On failure, go to helper.
  17667. InsertBranch(Js::OpCode::BrGt_A, true, labelHelper, insertInstr);
  17668. }
  17669. // $fallThrough
  17670. insertInstr->InsertBefore(labelFallthrough);
  17671. return typeRegOpnd;
  17672. }
  17673. void Lowerer::GenerateBooleanNegate(IR::Instr * instr, IR::Opnd * srcBool, IR::Opnd * dst)
  17674. {
  17675. // dst = src
  17676. // dst = dst ^ (true ^ false) (= !src)
  17677. LowererMD::CreateAssign(dst, srcBool, instr);
  17678. ScriptContextInfo* sci = instr->m_func->GetScriptContextInfo();
  17679. IR::AddrOpnd* xorval = IR::AddrOpnd::New(sci->GetTrueAddr() ^ sci->GetFalseAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func, true);
  17680. InsertXor(dst, dst, xorval, instr);
  17681. }
  17682. bool Lowerer::GenerateFastEqBoolInt(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
  17683. {
  17684. Assert(instr);
  17685. // There's a total of 8 modes for this function, based on these inferred flags
  17686. bool isBranchNotCompare = instr->IsBranchInstr();
  17687. bool isStrict = false;
  17688. bool isNegOp = false;
  17689. switch (instr->m_opcode)
  17690. {
  17691. case Js::OpCode::BrSrEq_A:
  17692. case Js::OpCode::BrSrNotNeq_A:
  17693. case Js::OpCode::BrSrNeq_A:
  17694. case Js::OpCode::BrSrNotEq_A:
  17695. case Js::OpCode::CmSrEq_A:
  17696. case Js::OpCode::CmSrNeq_A:
  17697. isStrict = true;
  17698. break;
  17699. default:
  17700. break;
  17701. }
  17702. switch (instr->m_opcode)
  17703. {
  17704. case Js::OpCode::BrSrEq_A:
  17705. case Js::OpCode::BrSrNotNeq_A:
  17706. case Js::OpCode::CmSrEq_A:
  17707. case Js::OpCode::BrEq_A:
  17708. case Js::OpCode::BrNotNeq_A:
  17709. case Js::OpCode::CmEq_A:
  17710. isNegOp = false;
  17711. break;
  17712. case Js::OpCode::BrSrNeq_A:
  17713. case Js::OpCode::BrSrNotEq_A:
  17714. case Js::OpCode::CmSrNeq_A:
  17715. case Js::OpCode::BrNeq_A:
  17716. case Js::OpCode::BrNotEq_A:
  17717. case Js::OpCode::CmNeq_A:
  17718. isNegOp = true;
  17719. break;
  17720. default:
  17721. // This opcode is not one of the ones that should be handled here.
  17722. return false;
  17723. break;
  17724. }
  17725. IR::Opnd *src1 = instr->GetSrc1();
  17726. IR::Opnd *src2 = instr->GetSrc2();
  17727. // The instrucions given to this _should_ all be 2-arg.
  17728. Assert(src1 && src2);
  17729. if (!(src1 && src2))
  17730. {
  17731. return false;
  17732. }
  17733. // If it's a branch instruction, we'll want these to be defined
  17734. //IR::BranchInstr *instrBranch = nullptr;
  17735. IR::LabelInstr *targetInstr = nullptr;
  17736. IR::LabelInstr *labelFallthrough = nullptr;
  17737. if (isBranchNotCompare)
  17738. {
  17739. IR::BranchInstr * instrBranch = instr->AsBranchInstr();
  17740. targetInstr = instrBranch->GetTarget();
  17741. labelFallthrough = instrBranch->GetOrCreateContinueLabel(isInHelper);
  17742. }
  17743. // Assume we need the helper until we can show otherwise.
  17744. *pNeedHelper = true;
  17745. // If we don't know the final types well enough at JIT time, a helper block to set
  17746. // the inputs to the correct types will be needed.
  17747. IR::LabelInstr *labelHelper = nullptr;
  17748. // If we're doing a compare and can handle it early, then we want to skip the helper
  17749. IR::LabelInstr *labelDone = instr->GetOrCreateContinueLabel(isInHelper);
  17750. // Normallize for orderings
  17751. IR::Opnd *srcBool = nullptr;
  17752. IR::Opnd *srcInt = nullptr;
  17753. if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyTaggedInt())
  17754. {
  17755. srcBool = src1;
  17756. srcInt = src2;
  17757. }
  17758. else if (src1->GetValueType().IsLikelyTaggedInt() && src2->GetValueType().IsLikelyBoolean())
  17759. {
  17760. srcInt = src1;
  17761. srcBool = src2;
  17762. }
  17763. else
  17764. {
  17765. return false;
  17766. }
  17767. // If either instruction is constant, we can simplify the check. If both are constant, we can eliminate it
  17768. bool srcIntConst = false;
  17769. bool srcIntConstVal = false;
  17770. // If we're comparing with a number that is not 0 or 1, then the two are inequal by default
  17771. bool srcIntIsBoolable = false;
  17772. bool srcBoolConst = false;
  17773. bool srcBoolConstVal = false;
  17774. if (srcInt->IsIntConstOpnd())
  17775. {
  17776. IR::IntConstOpnd * constSrcInt = srcInt->AsIntConstOpnd();
  17777. IntConstType constIntVal = constSrcInt->GetValue();
  17778. srcIntConst = true;
  17779. if (constIntVal == 0)
  17780. {
  17781. srcIntConstVal = false;
  17782. srcIntIsBoolable = true;
  17783. }
  17784. else if (constIntVal == 1)
  17785. {
  17786. srcIntConstVal = true;
  17787. srcIntIsBoolable = true;
  17788. }
  17789. }
  17790. else if (srcInt->IsAddrOpnd())
  17791. {
  17792. IR::AddrOpnd * addrSrcInt = srcInt->AsAddrOpnd();
  17793. if (!(addrSrcInt && addrSrcInt->IsVar() && Js::TaggedInt::Is(addrSrcInt->m_address)))
  17794. {
  17795. return false;
  17796. }
  17797. int32 constIntVal = Js::TaggedInt::ToInt32(addrSrcInt->m_address);
  17798. srcIntConst = true;
  17799. if (constIntVal == 0)
  17800. {
  17801. srcIntConstVal = false;
  17802. srcIntIsBoolable = true;
  17803. }
  17804. else if (constIntVal == 1)
  17805. {
  17806. srcIntConstVal = true;
  17807. srcIntIsBoolable = true;
  17808. }
  17809. }
  17810. else if (srcInt->IsConstOpnd())
  17811. {
  17812. // Not handled yet
  17813. return false;
  17814. }
  17815. if (srcBool->IsIntConstOpnd())
  17816. {
  17817. IR::IntConstOpnd * constSrcBool = srcBool->AsIntConstOpnd();
  17818. IntConstType constIntVal = constSrcBool->GetValue();
  17819. srcBoolConst = true;
  17820. srcBoolConstVal = constIntVal != 0;
  17821. }
  17822. else if (srcBool->IsAddrOpnd())
  17823. {
  17824. IR::AddrOpnd * addrSrcBool = srcInt->AsAddrOpnd();
  17825. if (!(addrSrcBool && addrSrcBool->IsVar() && Js::TaggedInt::Is(addrSrcBool->m_address)))
  17826. {
  17827. return false;
  17828. }
  17829. int32 value = Js::TaggedInt::ToInt32(addrSrcBool->m_address);
  17830. srcBoolConst = true;
  17831. srcBoolConstVal = value != 0;
  17832. }
  17833. else if (srcBool->IsConstOpnd())
  17834. {
  17835. // Not handled yet
  17836. return false;
  17837. }
  17838. // Do these checks here, since that way we avoid emitting instructions before exiting earlier
  17839. if (srcInt->GetValueType().IsTaggedInt() && srcBool->GetValueType().IsBoolean()) {
  17840. // ok, we know the types, so no helper needed
  17841. *pNeedHelper = false;
  17842. }
  17843. else
  17844. {
  17845. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17846. // check the types and jump to the helper if incorrect
  17847. if (!srcInt->IsConstOpnd() && !srcInt->GetValueType().IsTaggedInt())
  17848. {
  17849. this->m_lowererMD.GenerateSmIntTest(srcInt->AsRegOpnd(), instr, labelHelper);
  17850. }
  17851. if (!srcBool->IsConstOpnd() && !srcBool->GetValueType().IsBoolean())
  17852. {
  17853. if (!srcBool->GetValueType().IsObject())
  17854. {
  17855. this->m_lowererMD.GenerateObjectTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
  17856. }
  17857. this->m_lowererMD.GenerateJSBooleanTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
  17858. }
  17859. }
  17860. // At this point, we know both which operand is an integer and which is a boolean,
  17861. // whether either operand is constant, and what the constant true/false values are
  17862. // for any constant operands. This should allow us to emit some decent code.
  17863. LibraryValue equalResultValue = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
  17864. LibraryValue inequalResultValue = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
  17865. IR::LabelInstr *equalResultTarget = !isNegOp ? targetInstr : labelFallthrough;
  17866. IR::LabelInstr *inequalResultTarget = !isNegOp ? labelFallthrough : targetInstr;
  17867. // For the Sr instructions, we now know that the types are different, so we can immediately
  17868. // decide what the result will be.
  17869. if (isStrict)
  17870. {
  17871. if (isBranchNotCompare)
  17872. {
  17873. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
  17874. #if DBG
  17875. // Since we're not making a non-helper path to one of the branches, we need to tell
  17876. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  17877. // branches.
  17878. // Note: this following line isn't good practice in general
  17879. equalResultTarget->m_noHelperAssert = true;
  17880. #endif
  17881. }
  17882. else
  17883. {
  17884. LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  17885. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  17886. }
  17887. }
  17888. // Now that we've checked the types, we can lower some instructions to quickly do the check
  17889. // in the case that it's not a type-strict strict equality/inequality check.
  17890. else if (srcIntConst && srcBoolConst)
  17891. {
  17892. // If both arguments are constant, we can statically determine the result.
  17893. bool sameVal = srcIntConstVal == srcBoolConstVal;
  17894. if (isBranchNotCompare)
  17895. {
  17896. // For constant branches, branch to the target
  17897. Assert(instr);
  17898. IR::LabelInstr * target = sameVal && srcIntIsBoolable ? equalResultTarget : inequalResultTarget;
  17899. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, target, this->m_func));
  17900. #if DBG
  17901. // Since we're not making a non-helper path to one of the branches, we need to tell
  17902. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  17903. // branches.
  17904. // Note: this following line isn't good practice in general
  17905. (sameVal && srcIntIsBoolable ? inequalResultTarget : equalResultTarget)->m_noHelperAssert = true;
  17906. #endif
  17907. }
  17908. else
  17909. {
  17910. // For constant compares, load the constant result
  17911. LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, sameVal && srcIntIsBoolable ? equalResultValue : inequalResultValue), instr);
  17912. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  17913. }
  17914. }
  17915. else if (!srcIntConst && !srcBoolConst)
  17916. {
  17917. // If neither is constant, we can still do a bit better than loading the helper
  17918. IR::LabelInstr * firstFalse = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17919. IR::LabelInstr * forceInequal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17920. // We branch based on the zero-ness of the integer argument to two checks against the boolean argument
  17921. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, firstFalse);
  17922. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  17923. // case the two will compare as inequal
  17924. InsertCompareBranch(
  17925. IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func, true),
  17926. srcInt->AsRegOpnd(),
  17927. Js::OpCode::BrNeq_A,
  17928. isBranchNotCompare ? inequalResultTarget : forceInequal, // in the case of branching, we can go straight to the inequal target; for compares, we need to load the value
  17929. instr,
  17930. true);
  17931. if (isBranchNotCompare)
  17932. {
  17933. // if the int evaluates to 1 (true)
  17934. InsertCompareBranch(
  17935. srcBool,
  17936. LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue),
  17937. instr->m_opcode,
  17938. targetInstr,
  17939. instr);
  17940. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  17941. // if the int evaluates to 0 (false)
  17942. instr->InsertBefore(firstFalse);
  17943. InsertCompareBranch(
  17944. srcBool,
  17945. LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
  17946. instr->m_opcode,
  17947. targetInstr,
  17948. instr);
  17949. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  17950. }
  17951. else
  17952. {
  17953. // the int resolves to 1 (true)
  17954. // Load either the bool or its complement into the dst reg, depending on the opcode
  17955. if (isNegOp)
  17956. {
  17957. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  17958. }
  17959. else
  17960. {
  17961. this->InsertMove(instr->GetDst(), srcBool, instr);
  17962. }
  17963. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  17964. // the int resolves to 0 (false)
  17965. // Handle the complement case
  17966. instr->InsertBefore(firstFalse);
  17967. if (!isNegOp)
  17968. {
  17969. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  17970. }
  17971. else
  17972. {
  17973. this->InsertMove(instr->GetDst(), srcBool, instr);
  17974. }
  17975. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  17976. // the int resolves to something other than 0 or 1 (inequal to a bool)
  17977. instr->InsertBefore(forceInequal);
  17978. LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  17979. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  17980. }
  17981. }
  17982. else if (srcIntConst)
  17983. {
  17984. if (isBranchNotCompare)
  17985. {
  17986. if (srcIntIsBoolable)
  17987. {
  17988. LibraryValue intval = srcIntConstVal ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
  17989. InsertCompareBranch(
  17990. srcBool,
  17991. LoadLibraryValueOpnd(instr, intval),
  17992. instr->m_opcode,
  17993. targetInstr,
  17994. instr);
  17995. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  17996. }
  17997. else
  17998. {
  17999. // Since a constant int that isn't 0 or 1 will always be inequal to bools, just jump to the inequal result
  18000. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
  18001. #if DBG
  18002. // Since we're not making a non-helper path to one of the branches, we need to tell
  18003. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  18004. // branches.
  18005. // Note: this following line isn't good practice in general
  18006. equalResultTarget->m_noHelperAssert = true;
  18007. #endif
  18008. }
  18009. }
  18010. else
  18011. {
  18012. if (srcIntIsBoolable)
  18013. {
  18014. bool directPassthrough = isNegOp != srcIntConstVal;
  18015. if (directPassthrough)
  18016. {
  18017. // If this case is hit, the result value is the same as the value in srcBool
  18018. this->InsertMove(instr->GetDst(), srcBool, instr);
  18019. }
  18020. else
  18021. {
  18022. // Otherwise, the result value is the negation of the value in srcBool
  18023. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  18024. }
  18025. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  18026. }
  18027. else
  18028. {
  18029. LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  18030. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  18031. }
  18032. }
  18033. }
  18034. else if (srcBoolConst)
  18035. {
  18036. if (isBranchNotCompare)
  18037. {
  18038. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, srcBoolConstVal ? inequalResultTarget : equalResultTarget);
  18039. if (srcBoolConstVal)
  18040. {
  18041. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  18042. // case we have an issue.
  18043. InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, inequalResultTarget, instr, true);
  18044. }
  18045. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, srcBoolConstVal ? equalResultTarget : inequalResultTarget, this->m_func));
  18046. }
  18047. else
  18048. {
  18049. IR::LabelInstr* isNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  18050. IR::LabelInstr* isZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  18051. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, isZero);
  18052. if (srcBoolConstVal)
  18053. {
  18054. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  18055. // case we have an issue.
  18056. InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, isZero, instr, true);
  18057. }
  18058. instr->InsertBefore(isNonZero);
  18059. LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
  18060. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  18061. instr->InsertBefore(isZero);
  18062. LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, !srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
  18063. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  18064. }
  18065. }
  18066. if (*pNeedHelper)
  18067. {
  18068. instr->InsertBefore(labelHelper);
  18069. }
  18070. return true;
  18071. }
// Emits a fast path for (in)equality branches whose operands are likely both
// booleans or both objects. Delegates the type/shape checks to
// GenerateFastBooleanAndObjectEqLikely; on the fast path it lowers the branch
// to a direct machine-level comparison of the two operands.
// Returns false (emitting nothing) when the operand pattern isn't handled.
// *pNeedHelper reports whether the emitted helper label still needs the slow
// path placed after it by the caller.
bool Lowerer::GenerateFastBrEqLikely(IR::BranchInstr * instrBranch, bool *pNeedHelper, bool isInHelper)
{
    IR::Opnd *src1 = instrBranch->GetSrc1();
    IR::Opnd *src2 = instrBranch->GetSrc2();

    IR::LabelInstr *targetInstr = instrBranch->GetTarget();
    // Reached when the checks decide a direct comparison of the operands is sufficient.
    IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);

    // Continue (fallthrough) label of the branch, used after the lowered compare.
    IR::LabelInstr *labelTrue = instrBranch->GetOrCreateContinueLabel(isInHelper);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    // Assume the helper is needed until the type checks prove otherwise.
    *pNeedHelper = true;

    if (!this->GenerateFastBooleanAndObjectEqLikely(instrBranch, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
    {
        return false;
    }

    instrBranch->InsertBefore(labelEqualLikely);

    // $labelEqualLikely:
    //   lowered conditional branch on (src1, src2) to the original target,
    //   then an unconditional jump to the continue label.
    IR::BranchInstr *newBranch = IR::BranchInstr::New(instrBranch->m_opcode, targetInstr, src1, src2, this->m_func);
    instrBranch->InsertBefore(newBranch);
    this->m_lowererMD.LowerCondBranch(newBranch);

    newBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelTrue, this->m_func);
    instrBranch->InsertBefore(newBranch);

    // Slow path (the original instrBranch) sits behind the helper label.
    instrBranch->InsertBefore(labelHelper);

    return true;
}
// Shared type-check emitter for the likely-boolean / likely-object equality
// fast paths (used by both GenerateFastBrEqLikely and GenerateFastCmEqLikely).
// Emits checks that either:
//   - jump to labelEqualLikely when a direct pointer comparison of src1/src2
//     is sufficient to decide the (in)equality, or
//   - jump to labelHelper when the runtime types require the slow path.
// Returns false (emitting nothing) when the operand value types fit neither
// the boolean nor the object pattern. *pNeedHelper is cleared only when the
// types are statically known well enough that no helper block is required.
bool Lowerer::GenerateFastBooleanAndObjectEqLikely(IR::Instr * instr, IR::Opnd *src1, IR::Opnd *src2, IR::LabelInstr * labelHelper, IR::LabelInstr * labelEqualLikely, bool *pNeedHelper, bool isInHelper)
{
    *pNeedHelper = true;

    if (!src1 || !src2)
    {
        return false;
    }

    bool isStrictCompare = false;
    bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();

    // Only the type-strict (===/!==) opcodes set isStrictCompare; everything
    // else falls out of the switch with it left false.
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
        isStrictCompare = true;
        break;
    }

    if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyBoolean())
    {
        //
        // Booleans
        //
        if (isStrictCompare)
        {
            if (!src1->GetValueType().IsBoolean() && !src2->GetValueType().IsBoolean())
            {
                // Only src2 needs to be proven a boolean: for a strict compare,
                // if src2 is a boolean then identity comparison suffices.
                this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
                if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
                {
                    instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
                }
            }
            else
            {
                // At least one side is definitely a boolean; no helper needed.
                *pNeedHelper = false;
            }
        }
        else
        {
            // Loose compare: both sides must be proven booleans before the
            // identity comparison is valid.
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateJSBooleanTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
            {
                instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
            }
        }
    }
    else if (src1->GetValueType().HasBeenObject() && src2->GetValueType().HasBeenObject())
    {
        //
        // Objects
        //
        IR::LabelInstr *labelTypeIdCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
        if (!isStrictCompare)
        {
            // If not strictBr, verify both sides are dynamic objects
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateIsDynamicObject(src1->AsRegOpnd(), instr, labelTypeIdCheck, false);
        }
        else
        {
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
        }
        this->m_lowererMD.GenerateIsDynamicObject(src2->AsRegOpnd(), instr, labelEqualLikely, true);

        instr->InsertBefore(labelTypeIdCheck);

        if (isStrictMode)
        {
            // In strict mode, non-dynamic objects always take the helper path.
            labelTypeIdCheck->isOpHelper = true;
            IR::BranchInstr *branchToHelper = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
            instr->InsertBefore(branchToHelper);
        }
        else
        {
            // Try the external-object fast test first; otherwise fall back to
            // the builtin-recyclable-object typeId range check.
            if (!ExternalLowerer::TryGenerateFastExternalEqTest(src1, src2, instr, labelHelper, labelEqualLikely, this, isStrictCompare, isInHelper))
            {
                if (!isStrictCompare)
                {
                    GenerateIsBuiltinRecyclableObject(src1->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
                }
                GenerateIsBuiltinRecyclableObject(src2->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
            }
        }
    }
    else
    {
        return false;
    }

    return true;
}
// Emits a fast path for Cm(Sr)Eq/Neq compares whose operands are likely both
// booleans or both objects, loading the library true/false value into dst.
// Returns false (emitting nothing) when the operand pattern isn't handled.
// *pNeedHelper reports whether the emitted helper label still needs the slow
// path placed after it by the caller.
bool Lowerer::GenerateFastCmEqLikely(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
{
    *pNeedHelper = false;

    Assert(instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A);

    bool isNegOp = false;
    bool isStrict = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
        isStrict = true;
        break;

    case Js::OpCode::CmSrNeq_A:
        isStrict = true;
        // intentional fall through: CmSrNeq is both strict and negated
    case Js::OpCode::CmNeq_A:
        isNegOp = true;
        break;
    }

    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!this->GenerateFastBooleanAndObjectEqLikely(instr, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
    {
        return false;
    }

    instr->InsertBefore(labelEqualLikely);

    // $labelEqualLikely
    //
    // Will only come here for
    //      if src2 is dynamic object(matches Js::DynamicObject::`vtable'), for non strict cm both src1 and src2 should be dynamic object
    //      or if src2 is builtin recyclableobject(typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
    //      or if CustomExternalType with no operations usage flags
    //
    // src1->IsEqual(src2)
    //  MOV DST SUCCESS
    //  JMP $DONE
    // CMP src1, src2
    //  MOV DST SUCCESS
    //  JEQ $DONE
    //  MOV DST FAILURE
    //  JMP $DONE
    LibraryValue successValueType = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
    LibraryValue failureValueType = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;

    if (src1->IsEqual(src2))
    {
        // Same operand on both sides: the compare trivially succeeds.
        LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
    }
    else
    {
        IR::LabelInstr *cmEqual = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
        this->InsertCompareBranch(src1, src2, isStrict ? Js::OpCode::BrSrEq_A : Js::OpCode::BrEq_A, cmEqual, instr);
        LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, failureValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));

        instr->InsertBefore(cmEqual);
        LowererMD::CreateAssign(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
    }

    // Slow path sits behind the helper label; labelDone follows the original instr.
    instr->InsertBefore(labelHelper);
    instr->InsertAfter(labelDone);

    return true;
}
// Emits a fast path for string (in)equality branches and compares: both
// operands must be register operands that have had the string tag and are not
// tagged ints. Delegates the actual character/length comparison to
// GenerateFastStringCheck; this function sets up the success/fail/helper
// labels per opcode and, for compares, loads the library true/false result.
// Returns false (emitting nothing) when the operands don't qualify.
bool
Lowerer::GenerateFastBrOrCmString(IR::Instr* instr)
{
    IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;

    if (!srcReg1 ||
        !srcReg2 ||
        srcReg1->IsTaggedInt() ||
        srcReg2->IsTaggedInt() ||
        !srcReg1->GetValueType().HasHadStringTag() ||
        !srcReg2->GetValueType().HasHadStringTag())
    {
        return false;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *labelBranchFail = nullptr;
    IR::LabelInstr *labelBranchSuccess = nullptr;

    bool isEqual = false;
    bool isStrict = false;
    bool isBranch = true;
    bool isCmNegOp = false;

    // Each strict-opcode case sets isStrict and intentionally falls through
    // into the corresponding loose-opcode case.
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        isStrict = true;
        // intentional fall through
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        // Equality branch: branch target is the "equal" outcome; the "fail"
        // label lands just after the instruction (fallthrough).
        labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchSuccess = instr->AsBranchInstr()->GetTarget();
        instr->InsertAfter(labelBranchFail);
        isEqual = true;
        break;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        isStrict = true;
        // intentional fall through
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        // Inequality branch: the branch target is the "not equal" outcome.
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchFail = instr->AsBranchInstr()->GetTarget();
        instr->InsertAfter(labelBranchSuccess);
        isEqual = false;
        break;

    case Js::OpCode::CmSrEq_A:
        isStrict = true;
        // intentional fall through
    case Js::OpCode::CmEq_A:
        isEqual = true;
        isBranch = false;
        break;

    case Js::OpCode::CmSrNeq_A:
        isStrict = true;
        // intentional fall through
    case Js::OpCode::CmNeq_A:
        isEqual = false;
        isBranch = false;
        isCmNegOp = true;
        break;

    default:
        Assume(UNREACHED);
    }

    if (!isBranch)
    {
        // Compares synthesize their own success/fail labels to load the result into dst.
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    }

    GenerateFastStringCheck(instr, srcReg1, srcReg2, isEqual, isStrict, labelHelper, labelBranchSuccess, labelBranchFail);

    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    if (!isBranch)
    {
        // For a negated compare (CmNeq/CmSrNeq) the success label means
        // "strings differ", so the loaded values are swapped accordingly.
        const LibraryValue successValueType = !isCmNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
        const LibraryValue failureValueType = !isCmNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;

        instr->InsertBefore(labelBranchSuccess);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        instr->InsertBefore(labelBranchFail);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
    }

    instr->InsertBefore(labelHelper);

    instr->InsertAfter(labelFallthrough);

#if DBG
    // The fast-path for strings assumes the case where 2 strings are equal is rare, and marks that path as 'helper'.
    // This breaks the helper label dbchecks as it can result in non-helper blocks be reachable only from helper blocks.
    // Use m_isHelperToNonHelperBranch and m_noHelperAssert to fix this.
    IR::Instr *blockEndInstr;
    if (isEqual)
    {
        blockEndInstr = labelHelper->GetNextBranchOrLabel();
    }
    else
    {
        blockEndInstr = instr->GetNextBranchOrLabel();
    }

    if (blockEndInstr->IsBranchInstr())
    {
        blockEndInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
    }
    labelFallthrough->m_noHelperAssert = true;
#endif

    return true;
}
bool
Lowerer::GenerateFastStringCheck(IR::Instr *instr, IR::RegOpnd *srcReg1, IR::RegOpnd *srcReg2, bool isEqual, bool isStrict, IR::LabelInstr *labelHelper, IR::LabelInstr *labelBranchSuccess, IR::LabelInstr *labelBranchFail)
{
    // Emits the inline fast path for string (in)equality used by both the branch
    // (Br*) and compare (Cm*) forms. Control leaves the emitted code through one of
    // the caller-supplied labels:
    //   labelBranchSuccess - strings proven equal
    //   labelBranchFail    - strings proven not equal
    //   labelHelper        - fast path cannot decide (non-string src2 in non-strict
    //                        mode, or either string is not flat), fall back to helper
    // Returns true to indicate the fast path was generated.
    //
    // NOTE(review): the isEqual parameter is not consulted anywhere in this body —
    // confirm whether it is vestigial or callers rely on it elsewhere.
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrEq_A ||
        instr->m_opcode == Js::OpCode::BrNeq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
        instr->m_opcode == Js::OpCode::BrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A ||
        instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A);

    // if src1 is not string
    //     generate object test, if not equal jump to $helper
    //     compare type check to string, if not jump to $helper
    //
    // if strict mode generate string test as above for src2 and jump to $failure if failed any time
    // else if not strict generate string test as above for src2 and jump to $helper if failed any time
    //
    // Compare length of src1 and src2 if not equal goto $failure
    //
    // if src1 is not flat string jump to $helper
    //
    // if src1 and src2 m_pszValue pointer match goto $success
    //
    // if src2 is not flat string jump to $helper
    //
    // if first character of src1 and src2 doesn't match goto $failure
    //
    // eax = wmemcmp(src1String, src2String, length)
    //
    // test eax (result of wmemcmp)
    // if equal jump to $success else to $failure
    //
    // $success
    //     jmp to $fallthrough
    // $failure
    //     jmp to $fallthrough
    // $helper
    //
    // $fallthrough

    // Generates:
    //     GenerateObjectTest(src1);
    //     CMP srcReg1, srcReg2                       - Ptr comparison
    //     JEQ $success
    //     MOV s1, [srcReg1 + offset(Type)]
    //     CMP type, static_string_type
    //     JNE $helper
    //     GenerateObjectTest(src2);
    //     MOV s2, [srcReg2 + offset(Type)]
    //     CMP type, static_string_type
    //     JNE $fail                                  ; if src1 is string but not src2, src1 !== src2 if isStrict
    //     MOV s3, [srcReg1,offset(m_charLength)]
    //     CMP [srcReg2,offset(m_charLength)], s3
    //     JNE $fail                                  <--- length check done
    //     MOV s4, [srcReg1,offset(m_pszValue)]
    //     CMP s4, 0
    //     JEQ $helper
    //     MOV s5, [srcReg2,offset(m_pszValue)]
    //     CMP s5, 0
    //     JEQ $helper
    //     MOV s6,[s4]
    //     CMP [s5], s6                               - First character comparison
    //     JNE $fail
    //     eax = wmemcmp(src1String, src2String, length)
    //     TEST eax, eax
    //     JEQ $success
    //     JMP $fail
    IR::Instr* instrInsert = instr;

    GenerateStringTest(srcReg1, instrInsert, labelHelper);

    // Same operand on both sides: trivially equal, no further checks needed.
    if (srcReg1->IsEqual(srcReg2))
    {
        InsertBranch(Js::OpCode::Br, labelBranchSuccess, instrInsert);
#if DBG
        if (instr->IsBranchInstr())
        {
            // we might have other cases on helper path which will generate branch to the target
            instr->AsBranchInstr()->GetTarget()->m_noHelperAssert = true;
        }
#endif
        return true;
    }

    // CMP srcReg1, srcReg2                       - Ptr comparison
    // JEQ $branchSuccess
    InsertCompareBranch(srcReg1, srcReg2, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);

    // Under strict equality a non-string src2 decides the comparison (fail); under
    // loose equality it may still coerce, so defer to the helper instead.
    if (isStrict)
    {
        GenerateStringTest(srcReg2, instrInsert, labelBranchFail);
    }
    else
    {
        GenerateStringTest(srcReg2, instrInsert, labelHelper);
    }

    // MOV s3, [srcReg1,offset(m_charLength)]
    // CMP [srcReg2,offset(m_charLength)], s3
    // JNE $branchfail
    IR::RegOpnd * src1LengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(src1LengthOpnd, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), instrInsert);
    InsertCompareBranch(IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), src1LengthOpnd, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);

    // A null m_pszValue means the string is not flat (e.g. still a rope); only the
    // helper knows how to flatten it.
    // MOV s4, [src1,offset(m_pszValue)]
    // CMP s4, 0
    // JEQ $helper
    // MOV s5, [src2,offset(m_pszValue)]
    // CMP s5, 0
    // JEQ $helper
    // NOTE(review): the zero here is materialized as TyUint32 while s4/s5 are
    // TyMachPtr — confirm the backend handles this compare correctly on 64-bit.
    IR::RegOpnd * src1FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src1FlatString, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
    InsertCompareBranch(src1FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);

    IR::RegOpnd * src2FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src2FlatString, IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
    InsertCompareBranch(src2FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);

    // Cheap early-out before the call: compare the first character.
    // MOV s6,[s4]
    // CMP [s5], s6                               - First character comparison
    // JNE $branchfail
    IR::RegOpnd * src1FirstChar = IR::RegOpnd::New(TyUint16, m_func);
    InsertMove(src1FirstChar, IR::IndirOpnd::New(src1FlatString, 0, TyUint16, m_func), instrInsert);
    InsertCompareBranch(IR::IndirOpnd::New(src2FlatString, 0, TyUint16, m_func), src1FirstChar, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);

    // eax = wmemcmp(src1String, src2String, length)
    // Helper args are pushed in reverse order: length, src1, src2.
    m_lowererMD.LoadHelperArgument(instr, src1LengthOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1FlatString);
    m_lowererMD.LoadHelperArgument(instr, src2FlatString);
    IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(IR::HelperWMemCmp, m_func), m_func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 3);

    // TEST eax, eax
    // JEQ success
    InsertTestBranch(dstOpnd, dstOpnd, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);
    // JMP fail
    InsertBranch(Js::OpCode::Br, labelBranchFail, instrInsert);

    return true;
}
// Lowers BrTrue_A/BrFalse_A with a sequence of inline fast paths keyed off the
// source operand's profiled ValueType: typespec'd float, null, undefined, tagged
// int, boxed float, boolean, string, then object. Each fast path that cannot fully
// decide the branch narrows srcValueType and falls through to the next one; anything
// left over goes to a JavascriptConversion::ToBoolean helper call. Returns false in
// every path to tell the caller no further lowering of this instruction is needed.
bool Lowerer::GenerateFastBrBool(IR::BranchInstr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);

    Func *const func = instr->m_func;

    // Make sure the source is in a register so it can be tested repeatedly.
    if(!instr->GetSrc1()->IsRegOpnd())
    {
        LowererMD::ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
    }
    IR::RegOpnd *const src = instr->GetSrc1()->Copy(func)->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseSrc(src, func);
    // srcValueType is progressively narrowed below as each fast path rules types out;
    // srcOriginalValueType keeps the unmodified profile info.
    const ValueType srcOriginalValueType(src->GetValueType());
    ValueType srcValueType(srcOriginalValueType);

    IR::LabelInstr *const labelTarget = instr->GetTarget();
    IR::LabelInstr *const labelFallthrough = instr->GetOrCreateContinueLabel();
    if(labelTarget == labelFallthrough)
    {
        // Branch and fallthrough coincide — the branch is a no-op either way.
        // Nothing to do
        instr->Remove();
        return false;
    }

    // Map the true/false outcomes onto target/fallthrough once, up front.
    const bool branchOnFalse = instr->m_opcode == Js::OpCode::BrFalse_A;
    IR::LabelInstr *const labelFalse = branchOnFalse ? labelTarget : labelFallthrough;
    IR::LabelInstr *const labelTrue = branchOnFalse ? labelFallthrough : labelTarget;
    const Js::OpCode compareWithFalseBranchToTargetOpCode = branchOnFalse ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A;

    // Tracks the most recently emitted "not this type" label so it can be flagged as
    // a helper label if it ends up immediately preceding the helper call.
    IR::LabelInstr *lastLabelBeforeHelper = nullptr;

    /// Typespec'd float
    if (instr->GetSrc1()->GetType() == TyFloat64)
    {
        // A type-specialized double is falsy iff it is +/-0 or NaN.
        InsertFloatCheckForZeroOrNanBranch(instr->GetSrc1(), branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
        instr->Remove();
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Null fast path
    if (srcValueType.HasBeenNull() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsNull())
        {
            // Definitely null: null is always falsy.
            // jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // cmp src, null
        // je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Null));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Undefined fast path
    if(srcValueType.HasBeenUndefined() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsUndefined())
        {
            // Definitely undefined: undefined is always falsy.
            // jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // cmp src, undefined
        // je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Undefined));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Tagged int fast path
    const bool isNotInt = src->IsNotInt();
    bool checkedForTaggedInt = isNotInt;
    if( (
            srcValueType.HasBeenInt() ||
            srcValueType.HasBeenUnknownNumber() ||
            srcOriginalValueType.IsUninitialized()
        ) && !isNotInt)
    {
        checkedForTaggedInt = true;
        IR::LabelInstr *notTaggedIntLabel = nullptr;
        if(!src->IsTaggedInt())
        {
            // test src, 1
            // jz $notTaggedInt
            notTaggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            m_lowererMD.GenerateSmIntTest(src, instr, notTaggedIntLabel);
        }

        // A tagged int is falsy iff its payload is zero.
        // cmp src, tag(0)
        // je/jne $target
        m_lowererMD.GenerateTaggedZeroTest(src, instr);
        Lowerer::InsertBranch(compareWithFalseBranchToTargetOpCode, labelTarget, instr);

        if(src->IsTaggedInt())
        {
            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notTaggedInt:
        if(notTaggedIntLabel)
        {
            instr->InsertBefore(notTaggedIntLabel);
            lastLabelBeforeHelper = notTaggedIntLabel;
        }
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Float fast path
    bool generateFloatTest = srcValueType.IsLikelyFloat();
#ifdef _M_IX86
    // Without SSE2 there is no cheap inline double compare on x86.
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        generateFloatTest = false;
    }
#endif

    bool checkedForTaggedFloat =
#if FLOATVAR
        srcValueType.IsNotNumber();
#else
        true; // there are no tagged floats, indicate that it has been checked
#endif

    if (generateFloatTest)
    {
        // if(srcValueType.IsFloat()) // skip tagged int check?
        //
        // ValueType::IsFloat() does not guarantee that the storage is not in a tagged int.
        // The tagged int check is necessary. It does, however, guarantee that as long as the value is not
        // stored in a tagged int, that it is definitely stored in a JavascriptNumber/TaggedFloat.
        IR::LabelInstr *const notFloatLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        if(!checkedForTaggedInt)
        {
            checkedForTaggedInt = true;
            m_lowererMD.GenerateSmIntTest(src, instr, notFloatLabel, nullptr, true);
        }

        // cmp [src], JavascriptNumber::vtable
        // jne $notFloat
#if FLOATVAR
        checkedForTaggedFloat = true;
        IR::RegOpnd *const floatOpnd = m_lowererMD.CheckFloatAndUntag(src, instr, notFloatLabel);
#else
        m_lowererMD.GenerateFloatTest(src, instr, notFloatLabel);
        IR::IndirOpnd *const floatOpnd = IR::IndirOpnd::New(src, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, func);
#endif

        // A double is falsy iff it is +/-0 or NaN.
        // cmp src, 0.0
        // jp $false
        // je/jne $target
        // jmp $fallthrough
        InsertFloatCheckForZeroOrNanBranch(floatOpnd, branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notFloat:
        instr->InsertBefore(notFloatLabel);
        lastLabelBeforeHelper = notFloatLabel;
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::AnyNumber));
    }

    // Emit the (is-this-an-object) tag test at most once, lazily, the first time a
    // fast path below needs to dereference the value.
    IR::LabelInstr *labelHelper = nullptr;
    bool _didObjectTest = checkedForTaggedInt && checkedForTaggedFloat;
    const auto EnsureObjectTest = [&]()
    {
        if(_didObjectTest)
        {
            return;
        }
        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(src, instr, labelHelper);
        _didObjectTest = true;
    };

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Boolean fast path
    if (srcValueType.HasBeenBoolean() || srcOriginalValueType.IsUninitialized())
    {
        IR::LabelInstr *notBooleanLabel = nullptr;
        if (!srcValueType.IsBoolean())
        {
            EnsureObjectTest();

            // cmp [src], JavascriptBoolean::vtable
            // jne $notBoolean
            notBooleanLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            InsertCompareBranch(
                IR::IndirOpnd::New(src, 0, TyMachPtr, func),
                LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptBoolean),
                Js::OpCode::BrNeq_A,
                notBooleanLabel,
                instr);
        }

        // cmp src, false
        // je/jne $target
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if (srcValueType.IsBoolean())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        if (notBooleanLabel)
        {
            instr->InsertBefore(notBooleanLabel);
            lastLabelBeforeHelper = notBooleanLabel;
        }
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Boolean));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // String fast path
    if(srcValueType.HasBeenString())
    {
        IR::LabelInstr *notStringLabel = nullptr;
        if(!srcValueType.IsString())
        {
            EnsureObjectTest();

            notStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            GenerateStringTest(src, instr, notStringLabel, nullptr, false);
        }

        // A string is falsy iff it is empty (length zero).
        // cmp [src + offset(length)], 0
        // jeq/jne $target
        InsertCompareBranch(
            IR::IndirOpnd::New(src, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func),
            IR::IntConstOpnd::New(0, TyUint32, func, true),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if(srcValueType.IsString())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        if(notStringLabel)
        {
            instr->InsertBefore(notStringLabel);
            lastLabelBeforeHelper = notStringLabel;
        }
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::String));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Object fast path
    if (srcValueType.IsLikelyObject())
    {
        if(srcValueType.IsObject())
        {
            if(srcValueType.GetObjectType() > ObjectType::Object)
            {
                // Specific object types that are tracked are equivalent to 'true'
                // jmp $true
                InsertBranch(Js::OpCode::Br, labelTrue, instr);

                // Skip lowering call to helper
                Assert(!labelHelper);
                Assert(instr->m_prev->IsBranchInstr());
                instr->Remove();
                return false;
            }
        }
        else
        {
            EnsureObjectTest();
        }

        // mov srcType, [src + offset(type)] -- load type
        IR::RegOpnd *const srcType = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseR1(srcType, func);
        InsertMove(srcType, IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func), instr);

        // test [srcType + offset(flags)], TypeFlagMask_IsFalsy -- check if falsy
        // jnz $false
        InsertTestBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfFlags(), TyUint8, func),
            IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyUint8, func),
            Js::OpCode::BrNeq_A,
            labelFalse,
            instr);

        // Any type id past the last primitive type is a real object => truthy.
        // cmp [srcType + offset(typeId)], TypeIds_LastJavascriptPrimitiveType -- check base TypeIds_LastJavascriptPrimitiveType
        // ja $true
        InsertCompareBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfTypeId(), TyInt32, func),
            IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, func),
            Js::OpCode::BrGt_A,
            true /* isUnsigned */,
            labelTrue,
            instr);

        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        // The helper is reached by falling through the checks above, not via a
        // dedicated "not this type" label.
        lastLabelBeforeHelper = nullptr;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Helper call

    // $helper:
    if(lastLabelBeforeHelper)
    {
        Assert(instr->m_prev == lastLabelBeforeHelper);
        lastLabelBeforeHelper->isOpHelper = true;
    }
    if (labelHelper)
    {
        Assert(labelHelper->isOpHelper);
        instr->InsertBefore(labelHelper);
    }

    // call JavascriptConversion::ToBoolean
    IR::RegOpnd *const toBoolDst = IR::RegOpnd::New(TyInt32, func);
    const IR::AutoReuseOpnd autoReuseToBoolDst(toBoolDst, func);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, toBoolDst, instr->GetSrc1(), func);
    instr->InsertBefore(callInstr);
    LowerUnaryHelperMem(callInstr, IR::HelperConv_ToBoolean);

    // test eax, eax
    InsertTest(toBoolDst, toBoolDst, instr);

    // Reuse the original branch instruction as the final conditional jump.
    // je/jne $target
    Assert(instr->IsBranchInstr());
    instr->FreeSrc1();
    instr->m_opcode = LowererMD::MDBranchOpcode(compareWithFalseBranchToTargetOpCode);
    Assert(instr->AsBranchInstr()->GetTarget() == labelTarget);

    // Skip lowering another call to helper
    return false;
}
  18831. // Helper method used in LowerMD by all platforms.
  18832. // Creates HelperCallOpnd or DiagHelperCallOpnd, based on helperMethod and state.
  18833. // static
  18834. IR::HelperCallOpnd*
  18835. Lowerer::CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func)
  18836. {
  18837. Assert(func);
  18838. IR::HelperCallOpnd* helperCallOpnd;
  18839. if (CONFIG_FLAG(EnableContinueAfterExceptionWrappersForHelpers) &&
  18840. func->IsJitInDebugMode() &&
  18841. HelperMethodAttributes::CanThrow(helperMethod))
  18842. {
  18843. // Create DiagHelperCallOpnd to indicate that it's needed to wrap original helper with try-catch wrapper,
  18844. // so that we can ignore exception and bailout to next stmt in debugger.
  18845. // For details, see: Lib\Runtime\Debug\DiagHelperMethodWrapper.{h,cpp}.
  18846. helperCallOpnd = IR::DiagHelperCallOpnd::New(helperMethod, func, helperArgCount);
  18847. }
  18848. else
  18849. {
  18850. helperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
  18851. }
  18852. return helperCallOpnd;
  18853. }
bool
Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool isNeqOp, bool *pfNoLower)
{
    // Pattern-matches a branch/compare whose operand(s) come from Typeof and lowers
    // it without materializing the type-name string:
    //   1) "typeof x ==/!= <known type-name string literal>"  -> fast Br/CmTypeOf
    //      against the corresponding TypeId (via GenerateFastBrTypeOf/CmTypeOf), or
    //   2) "typeof a ==/!= typeof b"                          -> direct comparison of
    //      the two typeof results (same operand trivially equal).
    // On a successful match, *prev is set to the instruction before the consumed
    // Typeof and *pfNoLower tells the caller whether further lowering is needed.
    // Returns true iff one of the patterns was matched and lowered.
    Assert(prev);
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrEq_A ||
        instr->m_opcode == Js::OpCode::BrNeq_A ||
        instr->m_opcode == Js::OpCode::BrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A);

    //
    // instr         - (Br/Cm)(Sr)(N(ot))eq_A
    // instr->m_prev - typeOf
    //
    IR::Instr *instrLd = instr->GetPrevRealInstrOrLabel();
    bool skippedLoads = false;

    // Skip intermediate Ld_A which might be inserted by flow graph peeps, but only
    // if they are peep temps without bailout info — anything else defeats the match.
    while (instrLd && instrLd->m_opcode == Js::OpCode::Ld_A )
    {
        if (!(instrLd->GetDst()->IsRegOpnd() && instrLd->GetDst()->AsRegOpnd()->m_fgPeepTmp))
        {
            return false;
        }
        if (instrLd->HasBailOutInfo())
        {
            return false;
        }
        instrLd = instrLd->GetPrevRealInstrOrLabel();
        skippedLoads = true;
    }

    IR::Instr *typeOf = instrLd;

    IR::RegOpnd *instrSrc1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *instrSrc2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;

    // Pattern 1: one operand is the preceding Typeof's dst, the other a string const.
    if (typeOf && (typeOf->m_opcode == Js::OpCode::Typeof))
    {
        IR::RegOpnd *typeOfDst = typeOf->GetDst()->IsRegOpnd() ? typeOf->GetDst()->AsRegOpnd() : nullptr;

        if (typeOfDst && instrSrc1 && instrSrc2)
        {
            // Figure out which source is the typeof result and which is the name.
            IR::RegOpnd *typeOpnd = nullptr;
            IR::RegOpnd *idOpnd = nullptr;
            if (instrSrc1->m_sym == typeOfDst->m_sym)
            {
                typeOpnd = instrSrc1;
                idOpnd = instrSrc2;
            }
            else if (instrSrc2->m_sym == typeOfDst->m_sym)
            {
                typeOpnd = instrSrc2;
                idOpnd = instrSrc1;
            }
            else
            {
                // Neither source turned out to be the typeOpnd
                return false;
            }

            // The typeof result must die here; otherwise it is observable elsewhere
            // and we cannot elide its materialization.
            if (!typeOpnd->m_isTempLastUse)
            {
                return false;
            }

            if (!(idOpnd->m_sym->m_isSingleDef && idOpnd->m_sym->m_isStrConst))
            {
                return false;
            }

            // The second argument to [Cm|Br]TypeOf is the typeid.
            IR::IntConstOpnd *typeIdOpnd = nullptr;

            Assert(idOpnd->m_sym->m_isSingleDef);
            Assert(idOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd());

            // Map the literal type-name string to its TypeId.
            // We can't optimize non-javascript type strings.
            JITJavascriptString *typeNameJsString = JITJavascriptString::FromVar(idOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_localAddress);
            const char16 *typeName = typeNameJsString->GetString();

            Js::InternalString typeNameString(typeName, typeNameJsString->GetLength());
            if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::UndefinedTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::ObjectTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Object, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::BooleanTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Boolean, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::NumberTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::StringTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_String, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::FunctionTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, instr->m_func);
            }
            else
            {
                return false;
            }

            if (skippedLoads)
            {
                // validate none of dst of Ld_A overlaps with typeof src or dst
                IR::Opnd* typeOfSrc = typeOf->GetSrc1();
                instrLd = typeOf->GetNextRealInstr();
                while (instrLd != instr)
                {
                    if (instrLd->GetDst()->IsEqual(typeOfDst) || instrLd->GetDst()->IsEqual(typeOfSrc))
                    {
                        return false;
                    }
                    instrLd = instrLd->GetNextRealInstr();
                }
                // Safe to move the Typeof right next to the consuming branch/compare.
                typeOf->Unlink();
                instr->InsertBefore(typeOf);
            }

            // The first argument to [Cm|Br]TypeOf is the first arg to the TypeOf instruction.
            IR::Opnd *objectOpnd = typeOf->GetSrc1();
            Assert(objectOpnd->IsRegOpnd());

            // Now emit this instruction and remove the ldstr and typeOf.
            *prev = typeOf->m_prev;
            *pfNoLower = false;
            if (instr->IsBranchInstr())
            {
                GenerateFastBrTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
            }
            else
            {
                GenerateFastCmTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
            }

            return true;
        }
    }

    // Pattern 2: both operands are (single-def) typeof results; compare them directly.
    // Typeof returns interned type-name strings, so pointer comparison suffices.
    if (instrSrc1 && instrSrc1->GetStackSym()->IsSingleDef() && instrSrc2 && instrSrc2->GetStackSym()->IsSingleDef() &&
        instrSrc1->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof &&
        instrSrc2->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof)
    {
        *pfNoLower = true;
        if (instr->IsBranchInstr())
        {
            if (instrSrc1->IsEqual(instrSrc2))
            {
                // Same typeof result: branch is unconditional (taken iff not a != op).
                if (!isNeqOp)
                {
                    InsertBranch(Js::OpCode::Br, instr->AsBranchInstr()->GetTarget(), instr);
                }
            }
            else
            {
                InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, instr->AsBranchInstr()->GetTarget(), instr);
            }
            instr->Remove();
        }
        else
        {
            if (instrSrc1->IsEqual(instrSrc2))
            {
                // Same typeof result: the compare folds to a constant boolean.
                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, isNeqOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue), instr);
            }
            else
            {
                // t1 = typeof o1
                // t2 = typeof o2
                // dst = t1 == t2
                //
                // MOV dst, true
                // CMP t1, t2
                // x86, amd64
                //     CMOVNE dst, false
                // arm
                //     BEQ $done
                //     MOV dst, false
                // $done

                // If dst aliases a source, hoist the source so writing dst first
                // doesn't clobber an operand of the compare.
                if (instr->GetDst()->IsEqual(instrSrc1))
                {
                    IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
                    instrSrc1 = hoistInstr->GetDst()->AsRegOpnd();
                }
                if (instr->GetDst()->IsEqual(instrSrc2))
                {
                    IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
                    instrSrc2 = hoistInstr->GetDst()->AsRegOpnd();
                }

                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);

#if defined(_M_ARM32_OR_ARM64)
                IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
                InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, doneLabel, instr);
                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
                instr->InsertBefore(doneLabel);
#else
                InsertCompare(instrSrc1, instrSrc2, instr);
                m_lowererMD.InsertCmovCC(isNeqOp ? Js::OpCode::CMOVE : Js::OpCode::CMOVNE, instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
#endif
            }
            instr->Remove();
        }
        return true;
    }
    return false;
}
  19058. void
  19059. Lowerer::GenerateFalsyObjectTest(IR::Instr * insertInstr, IR::RegOpnd * typeOpnd, IR::LabelInstr * falsyLabel)
  19060. {
  19061. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  19062. InsertTestBranch(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), Js::OpCode::BrNeq_A, falsyLabel, insertInstr);
  19063. }
  19064. void
  19065. Lowerer::GenerateFalsyObjectTest(IR::Instr *insertInstr, IR::RegOpnd *typeOpnd, Js::TypeId typeIdToCheck, IR::LabelInstr* target, IR::LabelInstr* done, bool isNeqOp)
  19066. {
  19067. if (!this->m_func->GetThreadContextInfo()->CanBeFalsy(typeIdToCheck) && typeIdToCheck != Js::TypeIds_Undefined)
  19068. {
  19069. // Don't need the check for falsy, the typeId we are looking for doesn't care
  19070. return;
  19071. }
  19072. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  19073. InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), insertInstr);
  19074. if (typeIdToCheck == Js::TypeIds_Undefined)
  19075. {
  19076. //Falsy object returns true for undefined ((typeof falsyObj) == "undefined")
  19077. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp ? done : target, insertInstr);
  19078. }
  19079. else
  19080. {
  19081. //Falsy object returns false for all other types ((typeof falsyObj) != "function")
  19082. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp? target : done , insertInstr);
  19083. }
  19084. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastBrTypeOf
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastBrTypeOf(IR::Instr *branch, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
{
    // Emits an inline fast path for a branch on (typeof object ==/!= "<type>"),
    // where the compared string has already been resolved to a TypeId
    // (typeIdOpnd). The branch jumps to its target when the comparison holds;
    // $done is the fall-through. isNeqOp flips which label each check jumps to.
    //   typeOf    - the original Typeof instruction; moved into the helper
    //               block for "object"/"function", removed otherwise
    //   pfNoLower - set to true when the branch was fully lowered inline so
    //               the caller skips the generic lowering
    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *target = branch->AsBranchInstr()->GetTarget();
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

    // Only the (strict) equality/inequality branch forms are expected here.
    switch(branch->m_opcode)
    {
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        break;

    default:
        Assert(UNREACHED);
        __assume(UNREACHED);
    }

    // Tagged (non-pointer) values have typeof "number", so the object test
    // branches straight to $target when testing for Number, else to $done.
    // JNE/BNE (typeId == Js::TypeIds_Number) ? $target : $done
    IR::LabelInstr *label = (typeId == Js::TypeIds_Number) ? target : done;
    if (isNeqOp)
        label = (label == target) ? done : target;

    m_lowererMD.GenerateObjectTest(object, branch, label);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               branch);

    // Falsy objects report typeof "undefined"; handled by the shared helper.
    GenerateFalsyObjectTest(branch, typeRegOpnd, typeId, target, done, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               branch);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // Every TypeId at or above TypeIds_Object reports typeof "object".
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        // Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType,
        // done as one unsigned compare after biasing by the lower bound.
        InsertSub(false, objTypeIdOpnd, objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);

        InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_LastNumberType - Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);

        InsertBranch(isNeqOp ? Js::OpCode::BrGt_A : Js::OpCode::BrLe_A, true, target, branch);
    }
    else
    {
        InsertCompare(objTypeIdOpnd, typeIdOpnd, branch);
        InsertBranch(isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, target, branch);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $done : $target
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(branch, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? done : target,
                            branch);
    }

    branch->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
        // JEQ $helper
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            branch);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? target : done,
                            branch);

        // Now emit Typeof and lower it like we would've for the helper call.
        {
            branch->InsertBefore(helper);
            typeOf->Unlink();
            branch->InsertBefore(typeOf);
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        branch->Remove();
        *pfNoLower = true;
    }

    // $done:
}
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastCmTypeOf
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastCmTypeOf(IR::Instr *compare, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
{
    // Emits an inline fast path for dst = (typeof object ==/!= "<type>") where
    // the compared string has been resolved to a TypeId (typeIdOpnd). dst is
    // preloaded with 'true'; checks that fail land on $movFalse which rewrites
    // dst to 'false'. isNeqOp swaps the $done/$movFalse targets throughout.
    //   typeOf    - the original Typeof instruction; moved into the helper
    //               block for "object"/"function", removed otherwise
    //   pfNoLower - set to true when the compare was fully lowered inline
    Assert(compare->m_opcode == Js::OpCode::CmSrEq_A ||
           compare->m_opcode == Js::OpCode::CmEq_A ||
           compare->m_opcode == Js::OpCode::CmSrNeq_A ||
           compare->m_opcode == Js::OpCode::CmNeq_A);

    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *movFalse = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper= IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *dst = compare->GetDst()->IsRegOpnd() ? compare->GetDst()->AsRegOpnd() : nullptr;
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

    Assert(dst);

    if (dst->IsEqual(object))
    {
        //dst same as the src of typeof. As we need to move true to dst first we need to save the src to a new opnd
        IR::RegOpnd *newObject = IR::RegOpnd::New(object->GetType(), m_func);
        InsertMove(newObject, object, compare); //Save src
        object = newObject;
    }

    // mov dst, 'true'
    InsertMove(dst,
               LoadLibraryValueOpnd(compare, LibraryValue::ValueTrue),
               compare);

    // Tagged (non-pointer) values have typeof "number".
    // TEST object, 1
    // JNE (typeId == Js::TypeIds_Number) ? $done : $movFalse
    IR::LabelInstr *target = (typeId == Js::TypeIds_Number) ? done : movFalse;
    if (isNeqOp)
    {
        target = (target == done) ? movFalse : done;
    }
    m_lowererMD.GenerateObjectTest(object, compare, target);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               compare);

    // Falsy objects report typeof "undefined"; handled by the shared helper.
    GenerateFalsyObjectTest(compare, typeRegOpnd, typeId, done, movFalse, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               compare);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // Every TypeId at or above TypeIds_Object reports typeof "object".
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        //Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_LastNumberType, TyInt32, compare->m_func),
                            Js::OpCode::BrGt_A,
                            isNeqOp ? done : movFalse,
                            compare);

        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, compare->m_func),
                            isNeqOp? Js::OpCode::BrLt_A : Js::OpCode::BrGe_A,
                            done,
                            compare);
    }
    else
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, done, compare);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $movFalse : $done
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(compare, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? movFalse : done,
                            compare);
    }

    compare->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
        // JEQ $helper
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            compare);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? done : movFalse,
                            compare);

        // Now emit Typeof like we would've for the helper call.
        {
            compare->InsertBefore(helper);
            typeOf->Unlink();
            compare->InsertBefore(typeOf);
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }

        // Jump over the $movFalse block emitted below.
        // JMP/B $done
        InsertBranch(Js::OpCode::Br, done, done);
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        compare->Remove();
        *pfNoLower = true;
    }

    // $movFalse: (insert before $done)
    done->InsertBefore(movFalse);

    // MOV dst, 'false'
    InsertMove(dst, LoadLibraryValueOpnd(done, LibraryValue::ValueFalse), done);

    // $done:
}
  19324. void
  19325. Lowerer::GenerateCheckForCallFlagNew(IR::Instr* instrInsert)
  19326. {
  19327. Func *func = instrInsert->m_func;
  19328. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  19329. Assert(!func->IsInlinee());
  19330. // MOV s1, [ebp + 4] // s1 = call info
  19331. // AND s2, s1, Js::CallFlags_New // s2 = s1 & Js::CallFlags_New
  19332. // CMP s2, 0
  19333. // JNE $Done
  19334. // CALL RuntimeTypeError
  19335. // $Done
  19336. IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
  19337. Assert(Js::CallInfo::ksizeofCount == 24);
  19338. IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyUint32, func);
  19339. InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), instrInsert);
  19340. InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelDone, instrInsert);
  19341. IR::Instr *throwInstr = IR::Instr::New(
  19342. Js::OpCode::RuntimeTypeError,
  19343. IR::RegOpnd::New(TyMachReg, m_func),
  19344. IR::IntConstOpnd::New(SCODE_CODE(JSERR_ClassConstructorCannotBeCalledWithoutNew), TyInt32, m_func),
  19345. m_func);
  19346. instrInsert->InsertBefore(throwInstr);
  19347. this->LowerUnaryHelperMem(throwInstr, IR::HelperOp_RuntimeTypeError);
  19348. instrInsert->InsertBefore(labelDone);
  19349. instrInsert->Remove();
  19350. }
void
Lowerer::GenerateJavascriptOperatorsIsConstructorGotoElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnTrue, IR::LabelInstr *labelReturnFalse)
{
    // Inline "is constructor" check: unwraps proxy chains, then branches to
    // labelReturnTrue when the unwrapped instance is a function without the
    // ErrorOnNew attribute, and to labelReturnFalse otherwise. Note that
    // instanceRegOpnd is clobbered: it holds the unwrapped proxy target after
    // the loop.
    //
    // $ProxyLoop:
    // // if (!RecyclableObject::Is(instance)) { goto $ReturnFalse }; // omitted: RecyclableObject::Is(instance) always true
    // MOV s0, instance->type
    // MOV s1, s0->typeId
    // CMP s1, TypeIds_Proxy
    // JNE $NotProxy
    //
    // MOV instance, instance->target
    // JMP $ProxyLoop
    //
    // $NotProxy:
    // CMP s1, TypeIds_Function
    // JNE $ReturnFalse // external
    //
    // MOV s0, instance->functionInfo
    // MOV s1, s0->attributes
    // TEST s1, ErrorOnNew
    // JNE $ReturnFalse // external
    //
    // JMP $ReturnTrue // external
    Func *func = instrInsert->m_func;

    IR::LabelInstr *labelProxyLoop = InsertLoopTopLabel(instrInsert);
    IR::LabelInstr *labelNotProxy = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    IR::RegOpnd *indir0RegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *indir1RegOpnd = IR::RegOpnd::New(TyUint32, func);

    // instance is redefined on the back edge (proxy unwrap), so tell the
    // register allocator to keep it live across the loop.
    Loop * loop = labelProxyLoop->GetLoop();
    loop->regAlloc.liveOnBackEdgeSyms->Set(instanceRegOpnd->m_sym->m_id);

    // s0 = instance->type; s1 = s0->typeId
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    LowererMD::CreateAssign(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    LowererMD::CreateAssign(indir1RegOpnd, indirOpnd, instrInsert);

    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNotProxy, instrInsert);

    // Proxy: unwrap one level (instance = instance->target) and re-test.
    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptProxy::GetOffsetOfTarget(), TyMachPtr, func);
    LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instrInsert);

    InsertBranch(Js::OpCode::Br, labelProxyLoop, instrInsert);

    instrInsert->InsertBefore(labelNotProxy);

    // Non-function => not a constructor.
    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Function, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    // s0 = instance->functionInfo; s1 = s0->attributes
    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, func);
    LowererMD::CreateAssign(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::FunctionInfo::GetAttributesOffset(), TyUint32, func);
    LowererMD::CreateAssign(indir1RegOpnd, indirOpnd, instrInsert);

    // Functions flagged ErrorOnNew cannot be used as constructors.
    InsertTestBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::FunctionInfo::Attributes::ErrorOnNew, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    InsertBranch(Js::OpCode::Br, labelReturnTrue, instrInsert);
}
  19398. void
  19399. Lowerer::GenerateRecyclableObjectGetPrototypeNullptrGoto(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnNullptr)
  19400. {
  19401. // MOV instance, instance->type
  19402. // MOV flags, instance->flags
  19403. // TEST flags, TypeFlagMask_HasSpecialPrototype
  19404. // JNE $ReturnNullptr // external, bypassing nullptr check
  19405. // MOV instance, instance->prototype
  19406. Func *func = instrInsert->m_func;
  19407. IR::RegOpnd *flagsRegOpnd = IR::RegOpnd::New(TyUint32, func);
  19408. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
  19409. LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instrInsert);
  19410. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfFlags(), TyUint32, func);
  19411. LowererMD::CreateAssign(flagsRegOpnd, indirOpnd, instrInsert);
  19412. InsertTestBranch(flagsRegOpnd, IR::IntConstOpnd::New(TypeFlagMask_HasSpecialPrototype, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnNullptr, instrInsert);
  19413. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfPrototype(), TyMachPtr, func);
  19414. LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instrInsert);
  19415. }
  19416. void
  19417. Lowerer::GenerateRecyclableObjectIsElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelFalse)
  19418. {
  19419. Func *func = instrInsert->m_func;
  19420. #if INT32VAR
  19421. InsertTestBranch(instanceRegOpnd, IR::AddrOpnd::New((Js::Var)0xffff000000000000, IR::AddrOpndKindConstantVar, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  19422. #else
  19423. InsertTestBranch(instanceRegOpnd, IR::IntConstOpnd::New(Js::AtomTag, TyUint32, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  19424. #endif
  19425. }
  19426. void
  19427. Lowerer::GenerateLdHomeObj(IR::Instr* instr)
  19428. {
  19429. // MOV dst, undefined
  19430. // MOV instance, functionObject // functionObject through stack params or src1
  19431. // CMP [instance], VtableStackScriptFunction
  19432. // JE $Done
  19433. // MOV instance, instance->homeObj
  19434. // TEST instance, instance
  19435. // JZ $Done
  19436. // MOV dst, instance
  19437. // $Done:
  19438. Func *func = instr->m_func;
  19439. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  19440. IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
  19441. IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
  19442. IR::Opnd *dstOpnd = instr->GetDst();
  19443. Assert(dstOpnd->IsRegOpnd());
  19444. LowererMD::CreateAssign(dstOpnd, opndUndefAddress, instr);
  19445. IR::Opnd * functionObjOpnd = nullptr;
  19446. m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  19447. LowererMD::CreateAssign(instanceRegOpnd, functionObjOpnd, instr);
  19448. IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableStackScriptFunction);
  19449. InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
  19450. Js::OpCode::BrEq_A, true, labelDone, instr);
  19451. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::ScriptFunction::GetOffsetOfHomeObj(), TyMachPtr, func);
  19452. LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instr);
  19453. InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);
  19454. LowererMD::CreateAssign(dstOpnd, instanceRegOpnd, instr);
  19455. instr->InsertBefore(labelDone);
  19456. instr->Remove();
  19457. }
void
Lowerer::GenerateLdHomeObjProto(IR::Instr* instr)
{
    // Loads the prototype of the home object (src1) into dst for 'super'
    // references. dst stays undefined when src1 is null, not a recyclable
    // object, or its prototype is unavailable; throws a reference error when
    // the home object is the null or undefined value. instr is consumed.
    //
    // MOV dst, undefined
    // MOV instance, src1 // homeObj
    // TEST instance, instance
    // JZ $Done
    //
    // if (!RecyclableObject::Is(instance)) goto $Done
    // MOV type, [instance+Offset(type)]
    // MOV typeId, [type+Offset(typeId)]
    // CMP typeId, TypeIds_Null
    // JEQ $Err
    // CMP typeId, TypeIds_Undefined
    // JNE $NoErr
    //
    // $Err:
    // ThrowRuntimeReferenceError(JSERR_BadSuperReference);
    //
    // $NoErr:
    // instance = ((RecyclableObject*)instance)->GetPrototype();
    // if (instance == nullptr) goto $Done;
    //
    // if (!RecyclableObject::Is(instance)) goto $Done
    //
    // MOV dst, instance
    // $Done:
    Func *func = instr->m_func;
    IR::Opnd *src1Opnd = instr->UnlinkSrc1();

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelNoErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);

    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeIdRegOpnd = IR::RegOpnd::New(TyUint32, func);

    IR::Opnd *dstOpnd = instr->GetDst();
    Assert(dstOpnd->IsRegOpnd());

    // dst defaults to undefined; only overwritten on the success path.
    LowererMD::CreateAssign(dstOpnd, opndUndefAddress, instr);

    LowererMD::CreateAssign(instanceRegOpnd, src1Opnd, instr);
    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);

    // Tagged values are not recyclable objects; bail to $Done.
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);

    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    LowererMD::CreateAssign(typeRegOpnd, indirOpnd, instr);

    indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    LowererMD::CreateAssign(typeIdRegOpnd, indirOpnd, instr);

    // A null or undefined home object is an invalid 'super' reference.
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func, true), Js::OpCode::BrEq_A, labelErr, instr);
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Undefined, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNoErr, instr);

    instr->InsertBefore(labelErr);
    this->GenerateRuntimeError(instr, JSERR_BadSuperReference, IR::HelperOp_RuntimeReferenceError);

    instr->InsertBefore(labelNoErr);

    // instance = instance->GetPrototype(); special prototypes bail to $Done.
    this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, labelDone);
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);

    LowererMD::CreateAssign(dstOpnd, instanceRegOpnd, instr);

    instr->InsertBefore(labelDone);
    instr->Remove();
}
  19515. void
  19516. Lowerer::GenerateLdFuncObj(IR::Instr* instr)
  19517. {
  19518. // MOV dst, functionObject // functionObject through stack params or src1
  19519. IR::Opnd *dstOpnd = instr->GetDst();
  19520. IR::Opnd *functionObjOpnd = nullptr;
  19521. m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  19522. LowererMD::CreateAssign(dstOpnd, functionObjOpnd, instr);
  19523. instr->Remove();
  19524. }
  19525. void
  19526. Lowerer::GenerateLdFuncObjProto(IR::Instr* instr)
  19527. {
  19528. // MOV instance, src1
  19529. //
  19530. // instance = ((RecyclableObject*)instance)->GetPrototype();
  19531. // if (instance == nullptr) goto $ThrowTypeError;
  19532. //
  19533. // MOV dst, instance
  19534. //
  19535. // if (!JavascriptOperators::IsConstructor(instance))
  19536. // goto $ThrowTypeError;
  19537. // else
  19538. // goto $Done;
  19539. //
  19540. // $helperLabelThrowTypeError:
  19541. // ThrowRuntimeTypeError(JSERR_NotAConstructor);
  19542. //
  19543. // $Done:
  19544. Func *func = instr->m_func;
  19545. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  19546. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  19547. IR::LabelInstr *helperLabelThrowTypeError = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  19548. IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
  19549. IR::Opnd *dstOpnd = instr->GetDst();
  19550. LowererMD::CreateAssign(instanceRegOpnd, src1Opnd, instr);
  19551. this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, helperLabelThrowTypeError);
  19552. LowererMD::CreateAssign(dstOpnd, instanceRegOpnd, instr);
  19553. this->GenerateJavascriptOperatorsIsConstructorGotoElse(instr, instanceRegOpnd, labelDone, helperLabelThrowTypeError);
  19554. instr->InsertBefore(helperLabelThrowTypeError);
  19555. this->GenerateRuntimeError(instr, JSERR_NotAConstructor, IR::HelperOp_RuntimeTypeError);
  19556. instr->InsertBefore(labelDone);
  19557. instr->Remove();
  19558. }
void
Lowerer::GenerateSetHomeObj(IR::Instr* instrInsert)
{
    // Stores src2 as the home object of the function object in src1.
    // Generator and async functions wrap the actual script function, so the
    // store targets the wrapped function for those. instrInsert is consumed.
    //
    // MOV funcObj, src1
    // CMP [funcObj], VtableJavascriptGeneratorFunction
    // JE  $GeneratorScriptFunction
    // CMP [funcObj], VtableJavascriptAsyncFunction
    // JNE $ScriptFunction
    //
    // $GeneratorScriptFunction:
    // MOV funcObj, funcObj->scriptFunction
    //
    // $ScriptFunction:
    // MOV funcObj->homeObj, src2
    Func *func = instrInsert->m_func;

    IR::LabelInstr *labelScriptFunction = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelForGeneratorScriptFunction = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    IR::Opnd *src2Opnd = instrInsert->UnlinkSrc2();
    IR::Opnd *src1Opnd = instrInsert->UnlinkSrc1();
    IR::RegOpnd *funcObjRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::IndirOpnd *indirOpnd = nullptr;

    Assert(src1Opnd != nullptr && src2Opnd != nullptr);

    LowererMD::CreateAssign(funcObjRegOpnd, src1Opnd, instrInsert);

    // Generator function? Jump to the unwrap block.
    IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instrInsert, VTableValue::VtableJavascriptGeneratorFunction);
    InsertCompareBranch(IR::IndirOpnd::New(funcObjRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
        Js::OpCode::BrEq_A, true, labelForGeneratorScriptFunction, instrInsert);

    // Not an async function either? Plain script function: skip the unwrap.
    // (Async functions fall through into the unwrap block below.)
    vtableAddressOpnd = this->LoadVTableValueOpnd(instrInsert, VTableValue::VtableJavascriptAsyncFunction);
    InsertCompareBranch(IR::IndirOpnd::New(funcObjRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
        Js::OpCode::BrNeq_A, true, labelScriptFunction, instrInsert);

    // funcObj = funcObj->scriptFunction (the wrapped script function)
    instrInsert->InsertBefore(labelForGeneratorScriptFunction);
    indirOpnd = IR::IndirOpnd::New(funcObjRegOpnd, Js::JavascriptGeneratorFunction::GetOffsetOfScriptFunction(), TyMachPtr, func);
    LowererMD::CreateAssign(funcObjRegOpnd, indirOpnd, instrInsert);

    // funcObj->homeObj = src2
    instrInsert->InsertBefore(labelScriptFunction);
    indirOpnd = IR::IndirOpnd::New(funcObjRegOpnd, Js::ScriptFunction::GetOffsetOfHomeObj(), TyMachPtr, func);
    LowererMD::CreateAssign(indirOpnd, src2Opnd, instrInsert);

    instrInsert->Remove();
}
void
Lowerer::GenerateLoadNewTarget(IR::Instr* instrInsert)
{
    // Materializes the new.target value for the current (non-inlined) frame:
    //  - CallFlags_NewTarget set: new.target was passed as an extra trailing
    //    argument; load it from the stack.
    //  - CallFlags_New set: new.target is the function object itself.
    //  - otherwise: undefined.
    // For coroutines the instruction is simply replaced with dst = undefined.
    // instrInsert is consumed (or converted to an assign).
    Func *func = instrInsert->m_func;

    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr * labelLoadArgNewTarget = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);

    Assert(!func->IsInlinee());

    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        instrInsert->SetSrc1(opndUndefAddress);
        LowererMD::ChangeToAssign(instrInsert);
        return;
    }

    // MOV dst, undefined                       // dst = undefined
    // MOV s1, callInfo                         // s1 = callInfo
    // TEST s1, Js::CallFlags_NewTarget << 24   // if (callInfo.Flags & Js::CallFlags_NewTarget)
    // JNE $LoadLastArgument                    //     goto $LoadLastArgument
    // TEST s1, Js::CallFlags_New << 24         // if (!(callInfo.Flags & Js::CallFlags_New))
    // JE $Done                                 //     goto $Done
    // MOV dst, functionObject                  // dst = functionObject
    // JMP $Done                                // goto $Done
    // $LoadLastArgument
    // AND s1, s1, (0x00FFFFFF)                 // s2 = callInfo.Count == arguments.length + 2
    // MOV dst, [ebp + (s1 - 1) * sizeof(Var) + formalParamOffset * sizeof(Var) ] // points to new.target
    // $Done
    IR::Opnd *dstOpnd = instrInsert->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    LowererMD::CreateAssign(dstOpnd, opndUndefAddress, instrInsert);

    // The call flags sit above the 24-bit argument count in callInfo.
    IR::SymOpnd *callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    Assert(Js::CallInfo::ksizeofCount == 24);

    IR::RegOpnd *s1 = IR::RegOpnd::New(TyUint32, func);
    LowererMD::CreateAssign(s1, callInfoOpnd, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_NewTarget << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrNeq_A, labelLoadArgNewTarget, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrEq_A, labelDone, instrInsert);

    // 'new' without an explicit new.target: dst = the function object itself.
    IR::Instr* loadFuncInstr = IR::Instr::New(Js::OpCode::AND, func);
    loadFuncInstr->SetDst(instrInsert->GetDst());
    LoadFuncExpression(loadFuncInstr);
    instrInsert->InsertBefore(loadFuncInstr);
    InsertBranch(Js::OpCode::Br, labelDone, instrInsert);

    instrInsert->InsertBefore(labelLoadArgNewTarget);

    // Mask off the flag bits to recover the argument count.
    InsertAnd(s1, s1, IR::IntConstOpnd::New(0x00FFFFFF, TyUint32, func, true), instrInsert); // callInfo.Count

    // [formalOffset (4) + callInfo.Count -1] points to 'new.target' - see diagram in GenerateLoadStackArgumentByIndex()
    GenerateLoadStackArgumentByIndex(dstOpnd, s1, instrInsert, -1, m_func);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
  19640. void
  19641. Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
  19642. {
  19643. Func * func = this->m_func;
  19644. IR::Instr * insertBeforeInstr = instr->m_next;
  19645. IR::RegOpnd * functionObjectOpnd = instr->GetDst()->AsRegOpnd();
  19646. IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction);
  19647. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  19648. InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
  19649. Js::OpCode::BrNeq_A, true, labelDone, insertBeforeInstr);
  19650. IR::RegOpnd * boxedFunctionObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
  19651. InsertMove(boxedFunctionObjectOpnd, IR::IndirOpnd::New(functionObjectOpnd,
  19652. Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertBeforeInstr);
  19653. InsertTestBranch(boxedFunctionObjectOpnd, boxedFunctionObjectOpnd, Js::OpCode::BrEq_A, true, labelDone, insertBeforeInstr);
  19654. InsertMove(functionObjectOpnd, boxedFunctionObjectOpnd, insertBeforeInstr);
  19655. insertBeforeInstr->InsertBefore(labelDone);
  19656. }
IR::Opnd *
Lowerer::GetInlineCacheFromFuncObjectForRuntimeUse(IR::Instr * instr, IR::PropertySymOpnd * propSymOpnd, bool isHelper)
{
    // Returns a RegOpnd holding the inline cache to use at runtime: when the
    // function object carries its own inline caches, loads
    // funcObj->m_inlineCaches[propSymOpnd->m_inlineCacheIndex]; otherwise
    // falls back to the JIT-time cache pointer
    // (propSymOpnd->m_runtimeInlineCache). isHelper marks the emitted labels
    // as helper (cold) blocks.
    //
    // MOV s1, [ebp + 8] //s1 = function object
    // MOV s2, [s1 + offset(hasInlineCaches)]
    // TEST s2, s2
    // JE $L1
    // MOV s3, [s1 + offset(m_inlineCaches)] //s3 = inlineCaches from function object
    // MOV s4, [s3 + index*scale] //s4 = inlineCaches[index]
    // JMP $L2
    // $L1
    // MOV s3, propSym->m_runtimeCache
    // $L2
    byte indirScale = this->m_lowererMD.GetDefaultIndirScale();

    IR::RegOpnd * funcObjOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * funcObjInstr = IR::Instr::New(Js::OpCode::Ld_A, funcObjOpnd, instr->m_func);
    instr->InsertBefore(funcObjInstr);
    LoadFuncExpression(funcObjInstr);

    IR::RegOpnd * funcObjHasInlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    this->m_lowererMD.CreateAssign(funcObjHasInlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), TyUint8, instr->m_func), instr);

    IR::LabelInstr * inlineCachesNullLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertTestBranch(funcObjHasInlineCachesOpnd, funcObjHasInlineCachesOpnd, Js::OpCode::BrEq_A, inlineCachesNullLabel, instr);

    IR::RegOpnd * inlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    Lowerer::InsertMove(inlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunctionWithInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, instr->m_func), instr);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);

    // If the byte offset of the cache slot fits in an int32 (no overflow in
    // the multiply), address it directly; otherwise go through a scaled index.
    int inlineCacheOffset;
    if (!Int32Math::Mul(sizeof(Js::InlineCache *), propSymOpnd->m_inlineCacheIndex, &inlineCacheOffset))
    {
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, inlineCacheOffset, TyMachPtr, instr->m_func), instr);
    }
    else
    {
        Lowerer::InsertMove(indexOpnd, IR::IntConstOpnd::New(propSymOpnd->m_inlineCacheIndex, TyUint32, instr->m_func), instr);
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, indexOpnd, indirScale, TyMachPtr, instr->m_func), instr);
    }

    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertBranch(LowererMD::MDUncondBranchOpcode, continueLabel, instr);

    // Fallback path ($L1): load the JIT-time runtime cache pointer, then
    // splice the $L1/$L2 labels around that single assign.
    IR::Instr * ldCacheFromPropSymOpndInstr = this->m_lowererMD.CreateAssign(inlineCacheOpnd, IR::AddrOpnd::New(propSymOpnd->m_runtimeInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func), instr);
    ldCacheFromPropSymOpndInstr->InsertBefore(inlineCachesNullLabel);
    ldCacheFromPropSymOpndInstr->InsertAfter(continueLabel);

    return inlineCacheOpnd;
}
  19700. IR::Instr *
  19701. Lowerer::LowerInitClass(IR::Instr * instr)
  19702. {
  19703. // scriptContext
  19704. IR::Instr * prevInstr = LoadScriptContext(instr);
  19705. // extends
  19706. if (instr->GetSrc2() != nullptr)
  19707. {
  19708. IR::Opnd * extendsOpnd = instr->UnlinkSrc2();
  19709. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  19710. }
  19711. else
  19712. {
  19713. IR::AddrOpnd* extendsOpnd = IR::AddrOpnd::NewNull(this->m_func);
  19714. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  19715. }
  19716. // constructor
  19717. IR::Opnd * ctorOpnd = instr->UnlinkSrc1();
  19718. m_lowererMD.LoadHelperArgument(instr, ctorOpnd);
  19719. // call
  19720. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOP_InitClass);
  19721. return prevInstr;
  19722. }
  19723. void
  19724. Lowerer::LowerNewConcatStrMulti(IR::Instr * instr)
  19725. {
  19726. IR::IntConstOpnd * countOpnd = instr->UnlinkSrc1()->AsIntConstOpnd();
  19727. IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
  19728. uint8 count = (uint8)countOpnd->GetValue();
  19729. Assert(dstOpnd->GetValueType().IsString());
  19730. GenerateRecyclerAlloc(IR::HelperAllocMemForConcatStringMulti, Js::ConcatStringMulti::GetAllocSize(count), dstOpnd, instr);
  19731. GenerateRecyclerMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, VTableValue::VtableConcatStringMulti), instr);
  19732. GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfType(),
  19733. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueStringTypeStatic), instr);
  19734. GenerateRecyclerMemInitNull(dstOpnd, Js::ConcatStringMulti::GetOffsetOfpszValue(), instr);
  19735. GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), 0, instr);
  19736. GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfSlotCount(), countOpnd->AsUint32(), instr);
  19737. instr->Remove();
  19738. }
void
Lowerer::LowerNewConcatStrMultiBE(IR::Instr * instr)
{
    // Lower
    //   t1 = SetConcatStrMultiBE s1
    //   t2 = SetConcatStrMultiBE s2, t1
    //   t3 = SetConcatStrMultiBE s3, t2
    //   s  = NewConcatStrMultiBE 3, t3
    // to
    //   s = new concat string
    //   s+0 = s1
    //   s+1 = s2
    //   s+2 = s3
    Assert(instr->GetSrc1()->IsConstOpnd());
    Assert(instr->GetDst()->IsRegOpnd());
    IR::RegOpnd * newString = instr->GetDst()->AsRegOpnd();
    IR::Opnd * newConcatItemOpnd = nullptr;
    // src1 holds the item count. The Set*BE chain is linked last-item-first
    // through src2, so slots are filled from the highest index down.
    uint index = instr->GetSrc1()->AsIntConstOpnd()->AsUint32() - 1;
    IR::Instr * concatItemInstr = nullptr;
    IR::Opnd * linkOpnd = instr->GetSrc2();
    while (linkOpnd)
    {
        Assert(linkOpnd->IsRegOpnd());
        concatItemInstr = linkOpnd->GetStackSym()->GetInstrDef();
        Assert(concatItemInstr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE);
        IR::Opnd * concatItemOpnd = concatItemInstr->GetSrc1();
        Assert(concatItemOpnd->IsRegOpnd());
        // If one of the concat items is equal to the dst of the concat expressions (s = s + a + b),
        // hoist the load of that item to before the setting of the new string to the dst.
        if (concatItemOpnd->IsEqual(newString))
        {
            if (!newConcatItemOpnd)
            {
                // Hoist once; every later occurrence of dst reuses the copy.
                IR::Instr * hoistSrcInstr = concatItemInstr->HoistSrc1(Js::OpCode::Ld_A);
                newConcatItemOpnd = hoistSrcInstr->GetDst();
            }
            concatItemOpnd = newConcatItemOpnd;
        }
        else
        {
            // If only some of the SetConcatStrMultiItemBE instructions were CSE'd and the rest, along with the NewConcatStrMultiBE
            // instruction, were in a loop, the strings on the CSE'd Set*BE instructions will become live on back edge. Add them to
            // addToLiveOnBackEdgeSyms here and clear when we reach the Set*BE instruction.
            // Note that we are doing this only for string opnds which are not the same as the dst of the concat expression. Reasoning
            // behind this is that if a loop has a concat expression with one of its sources same as the dst, the Set*BE instruction
            // for the dst wouldn't have been CSE'd as the dst's value is changing in the loop and the backward pass should have set the
            // symbol as live on backedge.
            this->addToLiveOnBackEdgeSyms->Set(concatItemOpnd->GetStackSym()->m_id);
        }
        // Store this item into its slot. Inserted after 'instr' so it runs
        // after the concat string has been allocated by LowerNewConcatStrMulti.
        IR::Instr * newConcatItemInstr = IR::Instr::New(Js::OpCode::SetConcatStrMultiItem,
            IR::IndirOpnd::New(newString, index, TyVar, instr->m_func),
            concatItemOpnd,
            instr->m_func);
        instr->InsertAfter(newConcatItemInstr);
        this->LowerSetConcatStrMultiItem(newConcatItemInstr);
        linkOpnd = concatItemInstr->GetSrc2();
        index--;
    }
    // 'index' is unsigned: after slot 0 is processed it wraps to UINT_MAX,
    // which compares equal to -1 here.
    Assert(index == -1);
    this->LowerNewConcatStrMulti(instr);
}
// Lowers SetConcatStrMultiItem: stores one source string into a slot of a
// ConcatStringMulti (dst is an indir whose offset is the slot index) and adds
// the source's character length into the concat string's running charLength.
void
Lowerer::LowerSetConcatStrMultiItem(IR::Instr * instr)
{
    Func * func = this->m_func;
    IR::IndirOpnd * dstOpnd = instr->GetDst()->AsIndirOpnd();
    IR::RegOpnd * concatStrOpnd = dstOpnd->GetBaseOpnd();
    IR::RegOpnd * srcOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    Assert(concatStrOpnd->GetValueType().IsString());
    Assert(srcOpnd->GetValueType().IsString());

    // If src may be a CompoundString, emit a call to the clone helper so the
    // stored slot holds a string that won't change underneath us (see
    // GenerateGetImmutableOrScriptUnreferencedString).
    srcOpnd = GenerateGetImmutableOrScriptUnreferencedString(srcOpnd, instr, IR::HelperOp_CompoundStringCloneForConcat);
    instr->SetSrc1(srcOpnd);

    // dst.charLength += src.charLength; the length is a compile-time constant
    // when src is a string-constant sym.
    IR::IndirOpnd * dstLength = IR::IndirOpnd::New(concatStrOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func);
    IR::Opnd * srcLength;
    if (srcOpnd->m_sym->m_isStrConst)
    {
        srcLength = IR::IntConstOpnd::New(JITJavascriptString::FromVar(srcOpnd->m_sym->GetConstAddress(true))->GetLength(), TyUint32, func);
    }
    else
    {
        srcLength = IR::RegOpnd::New(TyUint32, func);
        InsertMove(srcLength, IR::IndirOpnd::New(srcOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func), instr);
    }
    InsertAdd(false, dstLength, dstLength, srcLength, instr);

    // Rescale the slot index into a byte offset inside the slots array, then
    // turn the remaining store into a write-barriered assignment.
    dstOpnd->SetOffset(dstOpnd->GetOffset() * sizeof(Js::JavascriptString *) + Js::ConcatStringMulti::GetOffsetOfSlots());
    LowererMD::ChangeToWriteBarrierAssign(instr, func);
}
// Returns an opnd holding a string that is safe to keep a reference to:
// - string-constant syms are returned unchanged;
// - otherwise, if the value's vtable matches Js::CompoundString, 'helperMethod'
//   is called on it and the helper's result is used instead.
// When 'reloadDst' is true the result goes into a fresh TyVar register so
// 'strOpnd' keeps its original value; otherwise the helper path writes into
// 'strOpnd' itself.
IR::RegOpnd *
Lowerer::GenerateGetImmutableOrScriptUnreferencedString(IR::RegOpnd * strOpnd, IR::Instr * insertBeforeInstr, IR::JnHelperMethod helperMethod, bool reloadDst)
{
    if (strOpnd->m_sym->m_isStrConst)
    {
        // Constants need no cloning.
        return strOpnd;
    }

    Func * const func = this->m_func;
    IR::RegOpnd *dstOpnd = reloadDst == true ? IR::RegOpnd::New(TyVar, func) : strOpnd;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    if (!strOpnd->IsNotTaggedValue())
    {
        // Tagged values can't be CompoundStrings: jump straight to $done.
        // NOTE(review): on this path dstOpnd is not written even when
        // reloadDst is true — presumably callers pass reloadDst only for
        // known-untagged strings; confirm against callers.
        this->m_lowererMD.GenerateObjectTest(strOpnd, insertBeforeInstr, doneLabel);
    }

    // CMP [strOpnd], Js::CompoundString::`vtable'
    // JEQ $helper
    InsertCompareBranch(
        IR::IndirOpnd::New(strOpnd, 0, TyMachPtr, func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertBeforeInstr);

    if (reloadDst)
    {
        // Fast path: not a CompoundString, so the result is the input string.
        InsertMove(dstOpnd, strOpnd, insertBeforeInstr);
    }
    InsertBranch(Js::OpCode::Br, doneLabel, insertBeforeInstr);

    // $helper: dstOpnd = helperMethod(strOpnd)
    insertBeforeInstr->InsertBefore(helperLabel);
    this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, strOpnd);
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dstOpnd, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);

    // $done:
    insertBeforeInstr->InsertBefore(doneLabel);
    return dstOpnd;
}
  19863. void
  19864. Lowerer::LowerConvStrCommon(IR::JnHelperMethod helper, IR::Instr * instr)
  19865. {
  19866. IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
  19867. if (!src1Opnd->GetValueType().IsNotString())
  19868. {
  19869. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  19870. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19871. this->GenerateStringTest(src1Opnd, instr, helperLabel);
  19872. InsertMove(instr->GetDst(), src1Opnd, instr);
  19873. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  19874. instr->InsertBefore(helperLabel);
  19875. instr->InsertAfter(doneLabel);
  19876. }
  19877. if (instr->GetSrc2())
  19878. {
  19879. this->m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  19880. }
  19881. this->LoadScriptContext(instr);
  19882. this->m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  19883. this->m_lowererMD.ChangeToHelperCall(instr, helper);
  19884. }
  19885. void
  19886. Lowerer::LowerConvStr(IR::Instr * instr)
  19887. {
  19888. LowerConvStrCommon(IR::HelperOp_ConvString, instr);
  19889. }
  19890. void
  19891. Lowerer::LowerCoerseStr(IR::Instr* instr)
  19892. {
  19893. LowerConvStrCommon(IR::HelperOp_CoerseString, instr);
  19894. }
  19895. ///----------------------------------------------------------------------------
  19896. ///
  19897. /// Lowerer::LowerCoerseStrOrRegex - This method is used for String.Replace(arg1, arg2)
  19898. /// where arg1 is regex or string
  19899. /// if arg1 is not regex, then do String.Replace(CoerseStr(arg1), arg2);
  19900. ///
  19901. /// CoerseStrOrRegex arg1
  19902. ///
  19903. /// if (value == regex) goto :done
  19904. /// else
  19905. ///helper:
  19906. /// ConvStr value
  19907. ///done:
  19908. ///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseStrOrRegex(IR::Instr* instr)
{
    IR::RegOpnd * src1Opnd = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // if (value == regex) goto :done
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged values can't be regex objects; send them to conversion.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    // Exact vtable match against JavascriptRegExp: pass through unchanged.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);

    // helper: ConvStr value
    LowerConvStr(instr);
}
  19930. ///----------------------------------------------------------------------------
  19931. ///
  19932. /// Lowerer::LowerCoerseRegex - This method is used for String.Match(arg1)
  19933. /// if arg1 is regex, then pass CreateRegEx(arg1) to String.Match
  19934. ///
  19935. ///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseRegex(IR::Instr* instr)
{
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();

    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged values can't be regex objects; take the helper path.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    // Exact vtable match against JavascriptRegExp: pass through unchanged.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);

    // $helper: dst = HelperOp_CoerseRegex(src1, null, scriptContext)
    // (arguments pushed in reverse order)
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::NewNull(instr->m_func)); // option
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd); // regex
    this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CoerseRegex);
}
  19958. void
  19959. Lowerer::LowerConvPrimStr(IR::Instr * instr)
  19960. {
  19961. LowerConvStrCommon(IR::HelperOp_ConvPrimitiveString, instr);
  19962. }
  19963. void
  19964. Lowerer::GenerateRecyclerAlloc(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  19965. {
  19966. size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
  19967. this->GenerateRecyclerAllocAligned(allocHelper, alignedSize, newObjDst, insertionPointInstr, inOpHelper);
  19968. }
  19969. void
  19970. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
  19971. {
  19972. IRType type = TyInt32;
  19973. if (isZeroed)
  19974. {
  19975. if (value == 0)
  19976. {
  19977. // Recycler memory are zero initialized
  19978. return;
  19979. }
  19980. if (value > 0 && value <= USHORT_MAX)
  19981. {
  19982. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  19983. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  19984. }
  19985. }
  19986. Func * func = this->m_func;
  19987. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  19988. }
  19989. void
  19990. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
  19991. {
  19992. IRType type = TyUint32;
  19993. if (isZeroed)
  19994. {
  19995. if (value == 0)
  19996. {
  19997. // Recycler memory are zero initialized
  19998. return;
  19999. }
  20000. if (value <= USHORT_MAX)
  20001. {
  20002. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  20003. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  20004. }
  20005. }
  20006. Func * func = this->m_func;
  20007. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  20008. }
  20009. void
  20010. Lowerer::GenerateMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr, bool isZeroed)
  20011. {
  20012. if (isZeroed)
  20013. {
  20014. return;
  20015. }
  20016. GenerateMemInit(opnd, offset, IR::AddrOpnd::NewNull(m_func), insertBeforeInstr);
  20017. }
  20018. void
  20019. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
  20020. {
  20021. IRType type = value->GetType();
  20022. Func * func = this->m_func;
  20023. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
  20024. }
  20025. void
  20026. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, IR::RegOpnd * offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
  20027. {
  20028. IRType type = value->GetType();
  20029. Func * func = this->m_func;
  20030. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
  20031. }
  20032. void
  20033. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr)
  20034. {
  20035. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  20036. }
  20037. void
  20038. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr)
  20039. {
  20040. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  20041. }
  20042. void
  20043. Lowerer::GenerateRecyclerMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr)
  20044. {
  20045. GenerateMemInitNull(opnd, offset, insertBeforeInstr, true);
  20046. }
  20047. void
  20048. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr)
  20049. {
  20050. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  20051. }
  20052. void
  20053. Lowerer::GenerateMemCopy(IR::Opnd * dst, IR::Opnd * src, uint32 size, IR::Instr * insertBeforeInstr)
  20054. {
  20055. Func * func = this->m_func;
  20056. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::IntConstOpnd::New(size, TyUint32, func));
  20057. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, src);
  20058. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, dst);
  20059. IR::Instr * memcpyInstr = IR::Instr::New(Js::OpCode::Call, func);
  20060. memcpyInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperMemCpy, func));
  20061. insertBeforeInstr->InsertBefore(memcpyInstr);
  20062. m_lowererMD.LowerCall(memcpyInstr, 3);
  20063. }
  20064. bool
  20065. Lowerer::GenerateSimplifiedInt4Rem(
  20066. IR::Instr *const remInstr,
  20067. IR::LabelInstr *const skipBailOutLabel) const
  20068. {
  20069. Assert(remInstr);
  20070. Assert(remInstr->m_opcode == Js::OpCode::Rem_I4 || remInstr->m_opcode == Js::OpCode::RemU_I4);
  20071. auto *dst = remInstr->GetDst(), *src1 = remInstr->GetSrc1(), *src2 = remInstr->GetSrc2();
  20072. Assert(src1 && src2);
  20073. Assert(dst->IsRegOpnd());
  20074. bool isModByPowerOf2 = (remInstr->HasBailOutInfo() && remInstr->GetBailOutKind() == IR::BailOnModByPowerOf2);
  20075. if (PHASE_OFF(Js::Phase::MathFastPathPhase, remInstr->m_func->GetTopFunc()) && !isModByPowerOf2)
  20076. return false;
  20077. if (!(src2->IsIntConstOpnd() && Math::IsPow2(src2->AsIntConstOpnd()->AsInt32())) && !isModByPowerOf2)
  20078. {
  20079. return false;
  20080. }
  20081. // We have:
  20082. // s3 = s1 % s2 , where s2 = +2^i
  20083. //
  20084. // Generate:
  20085. // test s1, s1
  20086. // js $slowPathLabel
  20087. // s3 = and s1, 0x00..fff (2^i - 1)
  20088. // jmp $doneLabel
  20089. // $slowPathLabel:
  20090. // (Slow path)
  20091. // (Neg zero check)
  20092. // (Bailout code)
  20093. // $doneLabel:
  20094. IR::LabelInstr *doneLabel = skipBailOutLabel, *slowPathLabel;
  20095. if (!doneLabel)
  20096. {
  20097. doneLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func);
  20098. remInstr->InsertAfter(doneLabel);
  20099. }
  20100. slowPathLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func, isModByPowerOf2);
  20101. remInstr->InsertBefore(slowPathLabel);
  20102. // test s1, s1
  20103. InsertTest(src1, src1, slowPathLabel);
  20104. // jsb $slowPathLabel
  20105. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  20106. // s3 = and s1, 0x00..fff (2^i - 1)
  20107. IR::Opnd* maskOpnd;
  20108. if(isModByPowerOf2)
  20109. {
  20110. Assert(isModByPowerOf2);
  20111. maskOpnd = IR::RegOpnd::New(TyInt32, remInstr->m_func);
  20112. // mov maskOpnd, s2
  20113. InsertMove(maskOpnd, src2, slowPathLabel);
  20114. // dec maskOpnd
  20115. InsertSub(/*needFlags*/ true, maskOpnd, maskOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func, /*dontEncode*/true), slowPathLabel);
  20116. // maskOpnd < 0 goto $slowPath
  20117. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  20118. // TEST src2, maskOpnd
  20119. InsertTestBranch(src2, maskOpnd, Js::OpCode::BrNeq_A, slowPathLabel, slowPathLabel);
  20120. }
  20121. else
  20122. {
  20123. Assert(src2->IsIntConstOpnd());
  20124. int32 mask = src2->AsIntConstOpnd()->AsInt32() - 1;
  20125. maskOpnd = IR::IntConstOpnd::New(mask, TyInt32, remInstr->m_func);
  20126. }
  20127. // dst = src1 & maskOpnd
  20128. InsertAnd(dst, src1, maskOpnd, slowPathLabel);
  20129. // jmp $doneLabel
  20130. InsertBranch(Js::OpCode::Br, doneLabel, slowPathLabel);
  20131. return true;
  20132. }
#if DBG
// Debug-only sanity check run after lowering: returns true iff 'opcode' may
// legitimately appear in the IR at this stage. Machine-dependent opcodes
// (> MDStart) are always valid; a small set of machine-independent pseudo-ops
// is allowed, some only until a later phase (peeps / final lower) removes them
// — those cases consult the phase flags on 'func'.
bool
Lowerer::ValidOpcodeAfterLower(IR::Instr* instr, Func * func)
{
    Js::OpCode opcode = instr->m_opcode;
    if (opcode > Js::OpCode::MDStart)
    {
        // Machine-dependent opcode: produced by lowering itself.
        return true;
    }
    switch (opcode)
    {
    // Pseudo-ops that survive all the way to encoding.
    case Js::OpCode::Ret:
    case Js::OpCode::Label:
    case Js::OpCode::StatementBoundary:
    case Js::OpCode::DeletedNonHelperBranch:
    case Js::OpCode::FunctionEntry:
    case Js::OpCode::FunctionExit:
    case Js::OpCode::TryCatch:
    case Js::OpCode::TryFinally:
    case Js::OpCode::Catch:
    case Js::OpCode::GeneratorResumeJumpTable:
    case Js::OpCode::Break:
#ifdef _M_X64
    case Js::OpCode::PrologStart:
    case Js::OpCode::PrologEnd:
#endif
#ifdef _M_IX86
    case Js::OpCode::BailOutStackRestore:
#endif
        return true;

    case Js::OpCode::RestoreOutParam:
        // Only valid once register allocation has run.
        Assert(func->isPostRegAlloc);
        return true;

    // These may be removed by peep
    case Js::OpCode::StartCall:
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::Nop:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
        return func && !func->isPostPeeps;

    case Js::OpCode::InlineeStart:
    case Js::OpCode::InlineeEnd:
        // Kept only while the inline-args optimization needs them.
        return instr->m_func->m_hasInlineArgsOpt;

#ifdef _M_X64
    case Js::OpCode::LdArgSize:
    case Js::OpCode::LdSpillSize:
        return func && !func->isPostFinalLower;
#endif

    case Js::OpCode::Leave:
        Assert(!func->IsLoopBodyInTry());
        Assert(func->HasTry() && func->DoOptimizeTry());
        return func && !func->isPostFinalLower; //Lowered in FinalLower phase
    };
    // Anything else is a machine-independent opcode that lowering should have
    // eliminated.
    return false;
}
#endif
  20188. void Lowerer::LowerProfiledBeginSwitch(IR::JitProfilingInstr* instr)
  20189. {
  20190. Assert(instr->isBeginSwitch);
  20191. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  20192. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  20193. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  20194. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfiledSwitch, m_func));
  20195. m_lowererMD.LowerCall(instr, 0);
  20196. }
  20197. void Lowerer::LowerProfiledBinaryOp(IR::JitProfilingInstr* instr, IR::JnHelperMethod meth)
  20198. {
  20199. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  20200. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  20201. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  20202. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  20203. instr->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
  20204. m_lowererMD.LowerCall(instr, 0);
  20205. }
  20206. void Lowerer::GenerateNullOutGeneratorFrame(IR::Instr* insertInstr)
  20207. {
  20208. // null out frame pointer on generator object to signal completion to JavascriptGenerator::CallGenerator
  20209. // s = MOV prm1
  20210. // s[offset of JavascriptGenerator::frame] = MOV nullptr
  20211. StackSym *symSrc = StackSym::NewImplicitParamSym(3, m_func);
  20212. m_func->SetArgOffset(symSrc, LowererMD::GetFormalParamOffset() * MachPtr);
  20213. IR::SymOpnd *srcOpnd = IR::SymOpnd::New(symSrc, TyMachPtr, m_func);
  20214. IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  20215. m_lowererMD.CreateAssign(dstOpnd, srcOpnd, insertInstr);
  20216. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(dstOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, m_func);
  20217. IR::AddrOpnd *addrOpnd = IR::AddrOpnd::NewNull(m_func);
  20218. m_lowererMD.CreateAssign(indirOpnd, addrOpnd, insertInstr);
  20219. }
  20220. void Lowerer::LowerFunctionExit(IR::Instr* funcExit)
  20221. {
  20222. if (m_func->GetJITFunctionBody()->IsCoroutine())
  20223. {
  20224. GenerateNullOutGeneratorFrame(funcExit->m_prev);
  20225. }
  20226. if (!m_func->DoSimpleJitDynamicProfile())
  20227. {
  20228. return;
  20229. }
  20230. IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  20231. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleCleanImplicitCallFlags, m_func));
  20232. funcExit->m_prev->InsertBefore(callInstr);
  20233. m_lowererMD.LoadHelperArgument(callInstr, CreateFunctionBodyOpnd(funcExit->m_func));
  20234. m_lowererMD.LowerCall(callInstr, 0);
  20235. }
  20236. void Lowerer::LowerFunctionEntry(IR::Instr* funcEntry)
  20237. {
  20238. Assert(funcEntry->m_opcode == Js::OpCode::FunctionEntry);
  20239. //Don't do a body call increment for loops or asm.js
  20240. if (m_func->IsLoopBody() || m_func->GetJITFunctionBody()->IsAsmJsMode())
  20241. {
  20242. return;
  20243. }
  20244. IR::Instr *const insertBeforeInstr = this->m_func->GetFunctionEntryInsertionPoint();
  20245. LowerFunctionBodyCallCountChange(insertBeforeInstr);
  20246. if (m_func->DoSimpleJitDynamicProfile())
  20247. {
  20248. // Only generate the argument profiling if the function expects to have some arguments to profile and only if
  20249. // it has implicit ArgIns (the latter is a restriction imposed by the Interpreter, so it is mirrored in SimpleJit)
  20250. if (m_func->GetJITFunctionBody()->GetInParamsCount() > 1 && m_func->GetJITFunctionBody()->HasImplicitArgIns())
  20251. {
  20252. // Call out to the argument profiling helper
  20253. IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  20254. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfileParameters, m_func));
  20255. insertBeforeInstr->InsertBefore(callInstr);
  20256. m_lowererMD.LoadHelperArgument(callInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
  20257. m_lowererMD.LowerCall(callInstr, 0);
  20258. }
  20259. // Clear existing ImplicitCallFlags
  20260. const auto starFlag = GetImplicitCallFlagsOpnd();
  20261. this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), insertBeforeInstr);
  20262. }
  20263. }
// Emits the per-call update of the function body's call count at entry.
// Full JIT: a plain increment. Simple JIT: a saturating decrement; when the
// count reaches its limit, call HelperTransitionFromSimpleJit(framePointer).
void Lowerer::LowerFunctionBodyCallCountChange(IR::Instr *const insertBeforeInstr)
{
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    const bool isSimpleJit = func->IsSimpleJit();

    if ((isSimpleJit && PHASE_OFF(Js::FullJitPhase, m_func)))
    {
        // Simple JIT with full JIT disabled: no transition can happen, so
        // there is nothing to count.
        return;
    }

    // mov countAddress, <countAddress>
    IR::RegOpnd *const countAddressOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
    // AutoReuseOpnd keeps the opnd's sym reusable for the duration of this
    // scope; both must stay alive until all uses below are emitted.
    const IR::AutoReuseOpnd autoReuseCountAddressOpnd(countAddressOpnd, func);
    InsertMove(
        countAddressOpnd,
        IR::AddrOpnd::New((Js::Var)func->GetWorkItem()->GetCallsCountAddress(), IR::AddrOpndKindDynamicMisc, func, true),
        insertBeforeInstr);
    IR::IndirOpnd *const countOpnd = IR::IndirOpnd::New(countAddressOpnd, 0, TyUint32, func);
    const IR::AutoReuseOpnd autoReuseCountOpnd(countOpnd, func);

    if(!isSimpleJit)
    {
        // Full JIT: just count the call.
        InsertAdd(false, countOpnd, countOpnd, IR::IntConstOpnd::New(1, TyUint32, func), insertBeforeInstr);
        return;
    }

    // Simple JIT: decrement with underflow prevention; the helper returns the
    // insertion point for the code that runs when the counter would wrap.
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertDecUInt32PreventOverflow(
        countOpnd,
        countOpnd,
        insertBeforeInstr,
        &onOverflowInsertBeforeInstr);

    // ($overflow:)
    //     TransitionFromSimpleJit(framePointer)
    m_lowererMD.LoadHelperArgument(onOverflowInsertBeforeInstr, IR::Opnd::CreateFramePointerOpnd(func));
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperTransitionFromSimpleJit, func));
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
  20301. IR::Opnd*
  20302. Lowerer::GetImplicitCallFlagsOpnd()
  20303. {
  20304. return GetImplicitCallFlagsOpnd(m_func);
  20305. }
  20306. IR::Opnd*
  20307. Lowerer::GetImplicitCallFlagsOpnd(Func * func)
  20308. {
  20309. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetImplicitCallFlagsAddr(), GetImplicitCallFlagsType(), func);
  20310. }
  20311. IR::Opnd*
  20312. Lowerer::CreateClearImplicitCallFlagsOpnd()
  20313. {
  20314. return IR::IntConstOpnd::New(Js::ImplicitCall_None, GetImplicitCallFlagsType(), m_func);
  20315. }
  20316. void
  20317. Lowerer::LowerSpreadArrayLiteral(IR::Instr *instr)
  20318. {
  20319. LoadScriptContext(instr);
  20320. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  20321. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  20322. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  20323. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  20324. this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperSpreadArrayLiteral);
  20325. }
// Lowers a spread call (f(...args)) into a call to HelperSpreadCall, or to
// HelperProfiledNewScObjArraySpread when 'setupProfiledVersion' is set. The
// LdSpreadIndices pseudo-instruction is unlinked from the argument chain and
// its AuxArray of spread positions is passed to the helper with the target.
IR::Instr *
Lowerer::LowerSpreadCall(IR::Instr *instr, Js::CallFlags callFlags, bool setupProfiledVersion)
{
    // Get the target function object, and emit function object test.
    IR::RegOpnd * functionObjOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    functionObjOpnd->m_isCallArg = true;
    if (!(callFlags & Js::CallFlags_New) && !setupProfiledVersion)
    {
        IR::LabelInstr* continueAfterExLabel = InsertContinueAfterExceptionLabelForDebugger(m_func, instr, false);
        this->m_lowererMD.GenerateFunctionObjectTest(instr, functionObjOpnd, false, continueAfterExLabel);
    }

    IR::Instr *spreadIndicesInstr;
    spreadIndicesInstr = GetLdSpreadIndicesInstr(instr);
    Assert(spreadIndicesInstr->m_opcode == Js::OpCode::LdSpreadIndices);

    // Get AuxArray
    IR::Opnd *spreadIndicesOpnd = spreadIndicesInstr->UnlinkSrc1();
    // Remove LdSpreadIndices from the argument chain
    instr->ReplaceSrc2(spreadIndicesInstr->UnlinkSrc2());

    // Emit the normal args
    if (!(callFlags & Js::CallFlags_New))
    {
        // Record whether the call's result is used.
        callFlags = (Js::CallFlags)(callFlags | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
    }

    // Profiled helper call requires three more parameters, ArrayProfileId, profileId, and the frame pointer.
    // This is just following the convention of HelperProfiledNewScObjArray call.
    const unsigned short extraArgsCount = setupProfiledVersion ? 5 : 2; // function object and AuxArray
    int32 argCount = this->m_lowererMD.LowerCallArgs(instr, (ushort)callFlags, extraArgsCount);

    // Emit our extra (first) args for the Spread helper in reverse order
    if (setupProfiledVersion)
    {
        IR::JitProfilingInstr* jitInstr = (IR::JitProfilingInstr*)instr;
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
    }
    m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
    m_lowererMD.LoadHelperArgument(instr, spreadIndicesOpnd);

    // Change the call target to our helper
    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(setupProfiledVersion ? IR::HelperProfiledNewScObjArraySpread : IR::HelperSpreadCall, this->m_func);
    instr->SetSrc1(helperOpnd);
    return this->m_lowererMD.LowerCall(instr, (Js::ArgSlot)argCount);
}
// Lowers asm.js/wasm integer div/rem (Div_I4, Rem_I4, DivU_I4, RemU_I4),
// guarding the cases the machine divide cannot handle directly:
// a zero divisor (dst forced to 0 instead of faulting) and, for signed
// operands, INT_MIN / -1 (dst forced to src1 for div, 0 for rem).
// The generated layout is sketched in the comment block below.
void
Lowerer::LowerDivI4Common(IR::Instr * instr)
{
    Assert(instr);
    Assert((instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::Div_I4) ||
        (instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4));
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());

    const bool isRem = instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4;

    // MIN_INT/-1 path is only needed for signed operations
    //
    //       TEST src2, src2
    //       JEQ $div0
    //       CMP src1, MIN_INT
    //       JEQ $minInt
    //       JMP $div
    // $div0: [helper]
    //       MOV dst, 0
    //       JMP $done
    // $minInt: [helper]
    //       CMP src2, -1
    //       JNE $div
    //       dst = MOV src1 / 0
    //       JMP $done
    // $div:
    //       dst = IDIV src2, src1
    // $done:

    IR::LabelInstr * div0Label = InsertLabel(true, instr);
    IR::LabelInstr * divLabel = InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    bool isWasm = m_func->GetJITFunctionBody()->IsWasmFunction();
    // Only rem reaches here for wasm functions (see assert); the divide-by-zero
    // guard below is emitted for the non-wasm (asm.js) path only.
    Assert(!isWasm || isRem);
    if (!isWasm)
    {
        // Divisor == 0: result is 0, skip the actual divide.
        InsertTestBranch(src2, src2, Js::OpCode::BrEq_A, div0Label, div0Label);
        InsertMove(dst, IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
        InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
    }

    if (instr->GetSrc1()->IsSigned())
    {
        IR::LabelInstr * minIntLabel = nullptr;
        // we need to check for INT_MIN/-1 if divisor is either -1 or variable, and dividend is either INT_MIN or variable
        int64 intMin = IRType_IsInt64(src1->GetType()) ? LONGLONG_MIN : INT_MIN;
        bool needsMinOverNeg1Check = !(src2->IsImmediateOpnd() && src2->GetImmediateValue(m_func) != -1);
        if (src1->IsImmediateOpnd())
        {
            if (needsMinOverNeg1Check && src1->GetImmediateValue(m_func) == intMin)
            {
                // Dividend is a known INT_MIN: jump straight to the -1 check.
                minIntLabel = InsertLabel(true, divLabel);
                InsertBranch(Js::OpCode::Br, minIntLabel, div0Label);
            }
            else
            {
                // Dividend is a constant other than INT_MIN: overflow impossible.
                needsMinOverNeg1Check = false;
            }
        }
        else if(needsMinOverNeg1Check)
        {
            // Variable dividend: branch to the -1 check only when it equals INT_MIN.
            minIntLabel = InsertLabel(true, divLabel);
            InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrEq_A, minIntLabel, div0Label);
        }
        if (needsMinOverNeg1Check)
        {
            Assert(minIntLabel);
            Assert(!src2->IsImmediateOpnd() || src2->GetImmediateValue(m_func) == -1);
            if (!src2->IsImmediateOpnd())
            {
                InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, divLabel, divLabel);
            }
            // INT_MIN / -1: div result is src1 (INT_MIN), rem result is 0.
            InsertMove(dst, !isRem ? src1 : IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
            InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
        }
    }
    InsertBranch(Js::OpCode::Br, divLabel, div0Label);

    m_lowererMD.EmitInt4Instr(instr);
}
  20445. void
  20446. Lowerer::LowerRemI4(IR::Instr * instr)
  20447. {
  20448. Assert(instr);
  20449. Assert(instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4);
  20450. if (m_func->GetJITFunctionBody()->IsAsmJsMode())
  20451. {
  20452. LowerDivI4Common(instr);
  20453. }
  20454. else
  20455. {
  20456. m_lowererMD.EmitInt4Instr(instr);
  20457. }
  20458. }
  20459. void
  20460. Lowerer::LowerTrapIfZero(IR::Instr * const instr)
  20461. {
  20462. Assert(instr);
  20463. Assert(instr->m_opcode == Js::OpCode::TrapIfZero);
  20464. Assert(instr->GetSrc1());
  20465. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  20466. IR::Opnd * src1 = instr->GetSrc1();
  20467. if (src1->IsImmediateOpnd())
  20468. {
  20469. if (src1->GetImmediateValue(m_func) == 0)
  20470. {
  20471. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), instr);
  20472. }
  20473. }
  20474. else
  20475. {
  20476. IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
  20477. InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(0, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
  20478. InsertLabel(true, doneLabel);
  20479. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), doneLabel);
  20480. }
  20481. instr->m_opcode = Js::OpCode::Ld_I4;
  20482. LowerLdI4(instr);
  20483. }
// Lowers the wasm TrapIfMinIntOverNegOne check: throws VBSERR_Overflow when
// src1 == INT_MIN (or INT64_MIN) and src2 == -1 — the one signed-division
// case that overflows — otherwise passes src1 through as a plain Ld_I4.
// Constant operands prune the checks (and the whole trap) at lowering time.
void
Lowerer::LowerTrapIfMinIntOverNegOne(IR::Instr * const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::TrapIfMinIntOverNegOne);
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());

    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    int64 intMin = src1->IsInt64() ? LONGLONG_MIN : INT_MIN;
    if (src1->IsImmediateOpnd())
    {
        if (src1->GetImmediateValue(m_func) != intMin)
        {
            // Const value not min int, will not trap
            doneLabel->Remove();
            src2->Free(m_func);
            LowerLdI4(instr);
            return;
        }
        // Is min int no need to do check
    }
    else
    {
        // Runtime dividend: anything other than INT_MIN can't trap.
        InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
    }
    if (src2->IsImmediateOpnd())
    {
        if (src2->GetImmediateValue(m_func) != -1)
        {
            // Const value not -1, will not trap
            // NOTE(review): if src1 was non-immediate, a compare branch
            // targeting doneLabel was already inserted above; removing the
            // label here assumes that combination never reaches this path —
            // confirm against callers.
            doneLabel->Remove();
            src2->Free(m_func);
            LowerLdI4(instr);
            return;
        }
        // Is -1 no need to do check
        src2->Free(m_func);
    }
    else
    {
        // Runtime divisor: anything other than -1 can't trap.
        InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
    }
    // Reaching here means the INT_MIN / -1 pair: throw overflow.
    InsertLabel(true, doneLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), doneLabel);
    instr->m_opcode = Js::OpCode::Ld_I4;
    LowerLdI4(instr);
}
  20534. void
  20535. Lowerer::GenerateThrow(IR::Opnd* errorCode, IR::Instr * instr)
  20536. {
  20537. IR::Instr *throwInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, IR::RegOpnd::New(TyMachReg, m_func), errorCode, m_func);
  20538. instr->InsertBefore(throwInstr);
  20539. const bool isWasm = m_func->GetJITFunctionBody() && m_func->GetJITFunctionBody()->IsWasmFunction();
  20540. LowerUnaryHelperMem(throwInstr, isWasm ? IR::HelperOp_WebAssemblyRuntimeError : IR::HelperOp_RuntimeTypeError);
  20541. }
  20542. void
  20543. Lowerer::LowerLdI4(IR::Instr * const instr)
  20544. {
  20545. if (instr->GetDst() && instr->GetDst()->IsInt64())
  20546. {
  20547. m_lowererMD.LowerInt64Assign(instr);
  20548. }
  20549. else
  20550. {
  20551. m_lowererMD.ChangeToAssign(instr);
  20552. }
  20553. }
// Lowers Div_I4/DivU_I4. Dispatch order: int64 operands on x86 go to the
// int64 lowering; wasm emits the raw machine divide; asm.js goes through the
// guarded common path; a division without bailout info emits directly.
// Otherwise this is a profiled integer division that bails out whenever the
// result would not be a clean int32: non-zero remainder, negative zero,
// divide by zero, or INT_MIN dividend (per the attached bailout kinds).
void
Lowerer::LowerDivI4(IR::Instr * instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Div_I4 || instr->m_opcode == Js::OpCode::DivU_I4);

#ifdef _M_IX86
    // 32-bit x86 has no native 64-bit divide; hand int64 operands off wholesale.
    if (
        instr->GetDst() && instr->GetDst()->IsInt64() ||
        instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
        instr->GetSrc2() && instr->GetSrc2()->IsInt64()
        )
    {
        m_lowererMD.EmitInt64Instr(instr);
        return;
    }
#endif

    if (m_func->GetJITFunctionBody()->IsWasmFunction())
    {
        m_lowererMD.EmitInt4Instr(instr);
        return;
    }
    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        LowerDivI4Common(instr);
        return;
    }

    if(!instr->HasBailOutInfo())
    {
        m_lowererMD.EmitInt4Instr(instr);
        return;
    }

    Assert(!(instr->GetBailOutKind() & ~(IR::BailOnDivResultNotInt | IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero | IR::BailOutOnDivOfMinInt)));
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();

    // Split out and generate the bailout instruction
    const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
    instr->TransferTo(nonBailOutInstr);
    instr->InsertBefore(nonBailOutInstr);

    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(doneLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    IR::LabelInstr * bailOutLabel = GenerateBailOut(instr);

    IR::Opnd * denominatorOpnd = nonBailOutInstr->GetSrc2();
    IR::Opnd * nominatorOpnd = nonBailOutInstr->GetSrc1();

    if (bailOutKind & IR::BailOutOnDivOfMinInt)
    {
        // Bailout if numerator is MIN_INT (could also check for denominator being -1
        // before bailing out, but does not seem worth the extra code..)
        InsertCompareBranch(nominatorOpnd, IR::IntConstOpnd::New(INT32_MIN, TyInt32, this->m_func, true), Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
    }

    if (denominatorOpnd->IsIntConstOpnd() && Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
    {
        // Power-of-2 constant divisor: divide via arithmetic shift, bailing
        // out when any low bit of the numerator is set (non-zero remainder).
        Assert((bailOutKind & (IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero)) == 0);
        int pow2 = denominatorOpnd->AsIntConstOpnd()->AsInt32();
        InsertTestBranch(nominatorOpnd, IR::IntConstOpnd::New(pow2 - 1, TyInt32, this->m_func, true),
            Js::OpCode::BrNeq_A, bailOutLabel, nonBailOutInstr);
        nonBailOutInstr->m_opcode = Js::OpCode::Shr_A;
        nonBailOutInstr->ReplaceSrc2(IR::IntConstOpnd::New(Math::Log2(pow2), TyInt32, this->m_func, true));
        LowererMD::ChangeToShift(nonBailOutInstr, false);
        LowererMD::Legalize(nonBailOutInstr);
    }
    else
    {
        if (bailOutKind & IR::BailOutOnDivByZero)
        {
            // Bailout if denominator is 0
            InsertTestBranch(denominatorOpnd, denominatorOpnd, Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
        }

        // Lower the div and bailout if there is a reminder (machine specific)
        IR::Instr * insertBeforeInstr = m_lowererMD.LowerDivI4AndBailOnReminder(nonBailOutInstr, bailOutLabel);

        IR::Opnd * resultOpnd = nonBailOutInstr->GetDst();
        if (bailOutKind & IR::BailOutOnNegativeZero)
        {
            // A zero result with a negative denominator means the true result
            // is -0, which is not representable as an int — bail out.
            //      TEST result, result
            //      JNE skipNegDenominatorCheckLabel    // Result not 0
            //      TEST denominator, denominator
            //      JNSB/BMI bailout                    // bail if negative
            // skipNegDenominatorCheckLabel:
            IR::LabelInstr * skipNegDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            // Skip negative denominator check if the result is not 0
            InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrNeq_A, skipNegDenominatorCheckLabel, insertBeforeInstr);

            IR::LabelInstr * negDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            insertBeforeInstr->InsertBefore(negDenominatorCheckLabel);
            // Jump to done if the denominator is not negative
            InsertTestBranch(denominatorOpnd, denominatorOpnd,
                LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), bailOutLabel, insertBeforeInstr);

            insertBeforeInstr->InsertBefore(skipNegDenominatorCheckLabel);
        }
    }

    // We are all fine, jump around the bailout to done
    InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabel);
}
  20646. void
  20647. Lowerer::LowerRemR8(IR::Instr * instr)
  20648. {
  20649. Assert(instr);
  20650. Assert(instr->m_opcode == Js::OpCode::Rem_A);
  20651. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  20652. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
  20653. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  20654. instr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperOp_Rem_Double, m_func));
  20655. m_lowererMD.LowerCall(instr, 0);
  20656. }
// Lowers NewScopeSlots: allocates the scope-slot array (from the recycler,
// unless doStackSlots — stack allocation happens with the frame display in
// LowerLdFrameDisplay), stamps the header slots, and fills every value slot
// with undefined. Small counts get straight-line stores; larger counts get
// an 8-way-unrolled fill loop plus straight-line stores for the remainder.
void
Lowerer::LowerNewScopeSlots(IR::Instr * instr, bool doStackSlots)
{
    Func * func = m_func;
    if (PHASE_OFF(Js::NewScopeSlotFastPathPhase, func))
    {
        // Fast path disabled: defer entirely to the runtime helper.
        this->LowerUnaryHelperMemWithFunctionInfo(instr, IR::HelperOP_NewScopeSlots);
        return;
    }

    uint const count = instr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    uint const allocSize = count * sizeof(Js::Var);
    // Slots before FirstSlotIndex hold metadata, not values.
    uint const actualSlotCount = count - Js::ScopeSlots::FirstSlotIndex;

    IR::RegOpnd * dst = instr->UnlinkDst()->AsRegOpnd();

    // dst = RecyclerAlloc(allocSize)
    // dst[EncodedSlotCountSlotIndex] = min(actualSlotCount, MaxEncodedSlotCount);
    // dst[ScopeMetadataSlotIndex] = FunctionBody;
    // mov undefinedOpnd, undefined
    // dst[FirstSlotIndex..count] = undefinedOpnd;

    // Note: stack allocation of both scope slots and frame display are done together
    // in lowering of NewStackFrameDisplay
    if (!doStackSlots)
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, allocSize, dst, instr);
    }

    m_lowererMD.GenerateMemInit(dst, Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
        (size_t)min<uint>(actualSlotCount, Js::ScopeSlots::MaxEncodedSlotCount), instr, !doStackSlots);

    IR::Opnd * functionInfoOpnd = this->LoadFunctionInfoOpnd(instr);
    GenerateMemInit(dst, Js::ScopeSlots::ScopeMetadataSlotIndex * sizeof(Js::Var),
        functionInfoOpnd, instr, !doStackSlots);

    IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    const IR::AutoReuseOpnd autoReuseUndefinedOpnd(undefinedOpnd, func);

    // avoid using a register for the undefined pointer if we are going to assign 1 or 2
    if (actualSlotCount > 2 && !undefinedOpnd->IsRegOpnd())
    {
        // mov undefinedOpnd, undefined
        IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyVar, func);
        InsertMove(regOpnd, undefinedOpnd, instr);
        undefinedOpnd = regOpnd;
    }

    int const loopUnrollCount = 8;

    if (actualSlotCount <= loopUnrollCount * 2)
    {
        // Just generate all the assignment in straight line code
        //  mov[dst + Js::FirstSlotIndex], undefinedOpnd
        //  ...
        //  mov[dst + count - 1], undefinedOpnd
        for (unsigned int i = Js::ScopeSlots::FirstSlotIndex; i < count; i++)
        {
            GenerateMemInit(dst, sizeof(Js::Var) * i, undefinedOpnd, instr, !doStackSlots);
        }
    }
    else
    {
        // Just generate all the assignment in loop of loopUnrollCount and the rest as straight line code
        //
        //      lea currOpnd, [dst + sizeof(Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount)];
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 1] , undefinedOpnd
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 2] , undefinedOpnd
        //      ...
        //      mov [currOpnd + loopUnrollCount], undefinedOpnd
        // $LoopTop:
        //      mov [currOpnd + loopUnrollCount - 1], undefinedOpnd
        //      mov [currOpnd + loopUnrollCount - 2], undefinedOpnd
        //      ...
        //      mov [currOpnd], undefinedOpnd
        //      lea currOpnd, [currOpnd - loopUnrollCount]
        //      cmp dst, currOpnd
        //      jlt $Looptop
        uint nLoop = actualSlotCount / loopUnrollCount;
        uint loopAssignCount = nLoop * loopUnrollCount;
        uint leftOverAssignCount = actualSlotCount - loopAssignCount; // The left over assignments

        IR::RegOpnd * currOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseCurrOpnd(currOpnd, m_func);
        InsertLea(
            currOpnd,
            IR::IndirOpnd::New(
                dst,
                sizeof(Js::Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount),
                TyMachPtr,
                func),
            instr);

        // Straight-line stores for the slots that don't fill a whole unroll block.
        for (unsigned int i = 0; i < leftOverAssignCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount + leftOverAssignCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }

        IR::LabelInstr * loopTop = InsertLoopTopLabel(instr);
        Loop * loop = loopTop->GetLoop();

        // Unrolled loop body: loopUnrollCount stores per iteration, walking down.
        for (unsigned int i = 0; i < loopUnrollCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        InsertLea(currOpnd, IR::IndirOpnd::New(currOpnd, -((int)sizeof(Js::Var) * loopUnrollCount), TyMachPtr, func), instr);

        InsertCompareBranch(dst, currOpnd, Js::OpCode::BrLt_A, true, loopTop, instr);

        // Keep the loop-carried registers alive across the back edge.
        loop->regAlloc.liveOnBackEdgeSyms->Set(currOpnd->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(dst->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(undefinedOpnd->AsRegOpnd()->m_sym->m_id);
    }

    if (!doStackSlots)
    {
        // Publish the new slot array as the function's local closure.
        InsertMove(IR::RegOpnd::New(instr->m_func->GetLocalClosureSym(), TyMachPtr, func), dst, instr);
    }
    instr->Remove();
}
  20760. void Lowerer::LowerLdInnerFrameDisplay(IR::Instr *instr)
  20761. {
  20762. bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
  20763. if (isStrict)
  20764. {
  20765. if (instr->GetSrc2())
  20766. {
  20767. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplay);
  20768. }
  20769. else
  20770. {
  20771. #if DBG
  20772. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  20773. #endif
  20774. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplayNoParent);
  20775. }
  20776. }
  20777. else
  20778. {
  20779. if (instr->GetSrc2())
  20780. {
  20781. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplay);
  20782. }
  20783. else
  20784. {
  20785. #if DBG
  20786. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  20787. #endif
  20788. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplayNoParent);
  20789. }
  20790. }
  20791. }
// Lowers LdFrameDisplay. The slow path calls a runtime helper (selected by
// strict mode / presence of a parent display). The fast path inline-allocates
// the FrameDisplay — on the stack when doStackFrameDisplay (with a runtime
// fallback to the recycler if stack functions were disabled after jitting),
// otherwise from the recycler — then copies the parent environment's scopes,
// installs the current frame as scope 0, and writes the packed header word.
void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
{
    bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
    uint16 envDepth = instr->m_func->GetJITFunctionBody()->GetEnvDepth();
    Func *func = this->m_func;

    // envDepth of -1 indicates unknown depth (eval expression or HTML event handler).
    // We could still fast-path these by generating a loop over the (dynamically loaded) scope chain length,
    // but I doubt it's worth it.
    // If the dst opnd is a byte code temp, that indicates we're prepending a block scope or some such and
    // shouldn't attempt to do this.
    if (envDepth == (uint16)-1 ||
        (!doStackFrameDisplay && (instr->isNonFastPathFrameDisplay || instr->GetDst()->AsRegOpnd()->m_sym->IsTempReg(instr->m_func))) ||
        PHASE_OFF(Js::FrameDisplayFastPathPhase, func))
    {
        // Slow path: pick the helper by strict mode and parent presence.
        if (isStrict)
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplayNoParent);
            }
        }
        else
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplayNoParent);
            }
        }
        return;
    }

    // New display holds the parent's envDepth scopes plus the current frame.
    uint16 frameDispLength = envDepth + 1;
    Assert(frameDispLength > 0);

    IR::RegOpnd *dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::RegOpnd *currentFrameOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    uint allocSize = sizeof(Js::FrameDisplay) + (frameDispLength * sizeof(Js::Var));
    if (doStackFrameDisplay)
    {
        IR::Instr *insertInstr = func->GetFunctionEntryInsertionPoint();

        // Initialize stack pointers for scope slots and frame display together at the top of the function
        // (in case we bail out before executing the instructions).
        IR::LabelInstr *labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // Check whether stack functions have been disabled since we jitted.
        // If they have, then we must allocate closure memory on the heap.
        InsertTestBranch(IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, m_func),
            IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, m_func, true),
            Js::OpCode::BrEq_A, labelNoStackFunc, insertInstr);

        // allocSize is greater than TyMachPtr and hence changing the initial size to TyMisc
        StackSym * stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, allocSize);
        InsertLea(dstOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        uint scopeSlotAllocSize =
            (m_func->GetJITFunctionBody()->GetScopeSlotArraySize() + Js::ScopeSlots::FirstSlotIndex) * sizeof(Js::Var);

        stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, scopeSlotAllocSize);
        InsertLea(currentFrameOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        InsertBranch(Js::OpCode::Br, labelDone, insertInstr);

        // Fallback: stack functions disabled, allocate both from the recycler.
        insertInstr->InsertBefore(labelNoStackFunc);
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, insertInstr, true);
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, scopeSlotAllocSize, currentFrameOpnd, insertInstr, true);

        insertInstr->InsertBefore(labelDone);

        // Record the display and scope-slot pointers in their dedicated syms.
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalFrameDisplaySym(), 0, TyMachReg, m_func), dstOpnd, insertInstr);
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalClosureSym(), 0, TyMachReg, m_func), currentFrameOpnd, insertInstr);
    }
    else
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
    }

    // Copy contents of environment
    // Work back to front to leave the head element(s) in cache
    if (envDepth > 0)
    {
        IR::RegOpnd *envOpnd = instr->UnlinkSrc2()->AsRegOpnd();
        for (uint16 i = envDepth; i >= 1; i--)
        {
            // Parent scope i-1 becomes scope i in the new display.
            IR::Opnd *scopeOpnd = IR::RegOpnd::New(TyMachReg, func);
            IR::Opnd *envLoadOpnd =
                IR::IndirOpnd::New(envOpnd, Js::FrameDisplay::GetOffsetOfScopes() + ((i - 1) * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(scopeOpnd, envLoadOpnd, instr);
            IR::Opnd *dstStoreOpnd =
                IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes() + (i * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(dstStoreOpnd, scopeOpnd, instr);
        }
    }

    // Assign current element.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, func),
        currentFrameOpnd,
        instr);

    // Combine tag, strict mode flag, and length
    uintptr_t bits = 1 |
        (isStrict << (Js::FrameDisplay::GetOffsetOfStrictMode() * 8)) |
        (frameDispLength << (Js::FrameDisplay::GetOffsetOfLength() * 8));
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, 0, TyMachReg, func),
        IR::IntConstOpnd::New(bits, TyMachReg, func, true),
        instr);

    instr->Remove();
}
  20905. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Func *const func) const
  20906. {
  20907. return IR::AddrOpnd::New(func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  20908. }
  20909. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Js::FunctionBody *const functionBody) const
  20910. {
  20911. // TODO: OOP JIT, CreateFunctionBodyOpnd
  20912. Assert(!m_func->IsOOPJIT());
  20913. return IR::AddrOpnd::New(functionBody, IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  20914. }
  20915. bool
  20916. Lowerer::GenerateRecyclerOrMarkTempAlloc(IR::Instr * instr, IR::RegOpnd * dstOpnd, IR::JnHelperMethod allocHelper, size_t allocSize, IR::SymOpnd ** tempObjectSymOpnd)
  20917. {
  20918. if (instr->dstIsTempObject)
  20919. {
  20920. *tempObjectSymOpnd = GenerateMarkTempAlloc(dstOpnd, allocSize, instr);
  20921. return false;
  20922. }
  20923. this->GenerateRecyclerAlloc(allocHelper, allocSize, dstOpnd, instr);
  20924. *tempObjectSymOpnd = nullptr;
  20925. return true;
  20926. }
  20927. IR::SymOpnd *
  20928. Lowerer::GenerateMarkTempAlloc(IR::RegOpnd *const dstOpnd, const size_t allocSize, IR::Instr *const insertBeforeInstr)
  20929. {
  20930. Assert(dstOpnd);
  20931. Assert(allocSize != 0);
  20932. Assert(insertBeforeInstr);
  20933. Func *const func = insertBeforeInstr->m_func;
  20934. // Allocate stack space for the reg exp instance, and a slot for the boxed value
  20935. StackSym *const tempObjectSym = StackSym::New(TyMisc, func);
  20936. m_func->StackAllocate(tempObjectSym, (int)(allocSize + sizeof(void *)));
  20937. IR::SymOpnd * tempObjectOpnd = IR::SymOpnd::New(tempObjectSym, sizeof(void *), TyVar, func);
  20938. InsertLea(dstOpnd, tempObjectOpnd, insertBeforeInstr);
  20939. // Initialize the boxed instance slot
  20940. if (this->outerMostLoopLabel == nullptr)
  20941. {
  20942. GenerateMemInit(dstOpnd, -(int)sizeof(void *), IR::AddrOpnd::NewNull(func), insertBeforeInstr, false);
  20943. }
  20944. else if (!PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func))
  20945. {
  20946. InsertMove(IR::SymOpnd::New(tempObjectSym, TyMachPtr, func), IR::AddrOpnd::NewNull(func), this->outerMostLoopLabel, false);
  20947. }
  20948. return tempObjectOpnd;
  20949. }
  20950. void Lowerer::LowerBrFncCachedScopeEq(IR::Instr *instr)
  20951. {
  20952. Assert(instr->m_opcode == Js::OpCode::BrFncCachedScopeEq || instr->m_opcode == Js::OpCode::BrFncCachedScopeNeq);
  20953. Js::OpCode opcode = (instr->m_opcode == Js::OpCode::BrFncCachedScopeEq ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A);
  20954. IR::RegOpnd *src1Reg = instr->UnlinkSrc1()->AsRegOpnd();
  20955. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1Reg, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), TyMachReg, this->m_func);
  20956. this->InsertCompareBranch(indirOpnd, instr->UnlinkSrc2(), opcode, false, instr->AsBranchInstr()->GetTarget(), instr->m_next);
  20957. instr->Remove();
  20958. }
  20959. IR::Instr* Lowerer::InsertLoweredRegionStartMarker(IR::Instr* instrToInsertBefore)
  20960. {
  20961. AssertMsg(instrToInsertBefore->m_prev != nullptr, "Can't insert lowered region start marker as the first instr in the func.");
  20962. IR::LabelInstr* startMarkerLabel = IR::LabelInstr::New(Js::OpCode::Label, instrToInsertBefore->m_func);
  20963. instrToInsertBefore->InsertBefore(startMarkerLabel);
  20964. return startMarkerLabel;
  20965. }
  20966. IR::Instr* Lowerer::RemoveLoweredRegionStartMarker(IR::Instr* startMarkerInstr)
  20967. {
  20968. AssertMsg(startMarkerInstr->m_prev != nullptr, "Lowered region start marker became the first instruction in the func after lowering?");
  20969. IR::Instr* prevInstr = startMarkerInstr->m_prev;
  20970. startMarkerInstr->Remove();
  20971. return prevInstr;
  20972. }
  20973. IR::Instr* Lowerer::GetLdSpreadIndicesInstr(IR::Instr *instr)
  20974. {
  20975. IR::Opnd *src2 = instr->GetSrc2();
  20976. if (!src2->IsSymOpnd())
  20977. {
  20978. return nullptr;
  20979. }
  20980. IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
  20981. StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  20982. Assert(argLinkSym->IsSingleDef());
  20983. return argLinkSym->m_instrDef;
  20984. }
  20985. bool Lowerer::IsSpreadCall(IR::Instr *instr)
  20986. {
  20987. IR::Instr *lastInstr = GetLdSpreadIndicesInstr(instr);
  20988. return lastInstr && lastInstr->m_opcode == Js::OpCode::LdSpreadIndices;
  20989. }
  20990. // When under debugger, generate a new label to be used as safe place to jump after ignore exception,
  20991. // insert it after insertAfterInstr, and return the label inserted.
  20992. // Returns nullptr/NoOP for non-debugger code path.
  20993. //static
  20994. IR::LabelInstr* Lowerer::InsertContinueAfterExceptionLabelForDebugger(Func* func, IR::Instr* insertAfterInstr, bool isHelper)
  20995. {
  20996. Assert(func);
  20997. Assert(insertAfterInstr);
  20998. IR::LabelInstr* continueAfterExLabel = nullptr;
  20999. if (func->IsJitInDebugMode())
  21000. {
  21001. continueAfterExLabel = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  21002. insertAfterInstr->InsertAfter(continueAfterExLabel);
  21003. }
  21004. return continueAfterExLabel;
  21005. }
// Lowers a string switch whose cases are all single-character strings into a
// jump-table dispatch: filter out strings whose length != 1, materialize the
// string buffer (calling the GetSz helper when the cached buffer is null),
// load the character, rebase it by the lowest case value, range-check it
// against the table, and emit the multi-branch through the jump table.
void Lowerer::GenerateSingleCharStrJumpTableLookup(IR::Instr * instr)
{
    IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
    Func * func = instr->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV strLengthOpnd, str->length
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);

    // CMP strLengthOpnd, 1
    // JNE defaultLabel
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)multiBrInstr->GetBranchJumpTable()->defaultTarget;
    InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);

    // MOV strBuffer, str->psz
    IR::RegOpnd * strBufferOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(strBufferOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, func), instr);

    // TST strBuffer, strBuffer
    // JNE $continue
    InsertTestBranch(strBufferOpnd, strBufferOpnd, Js::OpCode::BrNeq_A, continueLabel, instr);

    // Buffer not materialized yet: call the helper to get it.
    // $helper:
    //      PUSH str
    //      CALL JavascriptString::GetSzHelper
    //      MOV strBuffer, eax
    // $continue:
    instr->InsertBefore(helperLabel);
    m_lowererMD.LoadHelperArgument(instr, instr->GetSrc1());
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, strBufferOpnd, IR::HelperCallOpnd::New(IR::HelperString_GetSz, func), func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 0);
    instr->InsertBefore(continueLabel);

    // MOV charOpnd, [strBuffer]   (zero-extended 16-bit character load)
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(charOpnd, IR::IndirOpnd::New(strBufferOpnd, 0, TyUint16, func), instr);

    if (multiBrInstr->m_baseCaseValue != 0)
    {
        // SUB charOpnd, baseIndex   (rebase so the table starts at index 0)
        InsertSub(false, charOpnd, charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_baseCaseValue, TyUint32, func), instr);
    }

    // CMP charOpnd, lastCaseIndex - baseCaseIndex
    // JA defaultLabel   (unsigned compare also catches chars below the base)
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_lastCaseValue - multiBrInstr->m_baseCaseValue, TyUint32, func),
        Js::OpCode::BrGt_A, true, defaultLabelInstr, instr);

    instr->UnlinkSrc1();
    LowerJumpTableMultiBranch(multiBrInstr, charOpnd);
}
  21051. void Lowerer::GenerateSwitchStringLookup(IR::Instr * instr)
  21052. {
  21053. /* Collect information about string length in all the case*/
  21054. charcount_t minLength = UINT_MAX;
  21055. charcount_t maxLength = 0;
  21056. BVUnit32 bvLength;
  21057. instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->dictionary.Map([&](JITJavascriptString * str, void *)
  21058. {
  21059. charcount_t len = str->GetLength();
  21060. minLength = min(minLength, str->GetLength());
  21061. maxLength = max(maxLength, str->GetLength());
  21062. if (len < 32)
  21063. {
  21064. bvLength.Set(len);
  21065. }
  21066. });
  21067. Func * func = instr->m_func;
  21068. IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
  21069. InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
  21070. IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->defaultTarget;
  21071. if (minLength == maxLength)
  21072. {
  21073. // Generate single length filter
  21074. InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(minLength, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
  21075. }
  21076. else if (maxLength < 32)
  21077. {
  21078. // Generate bit filter
  21079. // Jump to default label if the bit is not on for the length % 32
  21080. IR::IntConstOpnd * lenBitMaskOpnd = IR::IntConstOpnd::New(bvLength.GetWord(), TyUint32, func);
  21081. InsertBitTestBranch(lenBitMaskOpnd, strLengthOpnd, false, defaultLabelInstr, instr);
  21082. // Jump to default label if the bit is > 32
  21083. InsertTestBranch(strLengthOpnd, IR::IntConstOpnd::New(UINT32_MAX ^ 31, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
  21084. }
  21085. else
  21086. {
  21087. // CONSIDER: Generate range filter
  21088. }
  21089. this->LowerMultiBr(instr, IR::HelperOp_SwitchStringLookUp);
  21090. }
  21091. IR::Instr *
  21092. Lowerer::LowerTry(IR::Instr* instr, bool tryCatch)
  21093. {
  21094. if (this->m_func->hasBailout)
  21095. {
  21096. this->EnsureBailoutReturnValueSym();
  21097. }
  21098. this->EnsureHasBailedOutSym();
  21099. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  21100. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), this->m_func);
  21101. instr->InsertBefore(setInstr);
  21102. LowererMD::Legalize(setInstr);
  21103. return m_lowererMD.LowerTry(instr, tryCatch ? IR::HelperOp_TryCatch : ((this->m_func->IsSimpleJit() && !this->m_func->hasBailout) || !this->m_func->DoOptimizeTry()) ?
  21104. IR::HelperOp_TryFinallySimpleJit : IR::HelperOp_TryFinally);
  21105. }
  21106. void
  21107. Lowerer::EnsureBailoutReturnValueSym()
  21108. {
  21109. if (this->m_func->m_bailoutReturnValueSym == nullptr)
  21110. {
  21111. this->m_func->m_bailoutReturnValueSym = StackSym::New(TyVar, this->m_func);
  21112. this->m_func->StackAllocate(this->m_func->m_bailoutReturnValueSym, sizeof(Js::Var));
  21113. }
  21114. }
  21115. void
  21116. Lowerer::EnsureHasBailedOutSym()
  21117. {
  21118. if (this->m_func->m_hasBailedOutSym == nullptr)
  21119. {
  21120. this->m_func->m_hasBailedOutSym = StackSym::New(TyUint32, this->m_func);
  21121. this->m_func->StackAllocate(this->m_func->m_hasBailedOutSym, MachRegInt);
  21122. }
  21123. }
void
Lowerer::InsertReturnThunkForRegion(Region* region, IR::LabelInstr* restoreLabel)
{
    // Emits, at most once per region, the bailout "return thunk": the code a
    // bailout inside this EH region returns through. Each thunk forwards to an
    // enclosing region's thunk (unwinding one EH frame per hop) until the root,
    // where control continues at restoreLabel.
    Assert(this->m_func->isPostLayout);
    Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch || region->GetType() == RegionTypeFinally);

    if (!region->returnThunkEmitted)
    {
        this->m_func->m_exitInstr->InsertAfter(region->GetBailoutReturnThunkLabel());

        bool newLastInstrInserted = false;
        IR::Instr * insertBeforeInstr = region->GetBailoutReturnThunkLabel()->m_next;
        if (insertBeforeInstr == nullptr)
        {
            // The thunk label is now the very last instruction; insert a
            // placeholder Nop to serve as the insertion point (removed below).
            Assert(this->m_func->m_exitInstr == this->m_func->m_tailInstr);
            insertBeforeInstr = IR::Instr::New(Js::OpCode::Nop, this->m_func);
            newLastInstrInserted = true;
            region->GetBailoutReturnThunkLabel()->InsertAfter(insertBeforeInstr);
            this->m_func->m_tailInstr = insertBeforeInstr;
        }

        IR::LabelOpnd * continuationAddr;
        // We insert return thunk to the region's parent return thunk label.
        // For non-exception finallys, we do not need a return thunk,
        // because we are not calling non-exception finallys from within amd64_callWithFakeFrame.
        // But a non-exception finally may be within other eh regions that need a return thunk.
        if (region->IsNonExceptingFinally())
        {
            // Skip over non-excepting-finally ancestors to reach the nearest
            // region that actually has a thunk.
            Assert(region->GetParent()->GetType() != RegionTypeRoot);
            Region *ancestor = region->GetParent()->GetFirstAncestorOfNonExceptingFinallyParent();
            Assert(ancestor && !ancestor->IsNonExceptingFinally());
            if (ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->IsNonExceptingFinally())
        {
            // Parent is a non-excepting finally (no thunk of its own); continue
            // at the nearest enclosing region that has one.
            Region *ancestor = region->GetFirstAncestorOfNonExceptingFinally();
            if (ancestor && ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->GetType() != RegionTypeRoot)
        {
            continuationAddr = IR::LabelOpnd::New(region->GetParent()->GetBailoutReturnThunkLabel(), this->m_func);
        }
        else
        {
            // Parent is the root region: bail out to the restore label directly.
            continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
        }

        IR::Instr * lastInstr = m_lowererMD.LowerEHRegionReturn(insertBeforeInstr, continuationAddr);

        if (newLastInstrInserted)
        {
            // Drop the placeholder Nop and make the lowered return the new tail.
            Assert(this->m_func->m_tailInstr == insertBeforeInstr);
            insertBeforeInstr->Remove();
            this->m_func->m_tailInstr = lastInstr;
        }

        region->returnThunkEmitted = true;
    }
}
  21191. void
  21192. Lowerer::SetHasBailedOut(IR::Instr * bailoutInstr)
  21193. {
  21194. Assert(this->m_func->isPostLayout);
  21195. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  21196. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func);
  21197. bailoutInstr->InsertBefore(setInstr);
  21198. LowererMD::Legalize(setInstr, true);
  21199. }
IR::Instr*
Lowerer::EmitEHBailoutStackRestore(IR::Instr * bailoutInstr)
{
    // On x86 (where argouts live on the stack), restores ESP past any
    // outstanding out-param stack space after an EH bailout call, including the
    // padding that was added per start-call region to keep the stack aligned.
    // Returns the last instruction emitted (bailoutInstr itself if nothing was needed).
    Assert(this->m_func->isPostLayout);

#ifdef _M_IX86
    BailOutInfo * bailoutInfo = bailoutInstr->GetBailOutInfo();
    if (bailoutInfo->startCallCount != 0)
    {
        uint totalStackToBeRestored = 0;
        uint stackAlignmentAdjustment = 0;
        for (uint i = 0; i < bailoutInfo->startCallCount; i++)
        {
            uint startCallOutParamCount = bailoutInfo->GetStartCallOutParamCount(i);
            // One extra slot was allocated whenever this start-call's out-param
            // bytes weren't already a multiple of the stack alignment.
            if ((Math::Align<int32>(startCallOutParamCount * MachPtr, MachStackAlignment) - (startCallOutParamCount * MachPtr)) != 0)
            {
                stackAlignmentAdjustment++;
            }
        }
        totalStackToBeRestored = (bailoutInfo->totalOutParamCount + stackAlignmentAdjustment) * MachPtr;

        // LEA esp, [esp + totalStackToBeRestored] -- adjust ESP without clobbering flags.
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegStackPointer(), TyMachReg, this->m_func);
        IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, totalStackToBeRestored, TyMachReg, this->m_func);
        IR::Instr * stackRestoreInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
        bailoutInstr->InsertAfter(stackRestoreInstr);
        return stackRestoreInstr;
    }
#endif
    return bailoutInstr;
}
  21228. void
  21229. Lowerer::EmitSaveEHBailoutReturnValueAndJumpToRetThunk(IR::Instr * insertAfterInstr)
  21230. {
  21231. Assert(this->m_func->isPostLayout);
  21232. // After the CALL SaveAllRegistersAndBailout instruction, emit
  21233. //
  21234. // MOV bailoutReturnValueSym, eax
  21235. // JMP $currentRegion->bailoutReturnThunkLabel
  21236. IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
  21237. IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  21238. IR::Instr * movInstr = IR::Instr::New(LowererMD::GetStoreOp(TyVar), bailoutReturnValueSymOpnd, eaxOpnd, this->m_func);
  21239. insertAfterInstr->InsertAfter(movInstr);
  21240. LowererMD::Legalize(movInstr, true);
  21241. IR::BranchInstr * jumpInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, this->currentRegion->GetBailoutReturnThunkLabel(), this->m_func);
  21242. movInstr->InsertAfter(jumpInstr);
  21243. }
  21244. void
  21245. Lowerer::EmitRestoreReturnValueFromEHBailout(IR::LabelInstr * restoreLabel, IR::LabelInstr * epilogLabel)
  21246. {
  21247. Assert(this->m_func->isPostLayout);
  21248. // JMP $epilog
  21249. // $restore:
  21250. // MOV eax, bailoutReturnValueSym
  21251. // $epilog:
  21252. IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
  21253. IR::RegOpnd * eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  21254. IR::Instr * movInstr = IR::Instr::New(LowererMD::GetLoadOp(TyVar), eaxOpnd, bailoutReturnValueSymOpnd, this->m_func);
  21255. epilogLabel->InsertBefore(restoreLabel);
  21256. epilogLabel->InsertBefore(movInstr);
  21257. LowererMD::Legalize(movInstr, true);
  21258. restoreLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, epilogLabel, this->m_func));
  21259. }
void
Lowerer::InsertBitTestBranch(IR::Opnd * bitMaskOpnd, IR::Opnd * bitIndex, bool jumpIfBitOn, IR::LabelInstr * targetLabel, IR::Instr * insertBeforeInstr)
{
    // Branches to targetLabel based on bit `bitIndex` of `bitMaskOpnd`:
    // taken when the bit is set iff jumpIfBitOn.
#if defined(_M_IX86) || defined(_M_AMD64)
    // Generate bit test and branch
    // BT bitMaskOpnd, bitIndex
    // JB/JAE targetLabel
    Func * func = this->m_func;
    IR::Instr * instr = IR::Instr::New(Js::OpCode::BT, func);
    instr->SetSrc1(bitMaskOpnd);
    instr->SetSrc2(bitIndex);
    insertBeforeInstr->InsertBefore(instr);
    if (!(bitMaskOpnd->IsRegOpnd() || bitMaskOpnd->IsIndirOpnd() || bitMaskOpnd->IsMemRefOpnd()))
    {
        // BT's first operand must be register or memory; hoist anything else
        // (e.g. an immediate mask) into a register first.
        instr->HoistSrc1(Js::OpCode::MOV);
    }
    // BT copies the tested bit into CF: JB branches if set, JAE if clear.
    InsertBranch(jumpIfBitOn ? Js::OpCode::JB : Js::OpCode::JAE, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM)
    // ARM doesn't have a bit test instruction, so generate:
    // MOV r1, 1
    // SHL r1, bitIndex
    // TEST bitMaskOpnd, r1
    // BEQ/BNEQ targetLabel
    Func * func = this->m_func;
    IR::RegOpnd * lenBitOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(lenBitOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    InsertShift(Js::OpCode::Shl_I4, false, lenBitOpnd, lenBitOpnd, bitIndex, insertBeforeInstr);
    InsertTestBranch(lenBitOpnd, bitMaskOpnd, jumpIfBitOn? Js::OpCode::BrNeq_A :Js::OpCode::BrEq_A, targetLabel, insertBeforeInstr);
#else
    AssertMsg(false, "Not implemented");
#endif
}
  21292. //
  21293. // Generates an object test and then a string test with the static string type
  21294. //
  21295. void
  21296. Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
  21297. {
  21298. Assert(srcReg);
  21299. if (!srcReg->GetValueType().IsString())
  21300. {
  21301. if (generateObjectCheck && !srcReg->IsNotTaggedValue())
  21302. {
  21303. this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
  21304. }
  21305. // CMP [regSrcStr + offset(type)] , static string type -- check base string type
  21306. // BrEq/BrNeq labelHelper.
  21307. IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
  21308. IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
  21309. if (continueLabel)
  21310. {
  21311. InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
  21312. }
  21313. else
  21314. {
  21315. InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  21316. }
  21317. }
  21318. }
void
Lowerer::LowerConvNum(IR::Instr *instrLoad, bool noMathFastPath)
{
    // Lowers Conv_Num. When fast paths are disabled or the source is not a
    // register, fall back entirely to the ConvNumber helper. Otherwise emit a
    // tagged-int fast path (a tagged int is already a number: a plain copy
    // suffices) and call the helper only for values not known to be ints.
    if (PHASE_OFF(Js::OtherFastPathPhase, this->m_func) || noMathFastPath || !instrLoad->GetSrc1()->IsRegOpnd())
    {
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
        return;
    }

    // MOV dst, src1
    // TEST src1, 1
    // JNE $done
    // call ToNumber
    //$done:

    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = NULL;
    IR::Instr *instr;

    if (src1->IsTaggedInt())
    {
        // Statically known tagged int: no runtime test required.
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Statically known non-int: no fast path possible.
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // MOV dst, src1 -- on the int path the value is already a number.
        instr = LowererMD::CreateAssign(instrLoad->GetDst(), src1, instrLoad);

        if (!isInt)
        {
            // Unknown at JIT time: object test jumps to $done for tagged values,
            // letting only non-tagged values fall into the helper call.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            bool didTest = m_lowererMD.GenerateObjectTest(src1, instrLoad, labelDone);
            if (didTest)
            {
                // This label is needed only to mark the helper block
                IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                instrLoad->InsertBefore(labelHelper);
            }
        }
    }

    if (!isInt)
    {
        if (labelDone)
        {
            instrLoad->InsertAfter(labelDone);
        }
        // Slow path: the ConvNumber helper handles everything that is not a tagged int.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
    }
    else
    {
        // Pure tagged-int case: the assign above did all the work.
        instrLoad->Remove();
    }
}
  21374. IR::Opnd *
  21375. Lowerer::LoadSlotArrayWithCachedLocalType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  21376. {
  21377. IR::RegOpnd *opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  21378. if (propertySymOpnd->UsesAuxSlot())
  21379. {
  21380. // If we use the auxiliary slot array, load it and return it
  21381. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  21382. IR::Opnd *opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  21383. LowererMD::CreateAssign(opndSlotArray, opndIndir, instrInsert);
  21384. return opndSlotArray;
  21385. }
  21386. else
  21387. {
  21388. // If we use inline slot return the address to the object header
  21389. return opndBase;
  21390. }
  21391. }
  21392. IR::Opnd *
  21393. Lowerer::LoadSlotArrayWithCachedProtoType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  21394. {
  21395. // Get the prototype object from the cache
  21396. intptr_t prototypeObject = propertySymOpnd->GetProtoObject();
  21397. Assert(prototypeObject != 0);
  21398. if (propertySymOpnd->UsesAuxSlot())
  21399. {
  21400. // If we use the auxiliary slot array, load it from the prototype object and return it
  21401. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  21402. IR::Opnd *opnd = IR::MemRefOpnd::New((char*)prototypeObject + Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func, IR::AddrOpndKindDynamicAuxSlotArrayRef);
  21403. LowererMD::CreateAssign(opndSlotArray, opnd, instrInsert);
  21404. return opndSlotArray;
  21405. }
  21406. else
  21407. {
  21408. // If we use inline slot return the address of the prototype object
  21409. return IR::MemRefOpnd::New(prototypeObject, TyMachReg, this->m_func);
  21410. }
  21411. }
  21412. IR::Instr *
  21413. Lowerer::LowerLdAsmJsEnv(IR::Instr * instr)
  21414. {
  21415. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  21416. IR::Opnd * functionObjOpnd;
  21417. IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  21418. Assert(!instr->GetSrc1());
  21419. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfModuleMemory(), TyMachPtr, m_func);
  21420. instr->SetSrc1(indirOpnd);
  21421. LowererMD::ChangeToAssign(instr);
  21422. return instrPrev;
  21423. }
  21424. IR::Instr *
  21425. Lowerer::LowerLdNativeCodeData(IR::Instr * instr)
  21426. {
  21427. Assert(!instr->GetSrc1());
  21428. Assert(m_func->IsTopFunc());
  21429. IR::Instr * instrPrev = instr->m_prev;
  21430. instr->SetSrc1(IR::MemRefOpnd::New((void*)m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, m_func, IR::AddrOpndKindDynamicNativeCodeDataRef));
  21431. LowererMD::ChangeToAssign(instr);
  21432. return instrPrev;
  21433. }
  21434. IR::Instr *
  21435. Lowerer::LowerLdEnv(IR::Instr * instr)
  21436. {
  21437. IR::Opnd * src1 = instr->GetSrc1();
  21438. IR::Opnd * functionObjOpnd;
  21439. IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  21440. Assert(!instr->GetSrc1());
  21441. if (src1 == nullptr || functionObjOpnd->IsRegOpnd())
  21442. {
  21443. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
  21444. Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, m_func);
  21445. instr->SetSrc1(indirOpnd);
  21446. }
  21447. else
  21448. {
  21449. Assert(functionObjOpnd->IsAddrOpnd());
  21450. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  21451. IR::MemRefOpnd* functionEnvMemRefOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::ScriptFunction::GetOffsetOfEnvironment()),
  21452. TyMachPtr, this->m_func, IR::AddrOpndKindDynamicFunctionEnvironmentRef);
  21453. instr->SetSrc1(functionEnvMemRefOpnd);
  21454. }
  21455. LowererMD::ChangeToAssign(instr);
  21456. return instrPrev;
  21457. }
IR::Instr *
Lowerer::LowerFrameDisplayCheck(IR::Instr * instr)
{
    // Emits runtime validation of a frame display (scope chain) against the
    // expectations recorded in its FrameDisplayCheckRecord: the display must
    // hold enough scopes, and each checked slot array within it must be long
    // enough. Any failed check calls the fatal-internal-error helper. The check
    // instruction itself is then turned into a plain assign.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::AddrOpnd *addrOpnd = instr->UnlinkSrc2()->AsAddrOpnd();
    FrameDisplayCheckRecord *record = (FrameDisplayCheckRecord*)addrOpnd->m_address;

    IR::LabelInstr *errorLabel = nullptr;
    IR::LabelInstr *continueLabel = nullptr;
    IR::RegOpnd *envOpnd = instr->GetDst()->AsRegOpnd();
    uint32 frameDisplayOffset = Js::FrameDisplay::GetOffsetOfScopes()/sizeof(Js::Var);

    if (record->slotId != (uint32)-1 && record->slotId > frameDisplayOffset)
    {
        // Check that the frame display has enough scopes in it to satisfy the code.
        errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                       Js::FrameDisplay::GetOffsetOfLength(),
                                                       TyUint16, m_func, true);
        IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(record->slotId - frameDisplayOffset, TyUint16, m_func);
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
    }

    if (record->table)
    {
        // Check the size of each of the slot arrays in the scope chain.
        FOREACH_HASHTABLE_ENTRY(uint32, bucket, record->table)
        {
            uint32 slotId = bucket.element;
            if (slotId != (uint32)-1 && slotId > Js::ScopeSlots::FirstSlotIndex)
            {
                // Labels are created lazily, only if at least one check is emitted.
                if (errorLabel == nullptr)
                {
                    errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                    continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
                }
                // Load this scope's slot array pointer out of the display...
                IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                               bucket.value * sizeof(Js::Var),
                                                               TyVar, m_func, true);
                IR::RegOpnd * slotArrayOpnd = IR::RegOpnd::New(TyVar, m_func);
                InsertMove(slotArrayOpnd, indirOpnd, insertInstr);
                // ...then compare its encoded slot count against the slot we need.
                indirOpnd = IR::IndirOpnd::New(slotArrayOpnd,
                                               Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                               TyVar, m_func, true);
                IR::IntConstOpnd * slotIdOpnd = IR::IntConstOpnd::New(slotId - Js::ScopeSlots::FirstSlotIndex,
                                                                      TyUint32, m_func);
                InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
            }
        }
        NEXT_HASHTABLE_ENTRY;
    }

    if (errorLabel)
    {
        // All checks passed: jump over the error block.
        InsertBranch(Js::OpCode::Br, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }
    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerSlotArrayCheck(IR::Instr * instr)
{
    // Validates at runtime that a slot array is big enough for the highest slot
    // id the generated code will access; on failure, calls the
    // fatal-internal-error helper. The check instruction itself is then turned
    // into a plain assign.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::RegOpnd *slotArrayOpnd = instr->GetDst()->AsRegOpnd();
    StackSym *stackSym = slotArrayOpnd->m_sym;
    IR::IntConstOpnd *slotIdOpnd = instr->UnlinkSrc2()->AsIntConstOpnd();
    uint32 slotId = (uint32)slotIdOpnd->GetValue();
    Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);

    if (slotId > Js::ScopeSlots::FirstSlotIndex)
    {
        if (m_func->DoStackFrameDisplay() && stackSym->m_id == m_func->GetLocalClosureSym()->m_id)
        {
            // The pointer we loaded points to the reserved/known address where the slot array can be boxed.
            // Deref to get the real value.
            IR::IndirOpnd * srcOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func), 0, TyVar, m_func);
            IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyVar, m_func);
            InsertMove(dstOpnd, srcOpnd, insertInstr);
            // Check the dereferenced value below, not the original sym.
            stackSym = dstOpnd->m_sym;
        }
        IR::LabelInstr *errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // Compare the encoded slot count stored in the slot array header...
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func),
                                                       Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                                       TyVar, m_func, true);
        // ...against the required slot id, rebased past the header slots
        // (the const opnd is reused in place with the rebased value).
        slotIdOpnd->SetValue(slotId - Js::ScopeSlots::FirstSlotIndex);
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrGt_A, true, continueLabel, insertInstr);

        // Slot count too small: fatal internal error.
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }
    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::RegOpnd *
Lowerer::LoadIndexFromLikelyFloat(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
    // Converts an index that profile data says is likely a float into an int32
    // register. A tagged-int fast path is still emitted first; float values are
    // converted to uint32 and the conversion is verified by converting back and
    // comparing, so non-integral floats branch to notIntLabel. Negative indices
    // branch to negativeLabel unless skipNegativeCheck is set.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Func *func = insertBeforeInstr->m_func;
    IR::LabelInstr * convertToUint = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * fallThrough = IR::LabelInstr::New(Js::OpCode::Label, func);

    // First generate test for tagged int even though profile data says likely float. Indices are usually int and we need a fast path before we try to convert float to int
    // mov intIndex, index
    // sar intIndex, 1
    // jae convertToInt
    IR::RegOpnd *int32IndexOpnd = GenerateUntagVar(indexOpnd, convertToUint, insertBeforeInstr, !indexOpnd->IsTaggedInt());

    if (!skipNegativeCheck)
    {
        // test index, index
        // js $notTaggedIntOrNegative
        InsertTestBranch(int32IndexOpnd, int32IndexOpnd, LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), negativeLabel, insertBeforeInstr);
    }
    InsertBranch(Js::OpCode::Br, fallThrough, insertBeforeInstr);

    insertBeforeInstr->InsertBefore(convertToUint);

    // try to convert float to int in a fast path
#if FLOATVAR
    IR::RegOpnd* floatIndexOpnd = m_lowererMD.CheckFloatAndUntag(indexOpnd, insertBeforeInstr, notIntLabel);
#else
    // Boxed floats: verify the value is a JavascriptNumber, then read the
    // double straight out of the object.
    m_lowererMD.GenerateFloatTest(indexOpnd, insertBeforeInstr, notIntLabel);
    IR::IndirOpnd * floatIndexOpnd = IR::IndirOpnd::New(indexOpnd, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif

    IR::LabelInstr * doneConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * helperConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func, true /*helper*/);
    m_lowererMD.ConvertFloatToInt32(int32IndexOpnd, floatIndexOpnd, helperConvUint32, doneConvUint32, insertBeforeInstr);

    // helper path: the inline conversion failed, call the uint32 conversion helper
    insertBeforeInstr->InsertBefore(helperConvUint32);
    m_lowererMD.LoadDoubleHelperArgument(insertBeforeInstr, floatIndexOpnd);
    IR::Instr * helperCall = IR::Instr::New(Js::OpCode::Call, int32IndexOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(helperCall);
    m_lowererMD.ChangeToHelperCall(helperCall, IR::HelperConv_ToUInt32Core);

    // main path
    insertBeforeInstr->InsertBefore(doneConvUint32);

    // Convert uint32 back to float for comparison that conversion was indeed successful
    IR::RegOpnd *floatOpndFromUint32 = IR::RegOpnd::New(TyFloat64, func);
    m_lowererMD.EmitUIntToFloat(floatOpndFromUint32, int32IndexOpnd, insertBeforeInstr);

    // compare with float from the original indexOpnd, we need floatIndex == (float64)(uint32)floatIndex
    InsertCompareBranch(floatOpndFromUint32, floatIndexOpnd, Js::OpCode::BrNeq_A, notIntLabel, insertBeforeInstr, false);

    insertBeforeInstr->InsertBefore(fallThrough);
    return int32IndexOpnd;
}
  21611. void
  21612. Lowerer::AllocStackForInObjectEnumeratorArray()
  21613. {
  21614. Func * func = this->m_func;
  21615. Assert(func->IsTopFunc());
  21616. if (func->m_forInLoopMaxDepth)
  21617. {
  21618. func->m_forInEnumeratorArrayOffset = func->StackAllocate(sizeof(Js::ForInObjectEnumerator) * this->m_func->m_forInLoopMaxDepth);
  21619. }
  21620. }
IR::RegOpnd *
Lowerer::GenerateForInEnumeratorLoad(IR::Opnd * forInEnumeratorOpnd, IR::Instr * insertBeforeInstr)
{
    // Produces a register holding the address of the ForInObjectEnumerator for
    // this loop. A sym opnd refers to a not-yet-allocated stack slot: bind it
    // here to its place in the per-function enumerator array. An indir opnd
    // with offset 0 already addresses the enumerator and is returned as-is.
    Func * func = insertBeforeInstr->m_func;
    if (forInEnumeratorOpnd->IsSymOpnd())
    {
        StackSym * stackSym = forInEnumeratorOpnd->AsSymOpnd()->GetStackSym();
        Assert(!stackSym->m_allocated);
        // Until allocation, m_offset is overloaded to carry the for-in loop
        // nesting level rather than a real stack offset.
        uint forInLoopLevel = stackSym->m_offset;
        Assert(func->m_forInLoopBaseDepth + forInLoopLevel < this->m_func->m_forInLoopMaxDepth);
        stackSym->m_offset = this->m_func->m_forInEnumeratorArrayOffset + ((func->m_forInLoopBaseDepth + forInLoopLevel) * sizeof(Js::ForInObjectEnumerator));
        stackSym->m_allocated = true;
    }
    else
    {
        Assert(forInEnumeratorOpnd->IsIndirOpnd());
        if (forInEnumeratorOpnd->AsIndirOpnd()->GetOffset() == 0)
        {
            // The base register already points directly at the enumerator.
            return forInEnumeratorOpnd->AsIndirOpnd()->GetBaseOpnd();
        }
    }
    // LEA the effective address into a fresh register.
    IR::RegOpnd * forInEnumeratorRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertLea(forInEnumeratorRegOpnd, forInEnumeratorOpnd, insertBeforeInstr);
    return forInEnumeratorRegOpnd;
}
  21646. void
  21647. Lowerer::GenerateHasObjectArrayCheck(IR::RegOpnd * objectOpnd, IR::RegOpnd * typeOpnd, IR::LabelInstr * hasObjectArrayLabel, IR::Instr * insertBeforeInstr)
  21648. {
  21649. // CMP [objectOpnd + offset(objectArray)], nullptr
  21650. // JEQ $noObjectArrayLabel
  21651. // TEST[objectOpnd + offset(objectArray)], ObjectArrayFlagsTag (used as flags)
  21652. // JEQ $noObjectArrayLabel
  21653. // MOV typeHandlerOpnd, [typeOpnd + offset(typeHandler)]
  21654. // CMP typeHandler->OffsetOfInlineSlots, Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots()
  21655. // JNE $hasObjectArrayLabel
  21656. // $$noObjectArrayLabel: (fall thru)
  21657. Func * func = this->m_func;
  21658. IR::LabelInstr * noObjectArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  21659. IR::IndirOpnd * objectArrayOpnd = IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfObjectArray(), TyMachPtr, func);
  21660. InsertCompareBranch(objectArrayOpnd, IR::AddrOpnd::NewNull(func), Js::OpCode::BrEq_A, noObjectArrayLabel, insertBeforeInstr);
  21661. InsertTestBranch(objectArrayOpnd, IR::IntConstOpnd::New((uint32)Js::DynamicObjectFlags::ObjectArrayFlagsTag, TyUint8, func),
  21662. Js::OpCode::BrNeq_A, noObjectArrayLabel, insertBeforeInstr);
  21663. IR::RegOpnd * typeHandlerOpnd = IR::RegOpnd::New(TyMachPtr, func);
  21664. InsertMove(typeHandlerOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfTypeHandler(), TyMachPtr, func), insertBeforeInstr);
  21665. InsertCompareBranch(IR::IndirOpnd::New(typeHandlerOpnd, Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyUint16, func),
  21666. IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyUint16, func),
  21667. Js::OpCode::BrNeq_A, hasObjectArrayLabel, insertBeforeInstr);
  21668. insertBeforeInstr->InsertBefore(noObjectArrayLabel);
  21669. }
void
Lowerer::GenerateInitForInEnumeratorFastPath(IR::Instr * instr, Js::ForInCache * forInCache)
{
    // Emit the inline fast path for InitForInEnumerator: when the object's type
    // matches the profiled ForInCache and the cached enumeration invariants hold,
    // initialize the ForInObjectEnumerator fields directly in jitted code instead
    // of calling the runtime helper. Any failed check jumps to $helper; the caller
    // (LowerInitForInEnumerator) lowers `instr` itself into the helper call, which
    // ends up after $helper, and the fast path jumps over it to $done.
    // src1 = object being enumerated; src2 = the for-in enumerator storage.
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::RegOpnd * objectOpnd = instr->GetSrc1()->AsRegOpnd();

    // Tagged check and object check
    m_lowererMD.GenerateObjectTest(objectOpnd, instr, helperLabel);
    m_lowererMD.GenerateIsDynamicObject(objectOpnd, instr, helperLabel);

    // Type check with cache
    //
    // MOV typeOpnd, [objectOpnd + offset(type)]
    // CMP [&forInCache->type], typeOpnd
    // JNE $helper
    IR::RegOpnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(IR::MemRefOpnd::New(&forInCache->type, TyMachPtr, func, IR::AddrOpndKindForInCacheType), typeOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    // Check forInCacheData->EnumNonEnumerable == false
    //
    // MOV forInCacheDataOpnd, [&forInCache->data]
    // CMP forInCacheDataOpnd->enumNonEnumerable, 0
    // JNE $helper
    IR::RegOpnd * forInCacheDataOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(forInCacheDataOpnd, IR::MemRefOpnd::New(&forInCache->data, TyMachPtr, func, IR::AddrOpndKindForInCacheData), instr);
    InsertCompareBranch(IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataEnumNonEnumerable(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);

    // Check has object array (objects with an object array / index properties take the helper)
    GenerateHasObjectArrayCheck(objectOpnd, typeOpnd, helperLabel, instr);

    // Check first prototype with enumerable properties
    //
    // MOV prototypeObjectOpnd, [type + offset(prototype)]
    // MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)]
    // CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    // JEQ $noPrototypeWithEnumerablePropertiesLabel
    //
    // $checkFirstPrototypeLoopTopLabel:
    // CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_LastStaticType
    // JLE $helper
    // CMP [prototypeTypeOpnd, offset(hasNoEnumerableProperties], 0
    // JEQ $helper
    // <hasObjectArrayCheck prototypeObjectOpnd, prototypeTypeOpnd>
    //
    // MOV prototypeObjectOpnd, [prototypeTypeOpnd + offset(protottype)] (load next prototype)
    //
    // MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)] (tail dup TypeIds_Null check)
    // CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    // JNE $checkFirstPrototypeLoopTopLabel
    //
    // $noPrototypeWithEnumerablePropertiesLabel:
    //
    IR::LabelInstr * noPrototypeWithEnumerablePropertiesLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::RegOpnd * prototypeObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd * prototypeTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    // Reused for each typeId compare below; prototypeTypeOpnd is reloaded per iteration.
    IR::IndirOpnd * prototypeTypeIdOpnd = IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfTypeId(), TyUint32, func);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrEq_A, noPrototypeWithEnumerablePropertiesLabel, instr);
    IR::LabelInstr * checkFirstPrototypeLoopTopLabel = InsertLoopTopLabel(instr);
    Loop * loop = checkFirstPrototypeLoopTopLabel->GetLoop();
    // Both prototype regs are read again after the back edge, so keep their syms
    // live across it for the register allocator.
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeObjectOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeTypeOpnd->m_sym->m_id);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastStaticType, TyUint32, func), Js::OpCode::BrLe_A, helperLabel, instr);
    // No need to do EnsureObjectReady. Defer init type may not have this bit set, so we will go to helper and call EnsureObjectReady then
    InsertCompareBranch(IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfHasNoEnumerableProperties(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrEq_A, helperLabel, instr);
    GenerateHasObjectArrayCheck(prototypeObjectOpnd, prototypeTypeOpnd, helperLabel, instr);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    // Tail dup the TypeIds_Null check
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrNeq_A, checkFirstPrototypeLoopTopLabel, instr);
    instr->InsertBefore(noPrototypeWithEnumerablePropertiesLabel);

    // All checks passed: initialize DynamicObjectPropertyEnumerator fields
    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc2();
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorScriptContext(), TyMachPtr),
        LoadScriptContextOpnd(instr), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr),
        objectOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr),
        typeOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyInt32),
        IR::IntConstOpnd::New(Js::Constants::NoBigSlot, TyInt32, func), instr);
    // Copy the cached property count into the enumerator's initial count.
    IR::RegOpnd * initialPropertyCountOpnd = IR::RegOpnd::New(TyInt32, func);
    InsertMove(initialPropertyCountOpnd,
        IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataPropertyCount(), TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialPropertyCount(), TyInt32),
        initialPropertyCountOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyInt32),
        IR::IntConstOpnd::New(0, TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorFlags(), TyUint8),
        IR::IntConstOpnd::New((uint8)(Js::EnumeratorFlags::UseCache | Js::EnumeratorFlags::SnapShotSemantics), TyUint8, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr),
        forInCacheDataOpnd, instr);

    // Initialize rest of the JavascriptStaticEnumerator fields (all nulled)
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCurrentEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorPrefixEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorArrayEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);

    // Initialize rest of the ForInObjectEnumerator fields
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfShadowData(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    // Initialize can UseJitFastPath = true and enumeratingPrototype = false at the same time.
    // (single 16-bit store of 1 covers both state fields — assumes their packed layout
    // puts canUseJitFastPath in the low byte; confirm against ForInObjectEnumerator)
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfStates(), TyUint16),
        IR::IntConstOpnd::New(1, TyUint16, func, true), instr);

    // Fast path done: skip over the helper call that the caller lowers at `instr`.
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  21780. void
  21781. Lowerer::LowerInitForInEnumerator(IR::Instr * instr)
  21782. {
  21783. Js::ForInCache * forInCache = nullptr;
  21784. Func * func = instr->m_func;
  21785. if (instr->IsProfiledInstr())
  21786. {
  21787. uint profileId = instr->AsProfiledInstr()->u.profileId;
  21788. forInCache = instr->m_func->GetJITFunctionBody()->GetForInCache(profileId);
  21789. Assert(forInCache != nullptr);
  21790. if (!func->IsSimpleJit()
  21791. #if ENABLE_TTD
  21792. && (func->IsOOPJIT() || !func->GetScriptContext()->GetThreadContext()->IsRuntimeInTTDMode())
  21793. //TODO: We will need to enable OOPJIT info to exclude this if we have a TTD Runtime
  21794. #endif
  21795. )
  21796. {
  21797. GenerateInitForInEnumeratorFastPath(instr, forInCache);
  21798. }
  21799. }
  21800. IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc2(), instr);
  21801. instr->SetSrc2(forInEnumeratorRegOpnd);
  21802. m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::New(forInCache, IR::AddrOpndKindForInCache, func));
  21803. this->LowerBinaryHelperMem(instr, IR::HelperOp_OP_InitForInEnumerator);
  21804. }
  21805. IR::LabelInstr *
  21806. Lowerer::InsertLoopTopLabel(IR::Instr * insertBeforeInstr)
  21807. {
  21808. Func * func = this->m_func;
  21809. IR::LabelInstr * loopTopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  21810. loopTopLabel->m_isLoopTop = true;
  21811. Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, func);
  21812. loopTopLabel->SetLoop(loop);
  21813. loop->SetLoopTopInstr(loopTopLabel);
  21814. loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
  21815. insertBeforeInstr->InsertBefore(loopTopLabel);
  21816. return loopTopLabel;
  21817. }
  21818. #if DBG
void
Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast)
{
    // Debug-only sanity pass: runs the machine-dependent legalizer over every
    // instruction in [instrStart, instrLast]. The <true> template argument
    // presumably puts Legalize into verify-only mode (assert rather than
    // rewrite) — confirm against LowererMD::Legalize.
    FOREACH_INSTR_IN_RANGE(verifyLegalizeInstr, instrStart, instrLast)
    {
        LowererMD::Legalize<true>(verifyLegalizeInstr);
    }
    NEXT_INSTR_IN_RANGE;
}
  21828. #endif