Lower.cpp 875 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
222222223222242222522226222272222822229222302223122232222332223422235222362223722238222392224022241
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Debug/DebuggingFlags.h"
  7. #include "Debug/DiagProbe.h"
  8. #include "Debug/DebugManager.h"
  9. // Parser includes
  10. #include "RegexCommon.h"
  11. #include "RegexPattern.h"
  12. #include "ExternalLowerer.h"
  13. #include "ExternalLowerer.h"
  14. ///----------------------------------------------------------------------------
  15. ///
  16. /// Lowerer::Lower
  17. ///
  18. /// Lowerer's main entrypoint. Lowers this function.
  19. ///
  20. ///----------------------------------------------------------------------------
  21. void
  22. Lowerer::Lower()
  23. {
  24. this->m_func->StopMaintainByteCodeOffset();
  25. NoRecoverMemoryJitArenaAllocator localAlloc(L"BE-Lower", this->m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  26. this->m_alloc = &localAlloc;
  27. BVSparse<JitArenaAllocator> localInitializedTempSym(&localAlloc);
  28. this->initializedTempSym = &localInitializedTempSym;
  29. BVSparse<JitArenaAllocator> localAddToLiveOnBackEdgeSyms(&localAlloc);
  30. this->addToLiveOnBackEdgeSyms = &localAddToLiveOnBackEdgeSyms;
  31. Assert(this->m_func->GetCloneMap() == nullptr);
  32. m_lowererMD.Init(this);
  33. bool defaultDoFastPath = this->m_func->DoFastPaths();
  34. bool loopFastPath = this->m_func->DoLoopFastPaths();
  35. if (!loopFastPath || !defaultDoFastPath
  36. #ifdef INLINE_CACHE_STATS
  37. || PHASE_STATS1(Js::PolymorphicInlineCachePhase)
  38. #endif
  39. )
  40. {
  41. //arguments[] access is similar to array fast path hence disable when array fastpath is disabled.
  42. //loopFastPath is always true except explicitly disabled
  43. //defaultDoFastPath can be false when we the source code size is huge
  44. m_func->SetHasStackArgs(false);
  45. }
  46. if (m_func->HasAnyStackNestedFunc())
  47. {
  48. EnsureStackFunctionListStackSym();
  49. }
  50. if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
  51. {
  52. AllocStackClosure();
  53. }
  54. if (m_func->IsJitInDebugMode())
  55. {
  56. // Initialize metadata of local var slots.
  57. // Too late to wait until Register Allocator, as we need the offset when lowerering bailout for debugger.
  58. int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
  59. if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
  60. {
  61. // MOV [EBP + m_func->GetHasLocalVarChangedOffset()], 0
  62. StackSym* sym = StackSym::New(TyInt8, m_func);
  63. sym->m_offset = hasLocalVarChangedOffset;
  64. sym->m_allocated = true;
  65. IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
  66. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
  67. LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
  68. #ifdef DBG
  69. // Pre-fill all local slots with a pattern. This will help identify non-initialized/garbage var values.
  70. // Note that in the beginning of the function in bytecode we should initialize all locals to undefined.
  71. uint32 localSlotCount = m_func->GetJnFunction()->GetEndNonTempLocalIndex() - m_func->GetJnFunction()->GetFirstNonTempLocalIndex();
  72. for (uint i = 0; i < localSlotCount; ++i)
  73. {
  74. int offset = m_func->GetLocalVarSlotOffset(i);
  75. IRType opnd1Type;
  76. opnd2;
  77. uint32 slotSize = Func::GetDiagLocalSlotSize();
  78. switch (slotSize)
  79. {
  80. case 4:
  81. opnd1Type = TyInt32;
  82. opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern4, opnd1Type, m_func);
  83. break;
  84. case 8:
  85. opnd1Type = TyInt64;
  86. opnd2 = IR::AddrOpnd::New((Js::Var)Func::c_debugFillPattern8, IR::AddrOpndKindConstant, m_func);
  87. break;
  88. default:
  89. AssertMsg(FALSE, "Unsupported slot size!");
  90. opnd1Type = TyIllegal;
  91. opnd2 = nullptr;
  92. }
  93. sym = StackSym::New(opnd1Type, m_func);
  94. sym->m_offset = offset;
  95. sym->m_allocated = true;
  96. opnd1 = IR::SymOpnd::New(sym, TyInt32, m_func);
  97. LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
  98. }
  99. #endif
  100. }
  101. Assert(!m_func->HasAnyStackNestedFunc());
  102. }
  103. this->LowerRange(m_func->m_headInstr, m_func->m_tailInstr, defaultDoFastPath, loopFastPath);
  104. this->m_func->ClearCloneMap();
  105. if (m_func->HasAnyStackNestedFunc())
  106. {
  107. EnsureZeroLastStackFunctionNext();
  108. }
  109. if (!m_func->IsSimpleJit())
  110. {
  111. Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
  112. Assert(entryPointInfo->GetJitTransferData() != nullptr && !entryPointInfo->GetJitTransferData()->GetIsReady());
  113. }
  114. this->initializedTempSym = nullptr;
  115. this->m_alloc = nullptr;
  116. this->m_func->DisableConstandAddressLoadHoist();
  117. }
  118. void
  119. Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFastPath, bool defaultDoLoopFastPath)
  120. {
  121. bool noMathFastPath;
  122. bool noFieldFastPath;
  123. bool fNoLower = false;
  124. noFieldFastPath = !defaultDoFastPath;
  125. noMathFastPath = !defaultDoFastPath;
  126. #if DBG_DUMP
  127. wchar_t * globOptInstrString = nullptr;
  128. #endif
  129. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrEnd, instrStart)
  130. {
  131. // Try to peep this`
  132. instr = this->PreLowerPeepInstr(instr, &instrPrev);
  133. #if DBG
  134. IR::Instr * verifyLegalizeInstrNext = instr->m_next;
  135. #endif
  136. // If we have debugger bailout as part of real instr (not separate BailForDebugger instr),
  137. // extract/split out BailOutForDebugger into separate instr, if needed.
  138. // The instr can have just debugger bailout, or debugger bailout + other shared bailout.
  139. // Note that by the time we get here, we should not have aux-only bailout (in globopt we promote it to normal bailout).
  140. if (m_func->IsJitInDebugMode() && instr->HasBailOutInfo() &&
  141. ((instr->GetBailOutKind() & IR::BailOutForDebuggerBits) && instr->m_opcode != Js::OpCode::BailForDebugger ||
  142. instr->HasAuxBailOut()))
  143. {
  144. instr = this->SplitBailForDebugger(instr); // Change instr, as returned is the one we need to lower next.
  145. instrPrev = instr->m_prev; // Change just in case if instr got changed.
  146. }
  147. #if DBG_DUMP
  148. if (!instr->IsLowered() && !instr->IsLabelInstr()
  149. && (CONFIG_FLAG(ForcePostLowerGlobOptInstrString) ||
  150. PHASE_DUMP(Js::LowererPhase, m_func) ||
  151. PHASE_DUMP(Js::LinearScanPhase, m_func) ||
  152. PHASE_DUMP(Js::RegAllocPhase, m_func) ||
  153. PHASE_DUMP(Js::PeepsPhase, m_func) ||
  154. PHASE_DUMP(Js::LayoutPhase, m_func) ||
  155. PHASE_DUMP(Js::EmitterPhase, m_func) ||
  156. PHASE_DUMP(Js::EncoderPhase, m_func) ||
  157. PHASE_DUMP(Js::BackEndPhase, m_func)))
  158. {
  159. if(instr->m_next && instr->m_next->m_opcode != Js::OpCode::StatementBoundary && !instr->m_next->IsLabelInstr())
  160. {
  161. instr->m_next->globOptInstrString = globOptInstrString;
  162. }
  163. globOptInstrString = instr->DumpString();
  164. }
  165. #endif
  166. IR::Opnd *src1;
  167. IR::RegOpnd *srcReg1;
  168. IR::RegOpnd *srcReg2;
  169. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsMultiBranch() && instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
  170. {
  171. Loop * loop = instr->AsBranchInstr()->GetTarget()->GetLoop();
  172. if (this->outerMostLoopLabel == nullptr && !loop->isProcessed)
  173. {
  174. while (loop && loop->GetLoopTopInstr()) // some loops are optimized away so that they are not loops anymore.
  175. // They do, however, stay in the loop graph but don't have loop top labels assigned to them
  176. {
  177. this->outerMostLoopLabel = loop->GetLoopTopInstr();
  178. Assert(this->outerMostLoopLabel->m_isLoopTop);
  179. // landing pad must fall through to the loop
  180. Assert(this->outerMostLoopLabel->m_prev->HasFallThrough());
  181. loop = loop->parent;
  182. }
  183. this->initializedTempSym->ClearAll();
  184. }
  185. noFieldFastPath = !defaultDoLoopFastPath;
  186. noMathFastPath = !defaultDoLoopFastPath;
  187. }
  188. #ifdef INLINE_CACHE_STATS
  189. if(PHASE_STATS1(Js::PolymorphicInlineCachePhase))
  190. {
  191. // Always use the slow path, so we can track property accesses
  192. noFieldFastPath = true;
  193. }
  194. #endif
  195. switch(instr->m_opcode)
  196. {
  197. case Js::OpCode::LdHandlerScope:
  198. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdHandlerScope);
  199. break;
  200. case Js::OpCode::InitSetFld:
  201. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitSetter, IR::HelperOP_InitSetter, false);
  202. break;
  203. case Js::OpCode::InitGetFld:
  204. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitGetter, IR::HelperOP_InitGetter, false);
  205. break;
  206. case Js::OpCode::InitProto:
  207. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitProto, IR::HelperOP_InitProto, false);
  208. break;
  209. case Js::OpCode::LdArgCnt:
  210. this->LoadArgumentCount(instr);
  211. break;
  212. case Js::OpCode::LdStackArgPtr:
  213. this->LoadStackArgPtr(instr);
  214. break;
  215. case Js::OpCode::LdHeapArguments:
  216. case Js::OpCode::LdLetHeapArguments:
  217. instrPrev = m_lowererMD.LoadHeapArguments(instr);
  218. break;
  219. case Js::OpCode::LdArgumentsFromStack:
  220. instrPrev = this->LoadArgumentsFromStack(instr);
  221. break;
  222. case Js::OpCode::LdHeapArgsCached:
  223. case Js::OpCode::LdLetHeapArgsCached:
  224. m_lowererMD.LoadHeapArgsCached(instr);
  225. break;
  226. case Js::OpCode::InvalCachedScope:
  227. this->LowerBinaryHelper(instr, IR::HelperOP_InvalidateCachedScope);
  228. break;
  229. case Js::OpCode::NewScopeObject:
  230. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObject);
  231. break;
  232. case Js::OpCode::NewStackScopeSlots:
  233. this->LowerNewScopeSlots(instr, m_func->DoStackScopeSlots());
  234. break;
  235. case Js::OpCode::NewScopeSlots:
  236. this->LowerNewScopeSlots(instr, false);
  237. break;
  238. case Js::OpCode::InitLocalClosure:
  239. // Real initialization of the stack pointers happens on entry to the function, so this instruction
  240. // (which exists to provide a def in the IR) can go away.
  241. instr->Remove();
  242. break;
  243. case Js::OpCode::NewScopeSlotsWithoutPropIds:
  244. this->LowerBinaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlotsWithoutPropIds);
  245. break;
  246. case Js::OpCode::NewBlockScope:
  247. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewBlockScope);
  248. break;
  249. case Js::OpCode::NewPseudoScope:
  250. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewPseudoScope);
  251. break;
  252. case Js::OpCode::CloneInnerScopeSlots:
  253. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneInnerScopeSlots);
  254. break;
  255. case Js::OpCode::CloneBlockScope:
  256. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneBlockScope);
  257. break;
  258. case Js::OpCode::GetCachedFunc:
  259. m_lowererMD.LowerGetCachedFunc(instr);
  260. break;
  261. case Js::OpCode::BrFncCachedScopeEq:
  262. case Js::OpCode::BrFncCachedScopeNeq:
  263. this->LowerBrFncCachedScopeEq(instr);
  264. break;
  265. case Js::OpCode::CommitScope:
  266. m_lowererMD.LowerCommitScope(instr);
  267. break;
  268. case Js::OpCode::LdFldForTypeOf:
  269. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf,
  270. IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf);
  271. break;
  272. case Js::OpCode::LdFld:
  273. case Js::OpCode::LdFldForCallApplyTarget:
  274. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic,
  275. IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic);
  276. break;
  277. case Js::OpCode::LdSuperFld:
  278. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr,
  279. IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr);
  280. break;
  281. case Js::OpCode::LdRootFld:
  282. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic,
  283. IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic);
  284. break;
  285. case Js::OpCode::LdRootFldForTypeOf:
  286. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf,
  287. IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf);
  288. break;
  289. case Js::OpCode::LdMethodFldPolyInlineMiss:
  290. instrPrev = LowerLdFld(instr, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic, true, nullptr, true);
  291. break;
  292. case Js::OpCode::LdMethodFld:
  293. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic,
  294. IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic);
  295. break;
  296. case Js::OpCode::LdRootMethodFld:
  297. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic,
  298. IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic);
  299. break;
  300. case Js::OpCode::ScopedLdMethodFld:
  301. // "Scoped" in ScopedLdMethodFld is a bit of a misnomer because it doesn't look through a scope chain.
  302. // Instead the op is to allow for either a LdRootMethodFld or LdMethodFld depending on whether the
  303. // object is the root object or not.
  304. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic,
  305. IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic);
  306. break;
  307. case Js::OpCode::LdMethodFromFlags:
  308. {
  309. Assert(instr->HasBailOutInfo());
  310. bool success = m_lowererMD.GenerateFastLdMethodFromFlags(instr);
  311. AssertMsg(success, "Not expected to generate helper block here");
  312. break;
  313. }
  314. case Js::OpCode::CheckFixedFld:
  315. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func->GetJnFunction()) || !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func->GetJnFunction()), "CheckFixedFld with fixed prop(Data|Method) phase disabled?");
  316. this->GenerateCheckFixedFld(instr);
  317. break;
  318. case Js::OpCode::CheckPropertyGuardAndLoadType:
  319. instrPrev = this->GeneratePropertyGuardCheckBailoutAndLoadType(instr);
  320. break;
  321. case Js::OpCode::CheckObjType:
  322. this->GenerateCheckObjType(instr);
  323. break;
  324. case Js::OpCode::AdjustObjType:
  325. this->LowerAdjustObjType(instr);
  326. break;
  327. case Js::OpCode::DeleteFld:
  328. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, false);
  329. break;
  330. case Js::OpCode::DeleteRootFld:
  331. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, false);
  332. break;
  333. case Js::OpCode::DeleteFldStrict:
  334. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, true);
  335. break;
  336. case Js::OpCode::DeleteRootFldStrict:
  337. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, true);
  338. break;
  339. case Js::OpCode::ScopedLdFldForTypeOf:
  340. if (!noFieldFastPath)
  341. {
  342. m_lowererMD.GenerateFastScopedLdFld(instr);
  343. }
  344. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyForTypeOfScoped, true);
  345. break;
  346. case Js::OpCode::ScopedLdFld:
  347. if (!noFieldFastPath)
  348. {
  349. m_lowererMD.GenerateFastScopedLdFld(instr);
  350. }
  351. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyScoped, true);
  352. break;
  353. case Js::OpCode::ScopedLdInst:
  354. instrPrev = this->LowerScopedLdInst(instr, IR::HelperOp_GetInstanceScoped);
  355. break;
  356. case Js::OpCode::ScopedDeleteFld:
  357. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, false);
  358. break;
  359. case Js::OpCode::ScopedDeleteFldStrict:
  360. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, true);
  361. break;
  362. case Js::OpCode::NewScFunc:
  363. instrPrev = this->LowerNewScFunc(instr);
  364. break;
  365. case Js::OpCode::NewScGenFunc:
  366. instrPrev = this->LowerNewScGenFunc(instr);
  367. break;
  368. case Js::OpCode::StFld:
  369. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  370. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_None);
  371. break;
  372. case Js::OpCode::StSuperFld:
  373. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic,
  374. IR::HelperOp_PatchPutValueWithThisPtr, IR::HelperOp_PatchPutValueWithThisPtrPolymorphic, true, Js::PropertyOperation_None);
  375. break;
  376. case Js::OpCode::StRootFld:
  377. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  378. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_Root);
  379. break;
  380. case Js::OpCode::StFldStrict:
  381. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  382. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_StrictMode);
  383. break;
  384. case Js::OpCode::StRootFldStrict:
  385. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  386. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_StrictModeRoot);
  387. break;
  388. case Js::OpCode::InitFld:
  389. case Js::OpCode::InitRootFld:
  390. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic,
  391. IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic, false, Js::PropertyOperation_None);
  392. break;
  393. case Js::OpCode::ScopedInitFunc:
  394. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_InitFuncScoped, false);
  395. break;
  396. case Js::OpCode::ScopedStFld:
  397. case Js::OpCode::ScopedStFldStrict:
  398. if (!noFieldFastPath)
  399. {
  400. m_lowererMD.GenerateFastScopedStFld(instr);
  401. }
  402. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_PatchSetPropertyScoped, true, true,
  403. instr->m_opcode == Js::OpCode::ScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode);
  404. break;
  405. case Js::OpCode::ConsoleScopedStFld:
  406. {
  407. if (!noFieldFastPath)
  408. {
  409. m_lowererMD.GenerateFastScopedStFld(instr);
  410. }
  411. Js::PropertyOperationFlags flags = static_cast<Js::PropertyOperationFlags>(Js::PropertyOperation_None | Js::PropertyOperation_AllowUndeclInConsoleScope);
  412. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_ConsolePatchSetPropertyScoped, true, true, flags);
  413. break;
  414. }
  415. case Js::OpCode::LdStr:
  416. m_lowererMD.ChangeToAssign(instr);
  417. break;
  418. case Js::OpCode::CloneStr:
  419. {
  420. GenerateGetImmutableOrScriptUnreferencedString(instr->GetSrc1()->AsRegOpnd(), instr, IR::HelperOp_CompoundStringCloneForAppending, false);
  421. instr->Remove();
  422. break;
  423. }
  424. case Js::OpCode::NewScObjArray:
  425. instrPrev = this->LowerNewScObjArray(instr);
  426. break;
  427. case Js::OpCode::NewScObject:
  428. case Js::OpCode::NewScObjectSpread:
  429. case Js::OpCode::NewScObjArraySpread:
  430. instrPrev = this->LowerNewScObject(instr, true, true);
  431. break;
  432. case Js::OpCode::NewScObjectNoCtor:
  433. instrPrev = this->LowerNewScObject(instr, false, true);
  434. break;
  435. case Js::OpCode::NewScObjectNoCtorFull:
  436. instrPrev = this->LowerNewScObject(instr, false, true, true);
  437. break;
  438. case Js::OpCode::GetNewScObject:
  439. instrPrev = this->LowerGetNewScObject(instr);
  440. break;
  441. case Js::OpCode::UpdateNewScObjectCache:
  442. instrPrev = instr->m_prev;
  443. this->LowerUpdateNewScObjectCache(instr, instr->GetSrc2(), instr->GetSrc1(), true /* isCtorFunction */);
  444. instr->Remove();
  445. break;
  446. case Js::OpCode::NewScObjectSimple:
  447. this->LowerNewScObjectSimple(instr);
  448. break;
  449. case Js::OpCode::NewScObjectLiteral:
  450. this->LowerNewScObjectLiteral(instr);
  451. break;
  452. case Js::OpCode::LdPropIds:
  453. m_lowererMD.ChangeToAssign(instr);
  454. break;
  455. case Js::OpCode::StArrSegItem_A:
  456. instrPrev = this->LowerArraySegmentVars(instr);
  457. break;
  458. case Js::OpCode::InlineMathAcos:
  459. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Acos);
  460. break;
  461. case Js::OpCode::InlineMathAsin:
  462. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Asin);
  463. break;
  464. case Js::OpCode::InlineMathAtan:
  465. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan);
  466. break;
  467. case Js::OpCode::InlineMathAtan2:
  468. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan2);
  469. break;
  470. case Js::OpCode::InlineMathCos:
  471. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Cos);
  472. break;
  473. case Js::OpCode::InlineMathExp:
  474. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Exp);
  475. break;
  476. case Js::OpCode::InlineMathLog:
  477. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Log);
  478. break;
  479. case Js::OpCode::InlineMathPow:
  480. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
  481. break;
  482. case Js::OpCode::InlineMathSin:
  483. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Sin);
  484. break;
  485. case Js::OpCode::InlineMathSqrt:
  486. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  487. break;
  488. case Js::OpCode::InlineMathTan:
  489. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Tan);
  490. break;
  491. case Js::OpCode::InlineMathFloor:
  492. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  493. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJnFunction()->GetIsAsmjsMode())
  494. {
  495. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_FloorFlt, IR::HelperDirectMath_FloorDb);
  496. break;
  497. }
  498. #endif
  499. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  500. break;
  501. case Js::OpCode::InlineMathCeil:
  502. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  503. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJnFunction()->GetIsAsmjsMode())
  504. {
  505. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_CeilFlt, IR::HelperDirectMath_CeilDb);
  506. break;
  507. }
  508. #endif
  509. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  510. break;
  511. case Js::OpCode::InlineMathRound:
  512. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  513. break;
  514. case Js::OpCode::InlineMathAbs:
  515. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  516. break;
  517. case Js::OpCode::InlineMathImul:
  518. GenerateFastInlineMathImul(instr);
  519. break;
  520. case Js::OpCode::InlineMathClz32:
  521. GenerateFastInlineMathClz32(instr);
  522. break;
  523. case Js::OpCode::InlineMathFround:
  524. GenerateFastInlineMathFround(instr);
  525. break;
  526. case Js::OpCode::InlineMathMin:
  527. case Js::OpCode::InlineMathMax:
  528. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  529. break;
  530. case Js::OpCode::InlineMathRandom:
  531. this->GenerateFastInlineBuiltInMathRandom(instr);
  532. break;
  533. #ifdef ENABLE_DOM_FAST_PATH
  534. case Js::OpCode::DOMFastPathGetter:
  535. this->LowerFastInlineDOMFastPathGetter(instr);
  536. break;
  537. #endif
  538. case Js::OpCode::InlineArrayPush:
  539. this->GenerateFastInlineArrayPush(instr);
  540. break;
  541. case Js::OpCode::InlineArrayPop:
  542. this->GenerateFastInlineArrayPop(instr);
  543. break;
  544. //Now retrieve the function object from the ArgOut_A_InlineSpecialized instruction opcode to push it on the stack after all the other arguments have been pushed.
  545. //The lowering of the direct call to helper is handled by GenerateDirectCall (architecture specific).
  546. case Js::OpCode::CallDirect:
  547. {
  548. IR::Opnd * src1 = instr->GetSrc1();
  549. Assert(src1->IsHelperCallOpnd());
  550. switch (src1->AsHelperCallOpnd()->m_fnHelper)
  551. {
  552. case IR::JnHelperMethod::HelperString_Split:
  553. case IR::JnHelperMethod::HelperString_Match:
  554. GenerateFastInlineStringSplitMatch(instr);
  555. break;
  556. case IR::JnHelperMethod::HelperRegExp_Exec:
  557. GenerateFastInlineRegExpExec(instr);
  558. break;
  559. case IR::JnHelperMethod::HelperGlobalObject_ParseInt:
  560. GenerateFastInlineGlobalObjectParseInt(instr);
  561. break;
  562. case IR::JnHelperMethod::HelperString_FromCharCode:
  563. GenerateFastInlineStringFromCharCode(instr);
  564. break;
  565. case IR::JnHelperMethod::HelperString_FromCodePoint:
  566. GenerateFastInlineStringFromCodePoint(instr);
  567. break;
  568. case IR::JnHelperMethod::HelperString_CharAt:
  569. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::String_CharAt);
  570. break;
  571. case IR::JnHelperMethod::HelperString_CharCodeAt:
  572. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::String_CharCodeAt);
  573. break;
  574. case IR::JnHelperMethod::HelperString_Replace:
  575. GenerateFastInlineStringReplace(instr);
  576. break;
  577. }
  578. instrPrev = LowerCallDirect(instr);
  579. break;
  580. }
  581. case Js::OpCode::CallIDynamic:
  582. {
  583. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  584. instrPrev = this->LowerCallIDynamic(instr, (ushort)flags);
  585. break;
  586. }
  587. case Js::OpCode::CallIDynamicSpread:
  588. {
  589. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  590. instrPrev = this->LowerCallIDynamicSpread(instr, (ushort)flags);
  591. break;
  592. }
case Js::OpCode::CallI:
case Js::OpCode::CallINew:
case Js::OpCode::CallIFixed:
case Js::OpCode::CallINewTargetNew:
{
    // Compute the call flags from the opcode and from whether the result is consumed.
    Js::CallFlags flags = Js::CallFlags_None;
    if (instr->isCtorCall)
    {
        // Constructor call: only CallFlags_New is set (no Value/NotUsed bit on this path).
        flags = Js::CallFlags_New;
    }
    else
    {
        if (instr->m_opcode == Js::OpCode::CallINew)
        {
            flags = Js::CallFlags_New;
        }
        else if (instr->m_opcode == Js::OpCode::CallINewTargetNew)
        {
            // new.target call: carries an extra hidden argument in addition to the New bit.
            flags = (Js::CallFlags) (Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget);
        }
        // Record whether the caller uses the return value.
        if (instr->GetDst())
        {
            flags = (Js::CallFlags) (flags | Js::CallFlags_Value);
        }
        else
        {
            flags = (Js::CallFlags) (flags | Js::CallFlags_NotUsed);
        }
    }
    if (!PHASE_OFF(Js::CallFastPathPhase, this->m_func) && !noMathFastPath)
    {
        // We shouldn't have turned this instruction into a fixed method call if we're calling one of the
        // built-ins we still inline in the lowerer.
        Assert(instr->m_opcode != Js::OpCode::CallIFixed || !Func::IsBuiltInInlinedInLowerer(instr->GetSrc1()));
        // Disable InlineBuiltInLibraryCall as it does not work well with 2nd chance reg alloc
        // and may invalidate live on back edge data by introducing refs across loops. See Winblue Bug: 577641
        //// Callee may still be a library built-in; if so, generate it inline.
        //if (this->InlineBuiltInLibraryCall(instr))
        //{
        //    m_lowererMD.LowerCallI(instr, (ushort)flags, true /*isHelper*/);
        //}
        //else
        //{
        m_lowererMD.LowerCallI(instr, (ushort)flags);
        //}
    }
    else
    {
        // Fast paths disabled: lower the call directly.
        m_lowererMD.LowerCallI(instr, (ushort)flags);
    }
    break;
}
case Js::OpCode::AsmJsCallI:
    // asm.js internal (typed) call — machine-dependent lowering.
    m_lowererMD.LowerAsmJsCallI(instr);
    break;
case Js::OpCode::AsmJsCallE:
    // asm.js external call — machine-dependent lowering.
    m_lowererMD.LowerAsmJsCallE(instr);
    break;
case Js::OpCode::CallIEval:
{
    // eval-style call always carries CallFlags_ExtraArg for a hidden extra argument
    // (NOTE(review): presumably the frame display / environment — confirm), plus
    // Value/NotUsed depending on whether the result is consumed.
    Js::CallFlags flags = (Js::CallFlags)(Js::CallFlags_ExtraArg | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
    if (IsSpreadCall(instr))
    {
        instrPrev = LowerSpreadCall(instr, flags);
    }
    else
    {
        m_lowererMD.LowerCallI(instr, (ushort)flags);
    }
#ifdef PERF_HINT
    // Surface a perf hint: calls to eval inhibit many optimizations.
    if (PHASE_TRACE1(Js::PerfHintPhase))
    {
        WritePerfHint(PerfHints::CallsEval, this->m_func->GetJnFunction(), instr->GetByteCodeOffset());
    }
#endif
    break;
}
case Js::OpCode::CallIPut:
    m_lowererMD.LowerCallPut(instr);
    break;
case Js::OpCode::CallHelper:
    instrPrev = m_lowererMD.LowerCallHelper(instr);
    break;
case Js::OpCode::Ret:
    if (instr->m_next->m_opcode != Js::OpCode::FunctionExit)
    {
        // If this RET isn't at the end of the function, insert a branch to
        // the epilog. Reuse the label just before the exit instruction if one
        // already exists; otherwise create it.
        IR::Instr *exitPrev = m_func->m_exitInstr->m_prev;
        if (!exitPrev->IsLabelInstr())
        {
            exitPrev = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            m_func->m_exitInstr->InsertBefore(exitPrev);
        }
        IR::BranchInstr *exitBr = IR::BranchInstr::New(Js::OpCode::Br,
            exitPrev->AsLabelInstr(), m_func);
        instr->InsertAfter(exitBr);
        m_lowererMD.LowerUncondBranch(exitBr);
    }
    m_lowererMD.LowerRet(instr);
    break;
case Js::OpCode::LdArgumentsFromFrame:
    this->LoadArgumentsFromFrame(instr);
    break;
case Js::OpCode::LdC_A_I4:
    // Load a tagged-int constant into a var: convert the int const to an address
    // operand and lower as a var load. The original src operand is freed here.
    src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIntConstOpnd(), "Source of LdC_A_I4 should be an IntConst...");
    instrPrev = this->LowerLoadVar(instr,
        IR::AddrOpnd::NewFromNumber(static_cast<int32>(src1->AsIntConstOpnd()->GetValue()), this->m_func));
    src1->Free(this->m_func);
    break;
case Js::OpCode::LdC_A_R8:
    // Load a double constant boxed as a var.
    src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_A_R8 should be a FloatConst...");
    instrPrev = this->LowerLoadVar(instr, src1->AsFloatConstOpnd()->GetAddrOpnd(this->m_func));
    src1->Free(this->m_func);
    break;
case Js::OpCode::LdC_F8_R8:
    // Load a raw double constant into a float register; the original instruction
    // is fully replaced, so it is removed after lowering.
    src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_F8_R8 should be a FloatConst...");
    instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloatConstOpnd()->m_value, instr);
    src1->Free(this->m_func);
    instr->Remove();
    break;
case Js::OpCode::NewRegEx:
    instrPrev = this->LowerNewRegEx(instr);
    break;
case Js::OpCode::Conv_Obj:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_ConvObject);
    break;
case Js::OpCode::NewWithObject:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_NewWithObject);
    break;
case Js::OpCode::LdCustomSpreadIteratorList:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_ToSpreadedFunctionArgument);
    break;
case Js::OpCode::Conv_Num:
    this->LowerConvNum(instr, noMathFastPath);
    break;
case Js::OpCode::Incr_A:
    if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Increment);
    }
    else
    {
        // Fast path: treat x++ as x + 1 with a tagged-int constant, then fall
        // back to the increment helper for the non-int cases.
        instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
        m_lowererMD.GenerateFastAdd(instr);
        instr->FreeSrc2();
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Increment));
    }
    break;
case Js::OpCode::Decr_A:
    if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Decrement);
    }
    else
    {
        // Mirror of Incr_A: x-- becomes x - 1 on the fast path.
        instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
        m_lowererMD.GenerateFastSub(instr);
        instr->FreeSrc2();
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Decrement));
    }
    break;
case Js::OpCode::Neg_A:
    if (instr->GetDst()->IsFloat())
    {
        // Type-specialized float negate: purely machine lowering, no helper.
        Assert(instr->GetSrc1()->IsFloat());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Negate);
    }
    else if (m_lowererMD.GenerateFastNeg(instr))
    {
        // NOTE(review): GenerateFastXxx appears to return true when a helper-call
        // slow path must still be emitted after the inline fast path — confirm.
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Negate));
    }
    break;
case Js::OpCode::Not_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Not);
    }
    else if (m_lowererMD.GenerateFastNot(instr))
    {
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Not));
    }
    break;
case Js::OpCode::BrEq_I4:
case Js::OpCode::BrNeq_I4:
case Js::OpCode::BrGt_I4:
case Js::OpCode::BrGe_I4:
case Js::OpCode::BrLt_I4:
case Js::OpCode::BrLe_I4:
case Js::OpCode::BrUnGt_I4:
case Js::OpCode::BrUnGe_I4:
case Js::OpCode::BrUnLt_I4:
case Js::OpCode::BrUnLe_I4:
{
    // See calls to MarkOneFltTmpSym under BrSrEq. This is to handle the case
    // where a branch is type-specialized and uses the result of a float pref op,
    // which must then be saved to var at the def.
    // NOTE(review): the values assigned to 'sym' below are never read afterwards;
    // GetVarEquivSym looks like it is called for its side effect (per the comment
    // above) — confirm, otherwise this scope is dead code.
    StackSym *sym = instr->GetSrc1()->GetStackSym();
    if (sym)
    {
        sym = sym->GetVarEquivSym(nullptr);
    }
    sym = instr->GetSrc2()->GetStackSym();
    if (sym)
    {
        sym = sym->GetVarEquivSym(nullptr);
    }
}
// FALLTHROUGH
case Js::OpCode::Neg_I4:
case Js::OpCode::Not_I4:
case Js::OpCode::Add_I4:
case Js::OpCode::Sub_I4:
case Js::OpCode::Mul_I4:
case Js::OpCode::Rem_I4:
case Js::OpCode::Or_I4:
case Js::OpCode::Xor_I4:
case Js::OpCode::And_I4:
case Js::OpCode::Shl_I4:
case Js::OpCode::Shr_I4:
case Js::OpCode::ShrU_I4:
case Js::OpCode::BrTrue_I4:
case Js::OpCode::BrFalse_I4:
    if(instr->HasBailOutInfo())
    {
        const auto bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnResultConditions ||
            bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
        {
            // Split off the arithmetic from the bailout check, then lower the
            // condition test that branches to the bailout or past it.
            const auto nonBailOutInstr = SplitBailOnResultCondition(instr);
            IR::LabelInstr *bailOutLabel, *skipBailOutLabel;
            LowerBailOnResultCondition(instr, &bailOutLabel, &skipBailOutLabel);
            LowerInstrWithBailOnResultCondition(nonBailOutInstr, bailOutKind, bailOutLabel, skipBailOutLabel);
        }
        else if(bailOutKind == IR::BailOnModByPowerOf2)
        {
            // Rem by a power of two is fully handled by the simplified fast
            // path; anything that falls through it bails out.
            Assert(instr->m_opcode == Js::OpCode::Rem_I4);
            bool fastPath = GenerateSimplifiedInt4Rem(instr);
            Assert(fastPath);
            instr->FreeSrc1();
            instr->FreeSrc2();
            this->GenerateBailOut(instr);
        }
    }
    else
    {
        if (instr->m_opcode == Js::OpCode::Rem_I4)
        {
            // fast path
            this->GenerateSimplifiedInt4Rem(instr);
            // slow path
            this->LowerRemI4(instr);
        }
#if defined(_M_IX86) || defined(_M_X64)
        else if (instr->m_opcode == Js::OpCode::Mul_I4)
        {
            // x86/x64 only: try strength-reduced multiply before the generic form.
            if (!LowererMD::GenerateSimplifiedInt4Mul(instr))
            {
                m_lowererMD.EmitInt4Instr(instr);
            }
        }
#endif
        else
        {
            m_lowererMD.EmitInt4Instr(instr);
        }
    }
    break;
case Js::OpCode::Div_I4:
    this->LowerDivI4(instr);
    break;
case Js::OpCode::Add_Ptr:
    m_lowererMD.EmitPtrInstr(instr);
    break;
case Js::OpCode::Typeof:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_Typeof);
    break;
case Js::OpCode::TypeofElem:
    this->LowerLdElemI(instr, IR::HelperOp_TypeofElem, false);
    break;
case Js::OpCode::LdLen_A:
{
    bool fastPath = !noMathFastPath;
    if(!fastPath && instr->HasBailOutInfo())
    {
        // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
        // bailouts require the fast path.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutKindBits)
        {
            fastPath = true;
        }
        else
        {
            // Force the fast path for any non-implicit-call bailout kind.
            const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
            fastPath =
                bailOutKindMinusBits &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
        }
    }
    bool instrIsInHelperBlock;
    if(!fastPath)
    {
        LowerLdLen(instr, false);
    }
    else if(GenerateFastLdLen(instr, &instrIsInHelperBlock))
    {
        // Fast path emitted; lower the residual helper call in the helper block.
        Assert(
            !instr->HasBailOutInfo() ||
            (instr->GetBailOutKind() & ~IR::BailOutKindBits) != IR::BailOutOnIrregularLength);
        LowerLdLen(instr, instrIsInHelperBlock);
    }
    break;
}
case Js::OpCode::LdThis:
{
    // Try the inline 'this' check first; fall back to a helper when field fast
    // paths are disabled or the inline check could not be generated.
    if (noFieldFastPath || !m_lowererMD.GenerateLdThisCheck(instr))
    {
        IR::JnHelperMethod meth;
        if (instr->IsJitProfilingInstr())
        {
            // Simple-JIT profiling build: route through the profiled helper,
            // which needs the function body as an extra argument.
            Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
            m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
            meth = IR::HelperSimpleProfiledLdThis;
            this->LowerBinaryHelper(instr, meth);
        }
        else
        {
            meth = IR::HelperLdThisNoFastPath;
            this->LowerBinaryHelperMem(instr, meth);
        }
    }
    else
    {
        this->LowerBinaryHelperMem(instr, IR::HelperLdThis);
    }
    break;
}
case Js::OpCode::StrictLdThis:
    if (noFieldFastPath)
    {
        IR::JnHelperMethod meth;
        if (instr->IsJitProfilingInstr())
        {
            Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
            m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
            meth = IR::HelperSimpleProfiledStrictLdThis;
            this->LowerUnaryHelper(instr, meth);
        }
        else
        {
            meth = IR::HelperStrictLdThis;
            this->LowerUnaryHelperMem(instr, meth);
        }
    }
    else
    {
        // Strict-mode inline check fully replaces the instruction.
        m_lowererMD.GenerateLdThisStrict(instr);
        instr->Remove();
    }
    break;
case Js::OpCode::CheckThis:
    // Inline check plus unconditional bailout for the failure path.
    m_lowererMD.GenerateLdThisCheck(instr);
    instr->FreeSrc1();
    this->GenerateBailOut(instr);
    break;
case Js::OpCode::StrictCheckThis:
    m_lowererMD.GenerateLdThisStrict(instr);
    instr->FreeSrc1();
    this->GenerateBailOut(instr);
    break;
case Js::OpCode::NewScArray:
    instrPrev = this->LowerNewScArray(instr);
    break;
case Js::OpCode::NewScArrayWithMissingValues:
    this->LowerUnaryHelperMem(instr, IR::HelperScrArr_OP_NewScArrayWithMissingValues);
    break;
case Js::OpCode::NewScIntArray:
    instrPrev = this->LowerNewScIntArray(instr);
    break;
case Js::OpCode::NewScFltArray:
    instrPrev = this->LowerNewScFltArray(instr);
    break;
case Js::OpCode::GetForInEnumerator:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_OP_GetForInEnumerator);
    break;
case Js::OpCode::ReleaseForInEnumerator:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_OP_ReleaseForInEnumerator);
    break;
case Js::OpCode::Add_A:
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        // we don't want to mix float32 and float64
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Add);
    }
    else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
    {
        // Fused multiply-add pattern recognized; nothing more to lower here.
    }
    else
    {
        m_lowererMD.GenerateFastAdd(instr);
        this->LowerBinaryHelperMemWithTemp3(instr, IR_HELPER_OP_FULL_OR_INPLACE(Add), IR::HelperOp_AddLeftDead);
    }
    break;
case Js::OpCode::Div_A:
{
    if (instr->IsJitProfilingInstr()) {
        LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledDivide);
    }
    else if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else
    {
        if (!PHASE_OFF(Js::MathFastPathPhase, this->m_func) && !noMathFastPath)
        {
            // Fast path only for division by a constant tagged-int power of two.
            IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
            if (src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address))
            {
                int32 value = Js::TaggedInt::ToInt32(src2->m_address);
                if (Math::IsPow2(value))
                {
                    m_lowererMD.GenerateFastDivByPow2(instr);
                }
            }
        }
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Divide));
    }
    break;
}
case Js::OpCode::Expo_A:
{
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        // Float exponentiation lowers to the inline Math.pow built-in call.
        m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
    }
    else
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Exponentiation));
    }
    break;
}
case Js::OpCode::Mul_A:
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Multiply);
    }
    else if (m_lowererMD.GenerateFastMul(instr))
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Multiply));
    }
    break;
case Js::OpCode::Rem_A:
    if (instr->GetDst()->IsFloat64())
    {
        this->LowerRemR8(instr);
    }
    else if (instr->IsJitProfilingInstr())
    {
        this->LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledRemainder);
    }
    else
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Modulus));
    }
    break;
case Js::OpCode::Sub_A:
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Subtract);
    }
    else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
    {
        // Fused multiply-sub pattern recognized; nothing more to lower here.
    }
    else
    {
        m_lowererMD.GenerateFastSub(instr);
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Subtract));
    }
    break;
case Js::OpCode::And_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_And);
    }
    else if (m_lowererMD.GenerateFastAnd(instr))
    {
        // Inline tagged-int fast path emitted; helper covers the remaining cases.
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(And));
    }
    break;
case Js::OpCode::Or_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Or);
    }
    else if (m_lowererMD.GenerateFastOr(instr))
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Or));
    }
    break;
case Js::OpCode::Xor_A:
    // Unlike And/Or above, the fast-path result is folded into one condition here.
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastXor(instr))
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Xor));
    }
    break;
case Js::OpCode::Shl_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftLeft(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftLeft);
    }
    break;
case Js::OpCode::Shr_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRight);
    }
    break;
case Js::OpCode::ShrU_A:
    // Unsigned shift shares the machine fast path with Shr_A; only the helper differs.
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRightU);
    }
    break;
case Js::OpCode::CmEq_A:
    if (instr->GetSrc1()->IsFloat())
    {
        // Type-specialized float compare.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
    {
        // typeof-comparison pattern matched; fNoLower says whether a helper is still needed.
        if (!fNoLower)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmEq_A);
        }
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmEq_A);
    }
    break;
case Js::OpCode::CmNeq_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
    {
        if (!fNoLower)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmNeq_A);
        }
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmNeq_A);
    }
    break;
case Js::OpCode::CmSrEq_A:
    // Strict equality: try typeof pattern, then the dedicated strict-equal fast
    // path, then the tagged-int compare, before falling back to the helper.
    if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
    {
        if (!fNoLower)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_A);
        }
    }
    else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastCmSrEq(instr))
    {
        // Fully handled inline.
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_A);
    }
    break;
case Js::OpCode::CmSrNeq_A:
    if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
    {
        if (!fNoLower)
        {
            this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrNeq_A);
        }
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrNeq_A);
    }
    break;
case Js::OpCode::CmGt_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGt_A);
    }
    break;
case Js::OpCode::CmGe_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGe_A);
    }
    break;
case Js::OpCode::CmLt_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLt_A);
    }
    break;
case Js::OpCode::CmLe_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLe_A);
    }
    break;
case Js::OpCode::CmEq_I4:
case Js::OpCode::CmNeq_I4:
case Js::OpCode::CmGe_I4:
case Js::OpCode::CmGt_I4:
case Js::OpCode::CmLe_I4:
case Js::OpCode::CmLt_I4:
case Js::OpCode::CmUnGe_I4:
case Js::OpCode::CmUnGt_I4:
case Js::OpCode::CmUnLe_I4:
case Js::OpCode::CmUnLt_I4:
    // Type-specialized int compares need no helper at all.
    this->m_lowererMD.GenerateFastCmXxI4(instr);
    break;
case Js::OpCode::Conv_Bool:
    instrPrev = this->m_lowererMD.GenerateConvBool(instr);
    break;
case Js::OpCode::IsInst:
    // Inline instanceof fast path followed by the helper for the general case.
    m_lowererMD.GenerateFastIsInst(instr);
    instrPrev = this->LowerIsInst(instr, IR::HelperScrObj_OP_IsInst);
    break;
case Js::OpCode::IsIn:
    this->LowerBinaryHelperMem(instr, IR::HelperOp_IsIn);
    break;
// asm.js typed-array view loads — one shared lowering for all element types.
case Js::OpCode::LdInt8ArrViewElem:
case Js::OpCode::LdUInt8ArrViewElem:
case Js::OpCode::LdInt16ArrViewElem:
case Js::OpCode::LdUInt16ArrViewElem:
case Js::OpCode::LdInt32ArrViewElem:
case Js::OpCode::LdUInt32ArrViewElem:
case Js::OpCode::LdFloat32ArrViewElem:
case Js::OpCode::LdFloat64ArrViewElem:
    instrPrev = LowerLdArrViewElem(instr);
    break;
// asm.js typed-array view stores.
case Js::OpCode::StInt8ArrViewElem:
case Js::OpCode::StUInt8ArrViewElem:
case Js::OpCode::StInt16ArrViewElem:
case Js::OpCode::StUInt16ArrViewElem:
case Js::OpCode::StInt32ArrViewElem:
case Js::OpCode::StUInt32ArrViewElem:
case Js::OpCode::StFloat32ArrViewElem:
case Js::OpCode::StFloat64ArrViewElem:
    instrPrev = LowerStArrViewElem(instr);
    break;
case Js::OpCode::Memset:
case Js::OpCode::Memcopy:
{
    instrPrev = LowerMemOp(instr);
    break;
}
case Js::OpCode::ArrayDetachedCheck:
    instrPrev = LowerArrayDetachedCheck(instr);
    break;
case Js::OpCode::StElemI_A:
case Js::OpCode::StElemI_A_Strict:
{
    // Note: under debugger (Fast F12) don't let GenerateFastStElemI which calls into ToNumber_Helper
    // which takes double, and currently our helper wrapper doesn't support double.
    bool fastPath = !noMathFastPath && !m_func->IsJitInDebugMode();
    if(!fastPath && instr->HasBailOutInfo())
    {
        // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
        // bailouts require the fast path.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        const IR::BailOutKind bailOutKindBits = bailOutKind & IR::BailOutKindBits;
        if(bailOutKindBits & ~(IR::BailOutOnMissingValue | IR::BailOutConvertedNativeArray))
        {
            fastPath = true;
        }
        else
        {
            const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
            fastPath =
                bailOutKindMinusBits &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
        }
    }
    // Backfill the base operand's value type from profile data when it is a
    // single-def sym with no value type yet.
    IR::Opnd * opnd = instr->GetDst();
    IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
    ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
    if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
    {
        baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
    }
    bool instrIsInHelperBlock;
    if (!fastPath)
    {
        this->LowerStElemI(
            instr,
            instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
            false);
    }
    else if (GenerateFastStElemI(instr, &instrIsInHelperBlock))
    {
#if DBG
        if(instr->HasBailOutInfo())
        {
            // These bailout kinds must have been fully consumed by the fast path.
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            Assert(
                (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
                !(
                    bailOutKind &
                    (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
                ));
        }
#endif
        this->LowerStElemI(
            instr,
            instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
            instrIsInHelperBlock);
    }
    break;
}
case Js::OpCode::LdElemI_A:
case Js::OpCode::LdMethodElem:
{
    // LdMethodElem only takes the fast path when the base is likely an object.
    bool fastPath =
        !noMathFastPath &&
        (
            instr->m_opcode != Js::OpCode::LdMethodElem ||
            instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyObject()
        );
    if(!fastPath && instr->HasBailOutInfo())
    {
        // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
        // bailouts require the fast path.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutKindBits)
        {
            fastPath = true;
        }
        else
        {
            const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
            fastPath =
                bailOutKindMinusBits &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
        }
    }
    // Backfill the base operand's value type from profile data (same as StElemI).
    IR::Opnd * opnd = instr->GetSrc1();
    IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
    ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
    if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
    {
        baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
    }
    bool instrIsInHelperBlock;
    if (!fastPath)
    {
        this->LowerLdElemI(
            instr,
            instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
            false);
    }
    else if (GenerateFastLdElemI(instr, &instrIsInHelperBlock))
    {
#if DBG
        if(instr->HasBailOutInfo())
        {
            // These bailout kinds must have been fully consumed by the fast path.
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            Assert(
                (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
                !(
                    bailOutKind &
                    (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
                ));
        }
#endif
        this->LowerLdElemI(
            instr,
            instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
            instrIsInHelperBlock);
    }
    break;
}
    // Object-literal / class element initializers: each lowers to a StElemI-style
    // store through a specialized helper that installs the accessor or property.
    case Js::OpCode::InitSetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemSetter);
        break;

    case Js::OpCode::InitGetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemGetter);
        break;

    case Js::OpCode::InitComputedProperty:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitComputedProperty);
        break;

    case Js::OpCode::Delete_A:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Delete);
        break;

    case Js::OpCode::DeleteElemI_A:
        this->LowerDeleteElemI(instr, false);   // non-strict delete
        break;

    case Js::OpCode::DeleteElemIStrict_A:
        this->LowerDeleteElemI(instr, true);    // strict-mode delete
        break;

    case Js::OpCode::BytecodeArgOutCapture:
        // Plain register copy; no helper needed.
        m_lowererMD.ChangeToAssign(instr);
        break;

    case Js::OpCode::UnwrapWithObj:
        this->LowerUnaryHelper(instr, IR::HelperOp_UnwrapWithObj);
        break;

    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::InitConst:
        // A profiled load that begins a switch statement is lowered specially
        // so the switch value is profiled.
        if (instr->IsJitProfilingInstr() && instr->AsJitProfilingInstr()->isBeginSwitch) {
            LowerProfiledBeginSwitch(instr->AsJitProfilingInstr());
            break;
        }
        m_lowererMD.ChangeToAssign(instr);

        if (instr->HasBailOutInfo())
        {
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if (bailOutKind == IR::BailOutExpectingString)
            {
                this->LowerBailOnNotString(instr);
            }
            else
            {
                // Should not reach here as there are only 1 BailOutKind (BailOutExpectingString) currently associated with the Load Instr
                Assert(false);
            }
        }
        break;
    case Js::OpCode::LdIndir:
        // Load through an indirect operand: must be reg <- [indir] with no src2.
        Assert(instr->GetDst());
        Assert(instr->GetDst()->IsRegOpnd());
        Assert(instr->GetSrc1());
        Assert(instr->GetSrc1()->IsIndirOpnd());
        Assert(!instr->GetSrc2());
        m_lowererMD.ChangeToAssign(instr);
        break;

    case Js::OpCode::FromVar:
        // Unbox a Var into a machine type; dispatch on the destination type.
        Assert(instr->GetSrc1()->GetType() == TyVar);
        if (instr->GetDst()->GetType() == TyInt32)
        {
            if (m_lowererMD.EmitLoadInt32(instr, !(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnNotPrimitive))))
            {
                // Bail out instead of calling a helper
                Assert(instr->GetBailOutKind() == IR::BailOutIntOnly || instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                Assert(!instr->GetSrc1()->GetValueType().IsInt()); // when we know it's an int, it should not have bailout info, to avoid generating a bailout path that will never be taken
                instr->UnlinkSrc1();
                instr->UnlinkDst();
                GenerateBailOut(instr);
            }
        }
        else if (instr->GetDst()->IsFloat())
        {
            if (m_func->GetJnFunction()->GetIsAsmJsFunction())
            {
                // asm.js path uses the direct float load (no number check emitted here).
                m_lowererMD.EmitLoadFloat(instr->GetDst(), instr->GetSrc1(), instr);
                instr->Remove();
            }
            else
            {
                m_lowererMD.EmitLoadFloatFromNumber(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        // Support on IA only
#if defined(_M_IX86) || defined(_M_X64)
        else if (instr->GetDst()->IsSimd128())
        {
            // SIMD_JS
            m_lowererMD.GenerateCheckedSimdLoad(instr);
        }
#endif
        else
        {
            Assert(UNREACHED);
        }
        break;
    case Js::OpCode::ArgOut_A:
        // I don't know if this can happen in asm.js mode, but if it can, we might want to handle differently
        Assert(!m_func->GetJnFunction()->GetIsAsmjsMode());
        // fall-through

    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    {
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // Change the ArgOut into a store to the stack for bailouts
        instr->FreeSrc2();
        StackSym *argSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        argSym->m_offset = this->m_func->StackAllocate(sizeof(Js::Var));
        argSym->m_allocated = true;
        argSym->m_isOrphanedArg = true;
        this->m_lowererMD.ChangeToAssign(instr);
    }
        break;

    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::StartCall:
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // We'll just delete these StartCalls during peeps.
        break;
    case Js::OpCode::ToVar:
        // Box a machine-typed value (int32 / float64 / SIMD) back into a Var.
        Assert(instr->GetDst()->GetType() == TyVar);
        if (instr->GetSrc1()->GetType() == TyInt32)
        {
            m_lowererMD.EmitLoadVar(instr);
        }
        else if (instr->GetSrc1()->GetType() == TyFloat64)
        {
            Assert(instr->GetSrc1()->IsRegOpnd());
            m_lowererMD.SaveDoubleToVar(
                instr->GetDst()->AsRegOpnd(),
                instr->GetSrc1()->AsRegOpnd(), instr, instr);
            instr->Remove();
        }
#if defined(_M_IX86) || defined(_M_X64)
        else if (IRType_IsSimd128(instr->GetSrc1()->GetType()))
        {
            // SIMD boxing is only supported on IA.
            m_lowererMD.GenerateSimdStore(instr);
        }
#endif
        else
        {
            Assert(UNREACHED);
        }
        break;

    case Js::OpCode::Conv_Prim:
        // Primitive-to-primitive numeric conversion; dispatch on dst/src types.
        if (instr->GetDst()->IsFloat())
        {
            if (instr->GetSrc1()->IsIntConstOpnd())
            {
                // Constant source: materialize the float directly.
                LoadFloatFromNonReg(instr->UnlinkSrc1(), instr->UnlinkDst(), instr);
            }
            else if (instr->GetSrc1()->IsInt32())
            {
                m_lowererMD.EmitIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsUInt32())
            {
                Assert(instr->GetDst()->IsFloat64());
                m_lowererMD.EmitUIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else
            {
                // Only remaining legal combination: float32 -> float64 widening.
                Assert(instr->GetDst()->IsFloat64());
                Assert(instr->GetSrc1()->IsFloat32());
                m_lowererMD.EmitFloat32ToFloat64(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        else
        {
            Assert(instr->GetDst()->IsInt32());
            Assert(instr->GetSrc1()->IsFloat());
            m_lowererMD.EmitFloatToInt(instr->GetDst(), instr->GetSrc1(), instr);
        }
        instr->Remove();
        break;
    case Js::OpCode::FunctionExit:
        LowerFunctionExit(instr);
        // The rest of Epilog generation happens after reg allocation
        break;

    case Js::OpCode::FunctionEntry:
        LowerFunctionEntry(instr);
        // The rest of Prolog generation happens after reg allocation
        break;

    case Js::OpCode::ArgIn_Rest:
    case Js::OpCode::ArgIn_A:
        // asm.js has its own ArgIn lowering, except when jitting a loop body.
        if (m_func->GetJnFunction()->GetIsAsmjsMode() && !m_func->IsLoopBody())
        {
            instrPrev = LowerArgInAsmJs(instr);
        }
        else
        {
            instrPrev = LowerArgIn(instr);
        }
        break;

    case Js::OpCode::Label:
        if (instr->AsLabelInstr()->m_isLoopTop)
        {
            if (this->outerMostLoopLabel == instr)
            {
                // Leaving the outermost loop: restore the default fast-path settings
                // and mark the loop as processed.
                noFieldFastPath = !defaultDoFastPath;
                noMathFastPath = !defaultDoFastPath;
                this->outerMostLoopLabel = nullptr;
                instr->AsLabelInstr()->GetLoop()->isProcessed = true;
            }
            // Keep constant-address syms and our accumulated syms live across the back edge.
            this->m_func->MarkConstantAddressSyms(instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms);
            instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms->Or(this->addToLiveOnBackEdgeSyms);
        }
        break;
    case Js::OpCode::Br:
        m_lowererMD.LowerUncondBranch(instr);
        break;

    case Js::OpCode::BrFncEqApply:
        LowerBrFncApply(instr, IR::HelperOp_OP_BrFncEqApply);
        break;

    case Js::OpCode::BrFncNeqApply:
        LowerBrFncApply(instr, IR::HelperOp_OP_BrFncNeqApply);
        break;

    case Js::OpCode::BrHasSideEffects:
    case Js::OpCode::BrNotHasSideEffects:
        m_lowererMD.GenerateFastBrS(instr->AsBranchInstr());
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
        if (instr->GetSrc1()->IsFloat())
        {
            GenerateFastBrBool(instr->AsBranchInstr());
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) ||
            noMathFastPath ||
            GenerateFastBrBool(instr->AsBranchInstr()))
        {
            // Either no fast path, or the fast path still needs the generic
            // ToBoolean helper (GenerateFastBrBool returning true).
            this->LowerBrBMem(instr, IR::HelperConv_ToBoolean);
        }
        break;

    case Js::OpCode::BrOnObject_A:
        if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBrOnObject(instr, IR::HelperOp_IsObject);
        }
        else
        {
            GenerateFastBrOnObject(instr);
        }
        break;

    case Js::OpCode::BrOnClassConstructor:
        this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsClassConstructor);
        break;

    case Js::OpCode::BrAddr_A:
    case Js::OpCode::BrNotAddr_A:
    case Js::OpCode::BrNotNull_A:
        // Pure pointer comparisons; machine-dependent conditional branch.
        m_lowererMD.LowerCondBranch(instr);
        break;
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        // Loose equality branch: float compare, then a cascade of fast paths
        // (typeof, constant, string, likely-equal), falling back to the
        // Op_Equal helper only when a fast path says it is still needed.
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            bool needHelper = true;
            if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
            {
                if (!fNoLower)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_Equal, false, false /*isHelper*/);
                }
            }
            else if (this->TryGenerateFastBrEq(instr))
            {
                // Fully handled by the fast path.
            }
            else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_Equal, false);
                }
            }
            else
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_Equal, false, false /*isHelper*/);
                }
            }
            if (!needHelper)
            {
                // Fast path resolved the branch completely; the original instr is dead.
                instr->Remove();
            }
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Equal, true, false /*isHelper*/);
        }
        break;
    // Relational branches: identical shape for >=, >, <, <= — float compare if
    // the operands are floats, otherwise the corresponding helper, with the
    // fast path enabled unless the branch fast-path phase is off.
    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrNotGe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, true, false /*isHelper*/);
        }
        break;

    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrNotGt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_Greater, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Greater, true, false /*isHelper*/);
        }
        break;

    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrNotLt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_Less, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Less, true, false /*isHelper*/);
        }
        break;

    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotLe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_LessEqual, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_LessEqual, true, false /*isHelper*/);
        }
        break;
  1800. case Js::OpCode::BrNeq_A:
  1801. case Js::OpCode::BrNotEq_A:
  1802. if (instr->GetSrc1()->IsFloat())
  1803. {
  1804. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1805. m_lowererMD.LowerToFloat(instr);
  1806. }
  1807. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1808. {
  1809. bool needHelper = true;
  1810. if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
  1811. {
  1812. if (!fNoLower)
  1813. {
  1814. this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false, false /*isHelper*/);
  1815. }
  1816. }
  1817. else if (this->TryGenerateFastBrNeq(instr))
  1818. {
  1819. }
  1820. else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
  1821. {
  1822. this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false);
  1823. }
  1824. else
  1825. {
  1826. this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false, false /*isHelper*/);
  1827. }
  1828. }
  1829. else
  1830. {
  1831. this->LowerBrCMem(instr, IR::HelperOp_NotEqual, true, false /*isHelper*/);
  1832. }
  1833. break;
    case Js::OpCode::MultiBr:
    {
        // Switch-statement dispatch: lower according to the jump-table kind
        // chosen by the switch optimizer (string dictionary, single-char
        // string table, or integer jump table).
        IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();
        switch (multiBranchInstr->m_kind)
        {
        case IR::MultiBranchInstr::StrDictionary:
            this->GenerateSwitchStringLookup(instr);
            break;
        case IR::MultiBranchInstr::SingleCharStrJumpTable:
            this->GenerateSingleCharStrJumpTableLookup(instr);
            // Track total jump-table footprint for stats/limits.
            m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
            break;
        case IR::MultiBranchInstr::IntJumpTable:
            this->LowerMultiBr(instr);
            m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
            break;
        default:
            Assert(false);
        }
        break;
    }
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    {
        // Strict equality branch. srcReg1/srcReg2 feed the BrSrEq fast path.
        srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
        srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;

        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
        {
            if (!fNoLower)
            {
                this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false, false /*isHelper*/);
            }
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->GenerateFastBrSrEq(instr, srcReg1, srcReg2, &instrPrev, noMathFastPath))
        {
            // Fully handled by the strict-equality fast path.
        }
        else
        {
            bool needHelper = true;
            if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
            {
                if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
                {
                    if (needHelper)
                    {
                        this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false);
                    }
                }
                else
                {
                    if (needHelper)
                    {
                        this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false, false /*isHelper*/);
                    }
                }
                if (!needHelper)
                {
                    // Fast path resolved the branch completely.
                    instr->Remove();
                }
            }
            else
            {
                this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, true, false /*isHelper*/);
            }
        }
        break;
    }
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        // Strict inequality branch: same cascade as BrSrEq_A, using the
        // NotStrictEqual helper and the BrSrNeq fast path.
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            bool needHelper = true;
            if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
            {
                if (!fNoLower)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false, false /*isHelper*/);
                }
            }
            else if (this->GenerateFastBrSrNeq(instr, &instrPrev))
            {
                // Fully handled by the fast path.
            }
            else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false);
                }
            }
            else
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false, false /*isHelper*/);
                }
            }
            if (!needHelper)
            {
                // Fast path resolved the branch completely.
                instr->Remove();
            }
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, true, false /*isHelper*/);
        }
        break;
    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        // For-in enumerator branch; fast path first when the phase is enabled.
        if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func))
        {
            m_lowererMD.GenerateFastBrBReturn(instr);
            this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, true);
        }
        else
        {
            this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, false);
        }
        break;

    case Js::OpCode::BrOnHasProperty:
    case Js::OpCode::BrOnNoProperty:
        this->LowerBrProperty(instr, IR::HelperOp_HasProperty);
        break;

    case Js::OpCode::BrOnException:
        // Only expected when globopt is off; the branch is dead here.
        Assert(!this->m_func->DoGlobOpt());
        instr->Remove();
        break;

    case Js::OpCode::BrOnNoException:
        // Becomes a plain unconditional branch at this point.
        instr->m_opcode = LowererMD::MDUncondBranchOpcode;
        break;

    case Js::OpCode::StSlot:
        this->LowerStSlot(instr);
        break;

    case Js::OpCode::StSlotChkUndecl:
        this->LowerStSlotChkUndecl(instr);
        break;
    case Js::OpCode::ProfiledLoopStart:
    {
        // Simple-JIT loop profiling: save/clear implicit-call flags at the loop
        // top and fetch the scheduled loop-body entry point.
        Assert(m_func->DoSimpleJitDynamicProfile());
        Assert(instr->IsJitProfilingInstr());

        // Check for the helper instr from IRBuilding (it won't be there if there are no LoopEnds due to an infinite loop)
        auto prev = instr->m_prev;
        if (prev->IsJitProfilingInstr() && prev->AsJitProfilingInstr()->isLoopHelper)
        {
            auto saveOpnd = prev->UnlinkDst();
            instrPrev = prev->m_prev;
            prev->Remove();

            // Save the current implicit-call flags, then clear them for the loop.
            const auto starFlag = GetImplicitCallFlagsOpnd();
            IR::AutoReuseOpnd a(starFlag, m_func);
            this->InsertMove(saveOpnd, starFlag, instr);
            this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), instr);
        }
        else
        {
#if DBG
            // Double check that we indeed do not have a LoopEnd that is part of the same loop for the rest of the function
            auto cur = instr;
            auto loopNumber = instr->AsJitProfilingInstr()->loopNumber;
            while (cur)
            {
                Assert(cur->m_opcode != Js::OpCode::ProfiledLoopEnd || cur->IsJitProfilingInstr() && cur->AsJitProfilingInstr()->loopNumber != loopNumber);
                cur = cur->m_next;
            }
#endif
        }

        // If we turned off fulljit, there's no reason to do this.
        if (!m_func->GetJnFunction()->DoFullJit())
        {
            instr->Remove();
        }
        else
        {
            // Turn the instr into a call: dst = GetScheduledEntryPoint(framePtr, loopNumber)
            Assert(instr->GetDst());
            instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleGetScheduledEntryPoint, m_func));
            m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
            m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
            this->m_lowererMD.LowerCall(instr, 0);
        }
        break;
    }
    case Js::OpCode::ProfiledLoopBodyStart:
    {
        // Simple-JIT loop body prologue: record implicit-call flags, then decide
        // whether to bail out to a JITed loop body (see outline comment below).
        Assert(m_func->DoSimpleJitDynamicProfile());
        const auto loopNum = instr->AsJitProfilingInstr()->loopNumber;
        Assert(loopNum < m_func->GetJnFunction()->GetLoopCount());

        auto entryPointOpnd = instr->UnlinkSrc1();
        auto dobailout = instr->UnlinkDst();
        const auto dobailoutType = TyUint8;
        Assert(dobailout->GetType() == TyUint8 && sizeof(decltype(Js::SimpleJitHelpers::IsLoopCodeGenDone(nullptr))) == 1);

        // Turn the instr itself into RecordLoopImplicitCallFlags(framePtr, loopNum, 0).
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(0, TyUint32, m_func)); // zero indicates that we do not want to add flags back in
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(loopNum, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
        m_lowererMD.LowerCall(instr, 0);

        // Outline of JITed code:
        //
        // LoopStart:
        //     entryPoint = GetScheduledEntryPoint(framePtr, loopNum)
        // LoopBodyStart:
        //     uint8 dobailout;
        //     if (entryPoint) {
        //         dobailout = IsLoopCodeGenDone(entryPoint)
        //     } else {
        //         dobailout = ++interpretCount >= threshold
        //     }
        //     // already exists from IRBuilding:
        //     if (dobailout) {
        //         Bailout
        //     }

        if (!m_func->GetJnFunction()->DoFullJit() || !m_func->GetJnFunction()->DoJITLoopBody())
        {
            // If we're not doing fulljit, we've turned off JitLoopBodies, or if we don't have loop headers allocated (the function has a Try, etc)
            // just move false to dobailout
            this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), instr->m_next);
        }
        else if (m_func->GetJnFunction()->ForceJITLoopBody())
        {
            // If we're forcing jit loop bodies, move true to dobailout
            this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), instr->m_next);
        }
        else
        {
            // Put in the labels
            auto entryPointIsNull = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            auto checkDoBailout = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            instr->InsertAfter(checkDoBailout);
            instr->InsertAfter(entryPointIsNull);

            this->InsertCompareBranch(entryPointOpnd, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindDynamicMisc, m_func), Js::OpCode::BrEq_A, false, entryPointIsNull, instr->m_next);

            // If the entry point is not null
            auto isCodeGenDone = IR::Instr::New(Js::OpCode::Call, dobailout, IR::HelperCallOpnd::New(IR::HelperSimpleIsLoopCodeGenDone, m_func), m_func);
            entryPointIsNull->InsertBefore(isCodeGenDone);
            m_lowererMD.LoadHelperArgument(isCodeGenDone, entryPointOpnd);
            m_lowererMD.LowerCall(isCodeGenDone, 0);
            this->InsertBranch(LowererMD::MDUncondBranchOpcode, true, checkDoBailout, entryPointIsNull);

            // If the entry point is null
            auto head = m_func->GetJnFunction()->GetLoopHeader(loopNum);
            Assert(head);

            static_assert(sizeof(head->interpretCount) == 4, "Change the type in the following line");
            const auto type = TyUint32;
            auto countReg = IR::RegOpnd::New(type, m_func);
            auto countAddr = IR::MemRefOpnd::New(&head->interpretCount, type, m_func);
            IR::AutoReuseOpnd a(countReg, m_func), b(countAddr, m_func);

            // ++interpretCount, then dobailout = (interpretCount >= threshold).
            this->InsertAdd(false, countReg, countAddr, IR::IntConstOpnd::New(1, type, m_func, true), checkDoBailout);
            this->InsertMove(countAddr, countReg, checkDoBailout);
            this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), checkDoBailout);

            // GetLoopInterpretCount() is a dynamic quantity. It's computed at simple-JIT time here, but that's okay
            // because there would have been sufficient iterations in interpreted mode to get a reasonable value.
            const auto threshold = instr->m_func->GetJnFunction()->GetLoopInterpretCount(head);
            this->InsertCompareBranch(countReg, IR::IntConstOpnd::New(threshold, type, m_func), Js::OpCode::BrLt_A, checkDoBailout, checkDoBailout);
            this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), checkDoBailout);
            // fallthrough

            // Label checkDoBailout (inserted above)
        }
    }
    break;
    case Js::OpCode::ProfiledLoopEnd:
    {
        // Simple-JIT loop profiling: record (and restore) the implicit-call
        // flags that were saved at ProfiledLoopStart.
        Assert(m_func->DoSimpleJitDynamicProfile());

        // This is set up in IRBuilding
        Assert(instr->GetSrc1());
        IR::Opnd* savedFlags = instr->UnlinkSrc1();

        m_lowererMD.LoadHelperArgument(instr, savedFlags);
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
        m_lowererMD.LowerCall(instr, 0);
    }
    break;

    case Js::OpCode::InitLoopBodyCount:
        // Loop-body JIT bookkeeping: counter starts at zero.
        Assert(this->m_func->IsLoopBody());
        instr->SetSrc1(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
        this->m_lowererMD.ChangeToAssign(instr);
        break;

    case Js::OpCode::StLoopBodyCount:
        Assert(this->m_func->IsLoopBody());
        this->LowerStLoopBodyCount(instr);
        break;

    case Js::OpCode::IncrLoopBodyCount:
        // counter += 1, as a machine int add.
        Assert(this->m_func->IsLoopBody());
        instr->m_opcode = Js::OpCode::Add_I4;
        instr->SetSrc2(IR::IntConstOpnd::New(1, TyUint32, this->m_func));
        this->m_lowererMD.EmitInt4Instr(instr);
        break;

#if !FLOATVAR
    case Js::OpCode::StSlotBoxTemp:
        this->LowerStSlotBoxTemp(instr);
        break;
#endif
    case Js::OpCode::LdSlot:
    case Js::OpCode::LdSlotArr:
    {
        // Slot load; when the instr carries a profile id (simple-JIT dynamic
        // profiling), emit the profiling call after the lowered load.
        Js::ProfileId profileId;
        IR::Instr *profileBeforeInstr;
        if (instr->IsJitProfilingInstr())
        {
            profileId = instr->AsJitProfilingInstr()->profileId;
            Assert(profileId != Js::Constants::NoProfileId);
            profileBeforeInstr = instr->m_next;
        }
        else
        {
            profileId = Js::Constants::NoProfileId;
            profileBeforeInstr = nullptr;
        }

        this->LowerLdSlot(instr);

        if (profileId != Js::Constants::NoProfileId)
        {
            LowerProfileLdSlot(instr->GetDst(), instr->m_func, profileId, profileBeforeInstr);
        }
        break;
    }

    case Js::OpCode::LdAsmJsSlot:
        this->LowerLdSlot(instr);
        break;

    case Js::OpCode::StAsmJsSlot:
        this->LowerStSlot(instr);
        break;

    case Js::OpCode::ChkUndecl:
        instrPrev = this->LowerChkUndecl(instr);
        break;

    case Js::OpCode::LdArrHead:
        this->LowerLdArrHead(instr);
        break;

    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
        this->LowerStElemC(instr);
        break;

    case Js::OpCode::LdEnv:
        instrPrev = this->LowerLdEnv(instr);
        break;

    case Js::OpCode::LdAsmJsEnv:
        instrPrev = this->LowerLdAsmJsEnv(instr);
        break;

    case Js::OpCode::LdElemUndef:
        this->LowerLdElemUndef(instr);
        break;

    case Js::OpCode::LdElemUndefScoped:
        this->LowerElementUndefinedScopedMem(instr, IR::HelperOp_LdElemUndefScoped);
        break;

    case Js::OpCode::EnsureNoRootFld:
        this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootProperty);
        break;

    case Js::OpCode::EnsureNoRootRedeclFld:
        this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootRedeclProperty);
        break;

    case Js::OpCode::ScopedEnsureNoRedeclFld:
        this->LowerElementUndefinedScoped(instr, IR::HelperOp_EnsureNoRedeclPropertyScoped);
        break;
    case Js::OpCode::LdFuncExpr:
        // src = function Expression
        m_lowererMD.LoadFuncExpression(instr);
        this->GenerateGetCurrentFunctionObject(instr);
        break;

    case Js::OpCode::LdNewTarget:
        this->GenerateLoadNewTarget(instr);
        break;

    case Js::OpCode::ChkNewCallFlag:
        this->GenerateCheckForCallFlagNew(instr);
        break;

    case Js::OpCode::StFuncExpr:
        // object.propid = src
        LowerStFld(instr, IR::HelperOp_StFunctionExpression, IR::HelperOp_StFunctionExpression, false);
        break;

    // let/const/class member initializers: all route through LowerStFld (or
    // LowerStElemI for computed names) with the matching helper.
    case Js::OpCode::InitLetFld:
    case Js::OpCode::InitRootLetFld:
        LowerStFld(instr, IR::HelperOp_InitLetFld, IR::HelperOp_InitLetFld, false);
        break;

    case Js::OpCode::InitConstFld:
    case Js::OpCode::InitRootConstFld:
        LowerStFld(instr, IR::HelperOp_InitConstFld, IR::HelperOp_InitConstFld, false);
        break;

    case Js::OpCode::InitUndeclRootLetFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootLetFld);
        break;

    case Js::OpCode::InitUndeclRootConstFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootConstFld);
        break;

    case Js::OpCode::InitUndeclConsoleLetFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleLetFld);
        break;

    case Js::OpCode::InitUndeclConsoleConstFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleConstFld);
        break;

    case Js::OpCode::InitClassMember:
        LowerStFld(instr, IR::HelperOp_InitClassMember, IR::HelperOp_InitClassMember, false);
        break;

    case Js::OpCode::InitClassMemberComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberComputedName);
        break;

    case Js::OpCode::InitClassMemberGetComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberGetComputedName);
        break;

    case Js::OpCode::InitClassMemberSetComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberSetComputedName);
        break;

    case Js::OpCode::InitClassMemberGet:
        instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberGet, IR::HelperOp_InitClassMemberGet, false);
        break;

    case Js::OpCode::InitClassMemberSet:
        instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberSet, IR::HelperOp_InitClassMemberSet, false);
        break;

    case Js::OpCode::NewStackFrameDisplay:
        this->LowerLdFrameDisplay(instr, m_func->DoStackFrameDisplay());
        break;

    case Js::OpCode::LdFrameDisplay:
        this->LowerLdFrameDisplay(instr, false);
        break;

    case Js::OpCode::LdInnerFrameDisplay:
        this->LowerLdInnerFrameDisplay(instr);
        break;
    case Js::OpCode::Throw:
    case Js::OpCode::InlineThrow:
    case Js::OpCode::EHThrow:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Throw);
        break;

    case Js::OpCode::TryCatch:
        instrPrev = this->LowerTry(instr, true /*try-catch*/);
        break;

    case Js::OpCode::TryFinally:
        instrPrev = this->LowerTry(instr, false /*try-finally*/);
        break;

    case Js::OpCode::Catch:
        instrPrev = m_lowererMD.LowerCatch(instr);
        break;

    case Js::OpCode::LeaveNull:
        instrPrev = m_lowererMD.LowerLeaveNull(instr);
        break;

    case Js::OpCode::Leave:
        if (this->m_func->HasTry() && this->m_func->DoOptimizeTryCatch())
        {
            // Required in Register Allocator to mark region boundaries
            break;
        }
        instrPrev = m_lowererMD.LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), false /*fromFinalLower*/, instr->AsBranchInstr()->m_isOrphanedLeave);
        break;

    case Js::OpCode::BailOnException:
        instrPrev = this->LowerBailOnException(instr);
        break;

    case Js::OpCode::RuntimeTypeError:
    case Js::OpCode::InlineRuntimeTypeError:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeTypeError);
        break;

    case Js::OpCode::RuntimeReferenceError:
    case Js::OpCode::InlineRuntimeReferenceError:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
        break;

    case Js::OpCode::Break:
        // Inline breakpoint: for now do nothing.
        break;
  2292. case Js::OpCode::Nop:
  2293. // This may need support for debugging the JIT, but for now just remove the instruction.
  2294. instr->Remove();
  2295. break;
  2296. case Js::OpCode::Unused:
  2297. // Currently Unused is used with ScopedLdInst to keep the second dst alive, but we don't need to lower it.
  2298. instr->Remove();
  2299. break;
  2300. case Js::OpCode::StatementBoundary:
  2301. // This instruction is merely to help convey source info through the IR
  2302. // and eventually generate the nativeOffset maps.
  2303. break;
  2304. case Js::OpCode::BailOnNotPolymorphicInlinee:
  2305. instrPrev = LowerBailOnNotPolymorphicInlinee(instr);
  2306. break;
  2307. case Js::OpCode::BailOnNoSimdTypeSpec:
  2308. case Js::OpCode::BailOnNoProfile:
  2309. this->GenerateBailOut(instr, nullptr, nullptr);
  2310. break;
  2311. case Js::OpCode::BailOnNotSpreadable:
  2312. instrPrev = this->LowerBailOnNotSpreadable(instr);
  2313. break;
  2314. case Js::OpCode::BailOnNotStackArgs:
  2315. instrPrev = this->LowerBailOnNotStackArgs(instr);
  2316. break;
  2317. case Js::OpCode::BailOnEqual:
  2318. case Js::OpCode::BailOnNotEqual:
  2319. instrPrev = this->LowerBailOnEqualOrNotEqual(instr);
  2320. break;
  2321. case Js::OpCode::BailOnNegative:
  2322. LowerBailOnNegative(instr);
  2323. break;
  2324. case Js::OpCode::BailForDebugger:
  2325. instrPrev = this->LowerBailForDebugger(instr);
  2326. break;
  2327. case Js::OpCode::BailOnNotObject:
  2328. instrPrev = this->LowerBailOnNotObject(instr);
  2329. break;
  2330. case Js::OpCode::BailOnNotBuiltIn:
  2331. instrPrev = this->LowerBailOnNotBuiltIn(instr);
  2332. break;
  2333. case Js::OpCode::BailOnNotArray:
  2334. {
  2335. IR::Instr *bailOnNotArray, *bailOnMissingValue;
  2336. SplitBailOnNotArray(instr, &bailOnNotArray, &bailOnMissingValue);
  2337. IR::RegOpnd *const arrayOpnd = LowerBailOnNotArray(bailOnNotArray);
  2338. if(bailOnMissingValue)
  2339. {
  2340. LowerBailOnMissingValue(bailOnMissingValue, arrayOpnd);
  2341. }
  2342. break;
  2343. }
  2344. case Js::OpCode::BoundCheck:
  2345. case Js::OpCode::UnsignedBoundCheck:
  2346. LowerBoundCheck(instr);
  2347. break;
  2348. case Js::OpCode::BailTarget:
  2349. instrPrev = this->LowerBailTarget(instr);
  2350. break;
  2351. case Js::OpCode::InlineeStart:
  2352. this->LowerInlineeStart(instr);
  2353. break;
  2354. case Js::OpCode::EndCallForPolymorphicInlinee:
  2355. instr->Remove();
  2356. break;
  2357. case Js::OpCode::InlineeEnd:
  2358. this->LowerInlineeEnd(instr);
  2359. break;
  2360. case Js::OpCode::InlineBuiltInEnd:
  2361. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  2362. this->LowerInlineBuiltIn(instr);
  2363. break;
  2364. case Js::OpCode::ExtendArg_A:
  2365. if (instr->GetSrc1()->IsRegOpnd())
  2366. {
  2367. IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
  2368. this->addToLiveOnBackEdgeSyms->Clear(src1->m_sym->m_id);
  2369. }
  2370. instr->Remove();
  2371. break;
  2372. case Js::OpCode::InlineBuiltInStart:
  2373. case Js::OpCode::BytecodeArgOutUse:
  2374. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  2375. instr->Remove();
  2376. break;
  2377. case Js::OpCode::DeadBrEqual:
  2378. this->LowerBinaryHelperMem(instr, IR::HelperOp_Equal);
  2379. break;
  2380. case Js::OpCode::DeadBrSrEqual:
  2381. this->LowerBinaryHelperMem(instr, IR::HelperOp_StrictEqual);
  2382. break;
  2383. case Js::OpCode::DeadBrRelational:
  2384. this->LowerBinaryHelperMem(instr, IR::HelperOp_Greater);
  2385. break;
  2386. case Js::OpCode::DeadBrOnHasProperty:
  2387. this->LowerUnaryHelperMem(instr, IR::HelperOp_HasProperty);
  2388. break;
  2389. case Js::OpCode::DeletedNonHelperBranch:
  2390. break;
  2391. case Js::OpCode::InitClass:
  2392. instrPrev = this->LowerInitClass(instr);
  2393. break;
  2394. case Js::OpCode::NewConcatStrMulti:
  2395. this->LowerNewConcatStrMulti(instr);
  2396. break;
  2397. case Js::OpCode::NewConcatStrMultiBE:
  2398. this->LowerNewConcatStrMultiBE(instr);
  2399. break;
  2400. case Js::OpCode::SetConcatStrMultiItem:
  2401. this->LowerSetConcatStrMultiItem(instr);
  2402. break;
  2403. case Js::OpCode::SetConcatStrMultiItemBE:
  2404. Assert(instr->GetSrc1()->IsRegOpnd());
  2405. this->addToLiveOnBackEdgeSyms->Clear(instr->GetSrc1()->GetStackSym()->m_id);
  2406. // code corresponding to it should already have been generated while lowering NewConcatStrMultiBE
  2407. instr->Remove();
  2408. break;
  2409. case Js::OpCode::Conv_Str:
  2410. this->LowerConvStr(instr);
  2411. break;
  2412. case Js::OpCode::Coerse_Str:
  2413. this->LowerCoerseStr(instr);
  2414. break;
  2415. case Js::OpCode::Coerse_StrOrRegex:
  2416. this->LowerCoerseStrOrRegex(instr);
  2417. break;
  2418. case Js::OpCode::Coerse_Regex:
  2419. this->LowerCoerseRegex(instr);
  2420. break;
  2421. case Js::OpCode::Conv_PrimStr:
  2422. this->LowerConvPrimStr(instr);
  2423. break;
  2424. case Js::OpCode::ObjectFreeze:
  2425. this->LowerUnaryHelper(instr, IR::HelperOP_Freeze);
  2426. break;
  2427. case Js::OpCode::ClearAttributes:
  2428. this->LowerBinaryHelper(instr, IR::HelperOP_ClearAttributes);
  2429. break;
  2430. case Js::OpCode::SpreadArrayLiteral:
  2431. this->LowerSpreadArrayLiteral(instr);
  2432. break;
  2433. case Js::OpCode::CallIExtended:
  2434. {
  2435. // Currently, the only use for CallIExtended is a call that uses spread.
  2436. Assert(IsSpreadCall(instr));
  2437. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_None);
  2438. break;
  2439. }
  2440. case Js::OpCode::CallIExtendedNew:
  2441. {
  2442. // Currently, the only use for CallIExtended is a call that uses spread.
  2443. Assert(IsSpreadCall(instr));
  2444. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_New);
  2445. break;
  2446. }
  2447. case Js::OpCode::CallIExtendedNewTargetNew:
  2448. {
  2449. // Currently, the only use for CallIExtended is a call that uses spread.
  2450. Assert(IsSpreadCall(instr));
  2451. instrPrev = this->LowerSpreadCall(instr, (Js::CallFlags)(Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget));
  2452. break;
  2453. }
  2454. case Js::OpCode::LdSpreadIndices:
  2455. instr->Remove();
  2456. break;
  2457. case Js::OpCode::LdSuper:
  2458. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperLdSuper);
  2459. break;
  2460. case Js::OpCode::LdSuperCtor:
  2461. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperLdSuperCtor);
  2462. break;
  2463. case Js::OpCode::ScopedLdSuper:
  2464. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdSuper);
  2465. break;
  2466. case Js::OpCode::ScopedLdSuperCtor:
  2467. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdSuperCtor);
  2468. break;
  2469. case Js::OpCode::SetHomeObj:
  2470. {
  2471. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  2472. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2473. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  2474. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2475. m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetHomeObj);
  2476. break;
  2477. }
  2478. case Js::OpCode::SetComputedNameVar:
  2479. {
  2480. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  2481. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2482. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  2483. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2484. m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetComputedNameVar);
  2485. break;
  2486. }
  2487. case Js::OpCode::InlineeMetaArg:
  2488. {
  2489. m_lowererMD.ChangeToAssign(instr);
  2490. break;
  2491. }
  2492. case Js::OpCode::Yield:
  2493. {
  2494. instr->FreeSrc1(); // Source is not actually used by the backend other than to calculate lifetime
  2495. IR::Opnd* dstOpnd = instr->UnlinkDst();
  2496. // prm2 is the ResumeYieldData pointer per calling convention established in JavascriptGenerator::CallGenerator
  2497. // This is the value the bytecode expects to be in the dst register of the Yield opcode after resumption.
  2498. // Load it here after the bail-in.
  2499. StackSym *resumeYieldDataSym = StackSym::NewParamSlotSym(2, m_func);
  2500. m_func->SetArgOffset(resumeYieldDataSym, (LowererMD::GetFormalParamOffset() + 1) * MachPtr);
  2501. IR::SymOpnd * resumeYieldDataOpnd = IR::SymOpnd::New(resumeYieldDataSym, TyMachPtr, m_func);
  2502. AssertMsg(instr->m_next->IsLabelInstr(), "Expect the resume label to immediately follow Yield instruction");
  2503. m_lowererMD.CreateAssign(dstOpnd, resumeYieldDataOpnd, instr->m_next->m_next);
  2504. GenerateBailOut(instr);
  2505. break;
  2506. }
  2507. case Js::OpCode::ResumeYield:
  2508. case Js::OpCode::ResumeYieldStar:
  2509. {
  2510. IR::Opnd *srcOpnd1 = instr->UnlinkSrc1();
  2511. IR::Opnd *srcOpnd2 = instr->m_opcode == Js::OpCode::ResumeYieldStar ? instr->UnlinkSrc2() : IR::AddrOpnd::NewNull(m_func);
  2512. m_lowererMD.LoadHelperArgument(instr, srcOpnd2);
  2513. m_lowererMD.LoadHelperArgument(instr, srcOpnd1);
  2514. m_lowererMD.ChangeToHelperCall(instr, IR::HelperResumeYield);
  2515. break;
  2516. }
  2517. case Js::OpCode::GeneratorResumeJumpTable:
  2518. {
  2519. // Lowered in LowerPrologEpilog so that the jumps introduced are not considered to be part of the flow for the RegAlloc phase.
  2520. // Introduce a BailOutNoSave label if there were yield points that were elided due to optimizations. They could still be hit
  2521. // if an active generator object had been paused at such a yield point when the function body was JITed. So safe guard such a
  2522. // case by having the native code simply jump back to the interpreter for such yield points.
  2523. IR::LabelInstr *bailOutNoSaveLabel = nullptr;
  2524. m_func->MapUntilYieldOffsetResumeLabels([this, &bailOutNoSaveLabel](int, const YieldOffsetResumeLabel& yorl)
  2525. {
  2526. if (yorl.Second() == nullptr)
  2527. {
  2528. if (bailOutNoSaveLabel == nullptr)
  2529. {
  2530. bailOutNoSaveLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2531. }
  2532. return true;
  2533. }
  2534. return false;
  2535. });
  2536. // Insert the bailoutnosave label somewhere along with a call to BailOutNoSave helper
  2537. if (bailOutNoSaveLabel != nullptr)
  2538. {
  2539. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  2540. IR::LabelInstr * exitTargetInstr;
  2541. if (exitPrevInstr->IsLabelInstr())
  2542. {
  2543. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  2544. exitPrevInstr = exitPrevInstr->m_prev;
  2545. }
  2546. else
  2547. {
  2548. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  2549. exitPrevInstr->InsertAfter(exitTargetInstr);
  2550. }
  2551. bailOutNoSaveLabel->m_hasNonBranchRef = true;
  2552. bailOutNoSaveLabel->isOpHelper = true;
  2553. IR::Instr* bailOutCall = IR::Instr::New(Js::OpCode::Call, m_func);
  2554. exitPrevInstr->InsertAfter(bailOutCall);
  2555. exitPrevInstr->InsertAfter(bailOutNoSaveLabel);
  2556. exitPrevInstr->InsertAfter(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, m_func));
  2557. IR::RegOpnd * frameRegOpnd = IR::RegOpnd::New(nullptr, LowererMD::GetRegFramePointer(), TyMachPtr, m_func);
  2558. m_lowererMD.LoadHelperArgument(bailOutCall, frameRegOpnd);
  2559. m_lowererMD.ChangeToHelperCall(bailOutCall, IR::HelperNoSaveRegistersBailOutForElidedYield);
  2560. m_func->m_bailOutNoSaveLabel = bailOutNoSaveLabel;
  2561. }
  2562. break;
  2563. }
  2564. case Js::OpCode::AsyncSpawn:
  2565. this->LowerBinaryHelperMem(instr, IR::HelperAsyncSpawn);
  2566. break;
  2567. case Js::OpCode::FrameDisplayCheck:
  2568. instrPrev = this->LowerFrameDisplayCheck(instr);
  2569. break;
  2570. case Js::OpCode::SlotArrayCheck:
  2571. instrPrev = this->LowerSlotArrayCheck(instr);
  2572. break;
  2573. default:
  2574. #if defined(_M_IX86) || defined(_M_X64)
  2575. if (IsSimd128Opcode(instr->m_opcode))
  2576. {
  2577. instrPrev = m_lowererMD.Simd128Instruction(instr);
  2578. break;
  2579. }
  2580. #endif
  2581. AssertMsg(instr->IsLowered(), "Unknown opcode");
  2582. if(!instr->IsLowered())
  2583. {
  2584. Fatal();
  2585. }
  2586. break;
  2587. }
  2588. #if DBG
  2589. LegalizeVerifyRange(instrPrev ? instrPrev->m_next : instrStart,
  2590. verifyLegalizeInstrNext ? verifyLegalizeInstrNext->m_prev : nullptr);
  2591. #endif
  2592. } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
  2593. Assert(this->outerMostLoopLabel == nullptr);
  2594. }
  2595. IR::Instr *
  2596. Lowerer::LoadFunctionBody(IR::Instr * instr)
  2597. {
  2598. return m_lowererMD.LoadHelperArgument(instr, LoadFunctionBodyOpnd(instr));
  2599. }
  2600. IR::Instr *
  2601. Lowerer::LoadScriptContext(IR::Instr * instr)
  2602. {
  2603. return m_lowererMD.LoadHelperArgument(instr, LoadScriptContextOpnd(instr));
  2604. }
  2605. IR::Opnd *
  2606. Lowerer::LoadFunctionBodyOpnd(IR::Instr * instr)
  2607. {
  2608. return IR::AddrOpnd::New(instr->m_func->GetJnFunction(), IR::AddrOpndKindDynamicFunctionBody, instr->m_func);
  2609. }
  2610. IR::Opnd *
  2611. Lowerer::LoadScriptContextOpnd(IR::Instr * instr)
  2612. {
  2613. return IR::AddrOpnd::New(this->m_func->GetScriptContext(), IR::AddrOpndKindDynamicScriptContext, this->m_func);
  2614. }
  2615. IR::Opnd *
  2616. Lowerer::LoadScriptContextValueOpnd(IR::Instr * instr, ScriptContextValue valueType)
  2617. {
  2618. Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
  2619. switch (valueType)
  2620. {
  2621. case ScriptContextValue::ScriptContextNumberAllocator:
  2622. return IR::AddrOpnd::New(scriptContext->GetNumberAllocator(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2623. case ScriptContextValue::ScriptContextRecycler:
  2624. return IR::AddrOpnd::New(scriptContext->GetRecycler(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2625. default:
  2626. Assert(false);
  2627. return nullptr;
  2628. }
  2629. }
// Builds an operand for a well-known value cached on the script context's
// JavascriptLibrary (singleton vars, static types, constructor-cache default,
// char-string cache, etc.).
// NOTE(review): the regNum parameter is unused in this body — presumably kept
// for signature compatibility with callers; confirm before removing.
IR::Opnd *
Lowerer::LoadLibraryValueOpnd(IR::Instr * instr, LibraryValue valueType, RegNum regNum)
{
    Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
    switch (valueType)
    {
    // Singleton var values (the trailing bool flag is passed as true for these).
    case LibraryValue::ValueEmptyString:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetEmptyString(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndeclBlockVar:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetUndeclBlockVar(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndefined:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetUndefined(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNull:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNull(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueTrue:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetTrue(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueFalse:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetFalse(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNegativeZero:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNegativeZero(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    // Static/dynamic type pointers.
    case LibraryValue::ValueNumberTypeStatic:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNumberTypeStatic(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    case LibraryValue::ValueStringTypeStatic:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetStringTypeStatic(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    case LibraryValue::ValueObjectType:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetObjectType(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueObjectHeaderInlinedType:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetObjectHeaderInlinedType(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueRegexType:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetRegexType(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueArrayConstructor:
        return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetArrayConstructor(), IR::AddrOpndKindDynamicVar, instr->m_func);
    // Initial array types come from the array classes, keyed by script context.
    case LibraryValue::ValueJavascriptArrayType:
        return IR::AddrOpnd::New(Js::JavascriptArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeIntArrayType:
        return IR::AddrOpnd::New(Js::JavascriptNativeIntArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeFloatArrayType:
        return IR::AddrOpnd::New(Js::JavascriptNativeFloatArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
    // Miscellaneous process/context globals.
    case LibraryValue::ValueConstructorCacheDefaultInstance:
        return IR::AddrOpnd::New(&Js::ConstructorCache::DefaultInstance, IR::AddrOpndKindDynamicMisc, instr->m_func);
    case LibraryValue::ValueAbsDoubleCst:
        // Double constant is referenced through a MemRefOpnd, not an AddrOpnd.
        return IR::MemRefOpnd::New((void*)&Js::JavascriptNumber::AbsDoubleCst, TyMachDouble, instr->m_func, IR::AddrOpndKindDynamicDoubleRef);
    case LibraryValue::ValueCharStringCache:
        return IR::AddrOpnd::New((Js::Var)&scriptContext->GetLibrary()->GetCharStringCache(), IR::AddrOpndKindDynamicCharStringCache, instr->m_func);
    default:
        // Unknown enum value: fail fast in debug builds.
        Assert(false);
        return nullptr;
    }
}
  2679. IR::Opnd *
  2680. Lowerer::LoadVTableValueOpnd(IR::Instr * instr, VTableValue vtableType)
  2681. {
  2682. return IR::AddrOpnd::New((Js::Var)instr->m_func->GetScriptContext()->GetLibrary()->GetVTableAddresses()[vtableType], IR::AddrOpndKindDynamicVtable, this->m_func);
  2683. }
// Builds a memory-reference operand pointing at one of the script context's
// optimization-override fields so JITed code can read it at run time.
IR::Opnd *
Lowerer::LoadOptimizationOverridesValueOpnd(IR::Instr *instr, OptimizationOverridesValue valueType)
{
    Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
    switch (valueType)
    {
    case OptimizationOverridesValue::OptimizationOverridesSideEffects:
        // Side-effects field is read as a 32-bit value; the vtable slots below
        // are pointer-sized.
        return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfSideEffects(), TyInt32, instr->m_func);
    case OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable:
        return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
    case OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable:
        return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfIntArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
    case OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable:
        return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfFloatArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
    default:
        // Unknown enum value: fail fast in debug builds.
        Assert(false);
        return nullptr;
    }
}
// Builds a memory-reference operand into the script context's JavascriptNumber
// allocator (end-address or free-object-list field), for inlined number
// allocation fast paths.
IR::Opnd *
Lowerer::LoadNumberAllocatorValueOpnd(IR::Instr *instr, NumberAllocatorValue valueType)
{
    Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
    bool allowNativeCodeBumpAllocation = scriptContext->GetNumberAllocator()->AllowNativeCodeBumpAllocation();
    switch (valueType)
    {
    case NumberAllocatorValue::NumberAllocatorEndAddress:
        return IR::MemRefOpnd::New(((char *)scriptContext->GetNumberAllocator()) + Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, instr->m_func);
    case NumberAllocatorValue::NumberAllocatorFreeObjectList:
        // When bump allocation from native code is disallowed, the end-address
        // field is substituted for the free-object-list field.
        // NOTE(review): presumably this makes the JITed fast-path check fail so
        // it falls back to the helper — confirm against the allocator's layout.
        return IR::MemRefOpnd::New(
            ((char *)scriptContext->GetNumberAllocator()) +
            (allowNativeCodeBumpAllocation ? Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset() : Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset()),
            TyMachPtr, instr->m_func);
    default:
        // Unknown enum value: fail fast in debug builds.
        Assert(false);
        return nullptr;
    }
}
  2722. IR::Opnd *
  2723. Lowerer::LoadIsInstInlineCacheOpnd(IR::Instr * instr, uint inlineCacheIndex)
  2724. {
  2725. Js::IsInstInlineCache * inlineCache = instr->m_func->GetJnFunction()->GetIsInstInlineCache(inlineCacheIndex);
  2726. return IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
  2727. }
  2728. IR::Opnd *
  2729. Lowerer::LoadRuntimeInlineCacheOpnd(IR::Instr * instr, IR::PropertySymOpnd * propertySymOpnd, bool isHelper)
  2730. {
  2731. Assert(propertySymOpnd->m_runtimeInlineCache != nullptr);
  2732. IR::Opnd * inlineCacheOpnd = nullptr;
  2733. if (instr->m_func->GetJnFunction()->GetInlineCachesOnFunctionObject() && !instr->m_func->IsInlinee())
  2734. {
  2735. inlineCacheOpnd = this->GetInlineCacheFromFuncObjectForRuntimeUse(instr, propertySymOpnd, isHelper);
  2736. }
  2737. else
  2738. {
  2739. Js::InlineCache * inlineCache = propertySymOpnd->m_runtimeInlineCache;
  2740. inlineCacheOpnd = IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func, /* dontEncode */ true);
  2741. }
  2742. return inlineCacheOpnd;
  2743. }
  2744. bool
  2745. Lowerer::TryGenerateFastCmSrEq(IR::Instr * instr)
  2746. {
  2747. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2748. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2749. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2750. {
  2751. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2752. }
  2753. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2754. {
  2755. instr->SwapOpnds();
  2756. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2757. }
  2758. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2759. {
  2760. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2761. return true;
  2762. }
  2763. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2764. {
  2765. instr->SwapOpnds();
  2766. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2767. return true;
  2768. }
  2769. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2770. {
  2771. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2772. return true;
  2773. }
  2774. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2775. {
  2776. instr->SwapOpnds();
  2777. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2778. return true;
  2779. }
  2780. return false;
  2781. }
  2782. bool
  2783. Lowerer::GenerateFastBrSrEq(IR::Instr * instr, IR::RegOpnd * srcReg1, IR::RegOpnd * srcReg2, IR::Instr ** pInstrPrev, bool noMathFastPath)
  2784. {
  2785. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2786. {
  2787. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), true);
  2788. instr->Remove();
  2789. return true;
  2790. }
  2791. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2792. {
  2793. instr->SwapOpnds();
  2794. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), true);
  2795. instr->Remove();
  2796. return true;
  2797. }
  2798. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2799. {
  2800. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2801. return true;
  2802. }
  2803. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2804. {
  2805. instr->SwapOpnds();
  2806. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2807. return true;
  2808. }
  2809. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2810. {
  2811. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2812. return true;
  2813. }
  2814. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2815. {
  2816. instr->SwapOpnds();
  2817. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2818. return true;
  2819. }
  2820. return false;
  2821. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastBrConst
///
/// Emits a direct compare-and-branch of the branch's src1 against constOpnd
/// (an address or int constant), targeting the original branch target.
/// Branches on equality when isEqual is true, on inequality otherwise.
/// The caller is responsible for removing (or retargeting) the original
/// branch instruction afterwards.
///
///----------------------------------------------------------------------------
IR::BranchInstr *
Lowerer::GenerateFastBrConst(IR::BranchInstr *branchInstr, IR::Opnd * constOpnd, bool isEqual)
{
    Assert(constOpnd->IsAddrOpnd() || constOpnd->IsIntConstOpnd());
    //
    // Given:
    // BrSrEq_A $L1, s1, s2
    // where s2 is either 'null', 'undefined', 'true' or 'false'
    //
    // Generate:
    //
    // CMP s1, s2
    // JEQ/JNE $L1
    //
    // Callers must have normalized the constant into the src2 position
    // (e.g. via SwapOpnds) before calling.
    Assert(this->IsConstRegOpnd(branchInstr->GetSrc2()->AsRegOpnd()));
    IR::Opnd *opnd = branchInstr->GetSrc1();
    // The compare needs src1 in a register; materialize it if necessary.
    if (!opnd->IsRegOpnd())
    {
        IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(lhsReg, opnd, branchInstr);
        opnd = lhsReg;
    }
    Assert(opnd->IsRegOpnd());
    // Insert the compare+branch immediately before the original branch.
    IR::BranchInstr *newBranch;
    newBranch = InsertCompareBranch(opnd, constOpnd, isEqual ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, branchInstr->GetTarget(), branchInstr);
    return newBranch;
}
  2854. bool
  2855. Lowerer::TryGenerateFastBrEq(IR::Instr * instr)
  2856. {
  2857. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2858. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2859. bool isConst = false;
  2860. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2861. {
  2862. instr->SwapOpnds();
  2863. isConst = true;
  2864. }
  2865. // Fast path for == null or == undefined
  2866. // if (src == null || src == undefined)
  2867. if (isConst || srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2))
  2868. {
  2869. IR::BranchInstr *newBranch;
  2870. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2871. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2872. true);
  2873. this->GenerateFastBrConst(instr->AsBranchInstr(),
  2874. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  2875. true);
  2876. instr->Remove();
  2877. return true;
  2878. }
  2879. return false;
  2880. }
  2881. bool
  2882. Lowerer::TryGenerateFastBrNeq(IR::Instr * instr)
  2883. {
  2884. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2885. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2886. bool isConst = false;
  2887. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2888. {
  2889. instr->SwapOpnds();
  2890. isConst = true;
  2891. }
  2892. // Fast path for != null or != undefined
  2893. // if (src != null && src != undefined)
  2894. //
  2895. // That is:
  2896. // if (src == NULL) goto labelEq
  2897. // if (src != undef) goto target
  2898. // labelEq:
  2899. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  2900. {
  2901. IR::LabelInstr *labelEq = instr->GetOrCreateContinueLabel();
  2902. IR::BranchInstr *newBranch;
  2903. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2904. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2905. true);
  2906. newBranch->AsBranchInstr()->SetTarget(labelEq);
  2907. this->GenerateFastBrConst(instr->AsBranchInstr(),
  2908. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  2909. false);
  2910. instr->Remove();
  2911. return true;
  2912. }
  2913. return false;
  2914. }
  2915. bool
  2916. Lowerer::GenerateFastBrSrNeq(IR::Instr * instr, IR::Instr ** pInstrPrev)
  2917. {
  2918. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2919. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2920. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2921. {
  2922. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), false);
  2923. instr->Remove();
  2924. return true;
  2925. }
  2926. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2927. {
  2928. instr->SwapOpnds();
  2929. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), false);
  2930. instr->Remove();
  2931. return true;
  2932. }
  2933. return false;
  2934. }
// Emits inline allocation and initialization of a Js::DynamicObject with
// inlineSlotCount inline slots, placing the new object in newObjDst and its
// type from typeSrc. When slotCount exceeds inlineSlotCount, an auxiliary
// slot array is allocated for the overflow; otherwise auxSlots is nulled.
void
Lowerer::GenerateDynamicObjectAlloc(IR::Instr * newObjInstr, uint inlineSlotCount, uint slotCount, IR::RegOpnd * newObjDst, IR::Opnd * typeSrc)
{
    // Inline slots are laid out directly after the DynamicObject header.
    size_t headerAllocSize = sizeof(Js::DynamicObject) + inlineSlotCount * sizeof(Js::Var);
    IR::SymOpnd * tempObjectSymOpnd;
    // isZeroed tells the mem-init helpers whether the allocation is already
    // zero-filled (letting them skip redundant null stores).
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(newObjInstr, newObjDst, IR::HelperAllocMemForScObject, headerAllocSize, &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableDynamicObject), this->outerMostLoopLabel, false);
    }
    else
    {
        // MOV [newObjDst + offset(vtable)], DynamicObject::vtable
        GenerateMemInit(newObjDst, 0, LoadVTableValueOpnd(newObjInstr, VTableValue::VtableDynamicObject), newObjInstr, isZeroed);
    }
    // MOV [newObjDst + offset(type)], newObjectType
    GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfType(), typeSrc, newObjInstr, isZeroed);
    // CALL JavascriptOperators::AllocMemForVarArray((slotCount - inlineSlotCount) * sizeof(Js::Var))
    if (slotCount > inlineSlotCount)
    {
        // Overflow slots don't fit inline: allocate a separate var array and
        // link it through the auxSlots field.
        size_t auxSlotsAllocSize = (slotCount - inlineSlotCount) * sizeof(Js::Var);
        IR::RegOpnd* auxSlots = IR::RegOpnd::New(TyMachPtr, m_func);
        GenerateRecyclerAllocAligned(IR::HelperAllocMemForVarArray, auxSlotsAllocSize, auxSlots, newObjInstr);
        GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), auxSlots, newObjInstr, isZeroed);
        // NOTE(review): this stores auxSlots to the same auxSlots field that
        // the GenerateMemInit above just initialized — looks redundant; confirm
        // whether one of the two stores can be removed.
        IR::IndirOpnd* newObjAuxSlots = IR::IndirOpnd::New(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachPtr, m_func);
        this->m_lowererMD.CreateAssign(newObjAuxSlots, auxSlots, newObjInstr);
    }
    else
    {
        GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), newObjInstr, isZeroed);
    }
    GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfObjectArray(), newObjInstr, isZeroed);
}
  2970. void
  2971. Lowerer::LowerNewScObjectSimple(IR::Instr * instr)
  2972. {
  2973. GenerateDynamicObjectAlloc(
  2974. instr,
  2975. 0,
  2976. 0,
  2977. instr->UnlinkDst()->AsRegOpnd(),
  2978. LoadLibraryValueOpnd(
  2979. instr,
  2980. Js::FunctionBody::DoObjectHeaderInliningForEmptyObjects()
  2981. ? LibraryValue::ValueObjectHeaderInlinedType
  2982. : LibraryValue::ValueObjectType));
  2983. instr->Remove();
  2984. }
// Lowers NewScObjectLiteral: allocates the DynamicObject for an object literal.
// If the literal's cached DynamicType already exists and is shared at JIT time,
// its address is baked in directly; otherwise a runtime check reloads the cached
// type slot and falls back to HelperEnsureObjectLiteralType to create/fetch it.
void
Lowerer::LowerNewScObjectLiteral(IR::Instr *newObjInstr)
{
    Func * func = m_func;

    // src2 selects the literal's cached-type slot in the function body.
    IR::IntConstOpnd * literalObjectIdOpnd = newObjInstr->UnlinkSrc2()->AsIntConstOpnd();
    Js::DynamicType ** literalTypeRef = newObjInstr->m_func->GetJnFunction()->GetObjectLiteralTypeRef(literalObjectIdOpnd->AsUint32());
    Js::DynamicType * literalType = *literalTypeRef;

    IR::LabelInstr * helperLabel = nullptr;
    IR::LabelInstr * allocLabel = nullptr;
    IR::Opnd * literalTypeRefOpnd;
    IR::Opnd * literalTypeOpnd;
    IR::Opnd * propertyArrayOpnd;

    // src1 identifies the PropertyIdArray describing the literal's shape.
    IR::IntConstOpnd * propertyArrayIdOpnd = newObjInstr->UnlinkSrc1()->AsIntConstOpnd();
    const Js::PropertyIdArray * propIds = Js::ByteCodeReader::ReadPropertyIdArray(propertyArrayIdOpnd->AsUint32(), newObjInstr->m_func->GetJnFunction());
    Js::ScriptContext *const scriptContext = newObjInstr->m_func->GetJnFunction()->GetScriptContext();
    uint inlineSlotCapacity = Js::JavascriptOperators::GetLiteralInlineSlotCapacity(propIds, scriptContext);
    uint slotCapacity = Js::JavascriptOperators::GetLiteralSlotCapacity(propIds, scriptContext);
    IR::RegOpnd * dstOpnd;

    literalTypeRefOpnd = IR::AddrOpnd::New(literalTypeRef, IR::AddrOpndKindDynamicMisc, this->m_func);
    propertyArrayOpnd = IR::AddrOpnd::New((Js::Var)propIds, IR::AddrOpndKindDynamicMisc, this->m_func);

    if (literalType == nullptr || !literalType->GetIsShared())
    {
        // Type not cached (or not yet shared) at JIT time: emit a runtime check
        // that reloads the cached slot and verifies it is non-null and shared,
        // branching to the helper path otherwise.
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        allocLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

        literalTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(literalTypeOpnd, IR::MemRefOpnd::New(literalTypeRef, TyMachPtr, func), newObjInstr);
        // Null cached type -> helper.
        InsertTestBranch(literalTypeOpnd, literalTypeOpnd,
            Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        // isShared flag clear -> helper.
        InsertTestBranch(IR::IndirOpnd::New(literalTypeOpnd->AsRegOpnd(), Js::DynamicType::GetOffsetOfIsShared(), TyInt8, func),
            IR::IntConstOpnd::New(1, TyInt8, func, true), Js::OpCode::BrEq_A, helperLabel, newObjInstr);

        // Dst stays linked; the instruction is removed at the end of this function.
        dstOpnd = newObjInstr->GetDst()->AsRegOpnd();
    }
    else
    {
        // The cached type is known and shared: bake its address in directly.
        literalTypeOpnd = IR::AddrOpnd::New(literalType, IR::AddrOpndKindDynamicType, func);
        dstOpnd = newObjInstr->UnlinkDst()->AsRegOpnd();
        // The JIT-time capacities must agree with the cached type's handler.
        Assert(inlineSlotCapacity == literalType->GetTypeHandler()->GetInlineSlotCapacity());
        Assert(slotCapacity == (uint)literalType->GetTypeHandler()->GetSlotCapacity());
    }

    if (helperLabel)
    {
        // Fast path continues straight to the allocation.
        InsertBranch(Js::OpCode::Br, allocLabel, newObjInstr);

        // Slow path to ensure the type is there
        newObjInstr->InsertBefore(helperLabel);
        IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperEnsureObjectLiteralType, func);
        // Helper arguments are loaded last-to-first: (scriptContext, propIds, typeRef).
        m_lowererMD.LoadHelperArgument(newObjInstr, literalTypeRefOpnd);
        m_lowererMD.LoadHelperArgument(newObjInstr, propertyArrayOpnd);
        LoadScriptContext(newObjInstr);
        IR::Instr * ensureTypeInstr = IR::Instr::New(Js::OpCode::Call, literalTypeOpnd, opndHelper, func);
        newObjInstr->InsertBefore(ensureTypeInstr);
        m_lowererMD.LowerCall(ensureTypeInstr, 0);
        newObjInstr->InsertBefore(allocLabel);
    }
    else
    {
        Assert(allocLabel == nullptr);
    }

    // For the next call:
    //      inlineSlotCapacity == Number of slots to allocate beyond the DynamicObject header
    //      slotCapacity - inlineSlotCapacity == Number of aux slots to allocate
    if(Js::FunctionBody::DoObjectHeaderInliningForObjectLiteral(propIds, scriptContext))
    {
        // With object-header inlining the first GetObjectHeaderInlinableSlotCapacity()
        // slots live inside the header itself, so they are excluded from both counts.
        Assert(inlineSlotCapacity >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
        Assert(inlineSlotCapacity == slotCapacity);

        slotCapacity = inlineSlotCapacity -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
    }

    GenerateDynamicObjectAlloc(
        newObjInstr,
        inlineSlotCapacity,
        slotCapacity,
        dstOpnd,
        literalTypeOpnd);

    newObjInstr->Remove();
}
  3059. IR::Instr*
  3060. Lowerer::LowerProfiledNewScArray(IR::JitProfilingInstr* arrInstr)
  3061. {
  3062. IR::Instr *instrPrev = arrInstr->m_prev;
  3063. /*
  3064. JavascriptArray *ProfilingHelpers::ProfiledNewScArray(
  3065. const uint length,
  3066. FunctionBody *const functionBody,
  3067. const ProfileId profileId)
  3068. */
  3069. m_lowererMD.LoadHelperArgument(arrInstr, IR::Opnd::CreateProfileIdOpnd(arrInstr->profileId, m_func));
  3070. m_lowererMD.LoadHelperArgument(arrInstr, CreateFunctionBodyOpnd(arrInstr->m_func));
  3071. m_lowererMD.LoadHelperArgument(arrInstr, arrInstr->UnlinkSrc1());
  3072. arrInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScArray, m_func));
  3073. m_lowererMD.LowerCall(arrInstr, 0);
  3074. return instrPrev;
  3075. }
  3076. IR::Instr *
  3077. Lowerer::LowerNewScArray(IR::Instr *arrInstr)
  3078. {
  3079. if (arrInstr->IsJitProfilingInstr())
  3080. {
  3081. return LowerProfiledNewScArray(arrInstr->AsJitProfilingInstr());
  3082. }
  3083. IR::Instr *instrPrev = arrInstr->m_prev;
  3084. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArray;
  3085. if (arrInstr->IsProfiledInstr() && arrInstr->m_func->HasProfileInfo())
  3086. {
  3087. RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3088. Assert(weakFuncRef);
  3089. Js::ProfileId profileId = static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3090. Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
  3091. Js::DynamicProfileInfo *profileInfo = functionBody->GetAnyDynamicProfileInfo();
  3092. Js::ArrayCallSiteInfo *arrayInfo = profileInfo->GetArrayCallSiteInfo(functionBody, profileId);
  3093. Assert(arrInstr->GetSrc1()->IsConstOpnd());
  3094. GenerateProfiledNewScArrayFastPath(arrInstr, arrayInfo, weakFuncRef, arrInstr->GetSrc1()->AsIntConstOpnd()->AsUint32());
  3095. if (arrInstr->GetDst() && arrInstr->GetDst()->GetValueType().IsLikelyNativeArray())
  3096. {
  3097. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3098. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3099. helperMethod = IR::HelperScrArr_ProfiledNewScArray;
  3100. }
  3101. }
  3102. LoadScriptContext(arrInstr);
  3103. IR::Opnd *src1Opnd = arrInstr->UnlinkSrc1();
  3104. m_lowererMD.LoadHelperArgument(arrInstr, src1Opnd);
  3105. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3106. return instrPrev;
  3107. }
  3108. template <typename ArrayType>
  3109. BOOL Lowerer::IsSmallObject(uint32 length)
  3110. {
  3111. if (ArrayType::HasInlineHeadSegment(length))
  3112. return true;
  3113. uint32 alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(length);
  3114. size_t allocSize = sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) + alignedHeadSegmentSize * sizeof(typename ArrayType::TElement);
  3115. return HeapInfo::IsSmallObject(HeapInfo::GetAlignedSizeNoCheck(allocSize));
  3116. }
// Emits an inline fast path for a profiled NewScArray of constant `length`:
// re-checks the profiled element kind at runtime, allocates the array and its
// head segment inline, and fills indices [length, allocated capacity) with the
// missing-item sentinel. Bails to the helper call at `instr` via helperLabel.
void
Lowerer::GenerateProfiledNewScArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef, uint32 length)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // GenerateArrayAlloc rounds `size` up to the allocated head-segment capacity.
    uint32 size = length;
    bool isZeroed;
    // NOTE(review): dst is dereferenced here but null-checked in the branches
    // below -- presumably dst always exists for this opcode; confirm.
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    // Only elements [length, size) need a missing-item fill; [0, length) is the
    // array's logical extent.
    uint32 i = length;

    if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeIntArray())
    {
        // Large arrays go through the helper; no fast path.
        if (!IsSmallObject<Js::JavascriptNativeIntArray>(length))
        {
            return;
        }
        // Runtime re-check: the profile may have changed since JIT time.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
        // Flags and call-site index are written as one combined store; verify layout.
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        for (; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeFloatArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeFloatArray>(length))
        {
            return;
        }
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);

        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        // Convert the element index from double units to MissingItem (Var) units
        // so the fill below writes Var-sized sentinels over the same byte range.
        i = i * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        if (!IsSmallObject<Js::JavascriptArray>(length))
        {
            return;
        }
        // Var array: no profile re-check needed.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        for (; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }

    // Skip past the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  3194. void
  3195. Lowerer::GenerateArrayInfoIsNativeIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, IR::LabelInstr * helperLabel)
  3196. {
  3197. Func * func = this->m_func;
  3198. InsertTestBranch(IR::MemRefOpnd::New(((char *)arrayInfo) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3199. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3200. }
  3201. void
  3202. Lowerer::GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, IR::LabelInstr * helperLabel)
  3203. {
  3204. Func * func = this->m_func;
  3205. InsertCompareBranch(IR::MemRefOpnd::New(((char *)arrayInfo) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3206. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3207. }
  3208. template <typename ArrayType>
  3209. static IR::JnHelperMethod GetArrayAllocMemHelper();
  3210. template <>
  3211. static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptArray>()
  3212. {
  3213. return IR::HelperAllocMemForJavascriptArray;
  3214. }
  3215. template <>
  3216. static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeIntArray>()
  3217. {
  3218. return IR::HelperAllocMemForJavascriptNativeIntArray;
  3219. }
  3220. template <>
  3221. static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeFloatArray>()
  3222. {
  3223. return IR::HelperAllocMemForJavascriptNativeFloatArray;
  3224. }
// Allocates an ArrayType instance (JavascriptArray / native int / native float)
// and its head segment, emitting inline initialization of the array header.
// In/out: *psize -- requested element count on input; on output the aligned
// head-segment capacity actually allocated. Returns a RegOpnd pointing at the
// head segment; *pIsHeadSegmentZeroed reports whether its memory is known zeroed.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();

    // Generate code as in JavascriptArray::NewLiteral
    uint32 count = *psize;
    uint alignedHeadSegmentSize;
    size_t arrayAllocSize;

    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    bool isHeadSegmentZeroed = false;
    if (ArrayType::HasInlineHeadSegment(count))
    {
        // Head segment lives immediately after the array object in the same
        // allocation; its address is computed with an LEA off dstOpnd (the LEA
        // is built now but inserted after the array allocation, below).
        uint32 allocCount = count == 0 ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
        arrayAllocSize = Js::JavascriptArray::DetermineAllocationSize<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
            IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
        isHeadSegmentZeroed = true;
    }
    else
    {
        // Need to allocate the head segment first so that if it throws,
        // we doesn't have the memory assigned to dstOpnd yet
        // Even if the instruction is marked as dstIsTempObject, we still should not allocate
        // that big of a chunk on the stack.
        alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(count);
        GenerateRecyclerAlloc(
            IR::HelperAllocMemForSparseArraySegmentBase,
            sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) +
            alignedHeadSegmentSize * sizeof(typename ArrayType::TElement),
            headOpnd,
            instr);

        arrayAllocSize = sizeof(ArrayType);
    }
    // Report the rounded-up capacity back to the caller.
    *psize = alignedHeadSegmentSize;

    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd,
        GetArrayAllocMemHelper<ArrayType>(), arrayAllocSize, &tempObjectSymOpnd);
    // Inline head segment is only zeroed if the object allocation itself was zeroed.
    isHeadSegmentZeroed = isHeadSegmentZeroed & isZeroed;

    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            this->LoadVTableValueOpnd(this->outerMostLoopLabel, ArrayType::VtableHelper()),
            this->outerMostLoopLabel, false);
    }
    else
    {
        // Vtable pointer at offset 0.
        GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, isZeroed);

    // Emit the flags and call site index together
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        // arrayInfo must be the profile entry for this very call site.
        Js::FunctionBody * functionBody = instr->m_func->GetJnFunction();
        Assert((uint32)(arrayInfo - functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, 0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif

    // The same at this:
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint16)Js::DynamicObjectFlags::InitialArrayValue, instr, isZeroed);
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayCallSiteIndex(), arrayCallSiteIndex, instr, isZeroed);
    // (single 32-bit store; relies on the flags/call-site-index adjacency the
    // callers assert)
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint)Js::DynamicObjectFlags::InitialArrayValue | ((uint)arrayCallSiteIndex << 16), instr, isZeroed);

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), count, instr, isZeroed);

    if (leaHeadInstr != nullptr)
    {
        // Inline head segment: materialize its address now that dstOpnd is set.
        instr->InsertBefore(leaHeadInstr);
        LowererMD::ChangeToLea(leaHeadInstr);
    }

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, isZeroed);

    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), count, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), alignedHeadSegmentSize, instr, isHeadSegmentZeroed);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, isHeadSegmentZeroed);

    *pIsHeadSegmentZeroed = isHeadSegmentZeroed;
    return headOpnd;
}
// Emits the inline fast path for a profiled `new Array(...)` construction:
// picks the layout (native int / native float / var) from the JIT-time profile,
// re-checks it at runtime, allocates the array, and fills the entire allocated
// head-segment capacity with the missing-item sentinel. Bails to the helper
// call at `instr` via helperLabel.
void
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef, uint32 length)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // GenerateArrayAlloc rounds `size` up to the allocated head-segment capacity.
    uint32 size = length;
    bool isZeroed;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;

    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        // Runtime re-check: the profile may have changed since JIT time.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
        // Flags and call-site index are written as one combined store; verify layout.
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);

        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (uint i = 0; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        // Var array: no profile re-check needed.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }

    // Skip past the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  3373. void
  3374. Lowerer::GenerateProfiledNewScIntArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef)
  3375. {
  3376. // Helper will deal with ForceES5ARray
  3377. if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3378. {
  3379. return;
  3380. }
  3381. if (!arrayInfo->IsNativeIntArray())
  3382. {
  3383. return;
  3384. }
  3385. Func * func = this->m_func;
  3386. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3387. GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
  3388. IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
  3389. Js::AuxArray<int32> * ints = (Js::AuxArray<int32> *)elementsOpnd->m_address;
  3390. uint32 size = ints->count;
  3391. // Generate code as in JavascriptArray::NewLiteral
  3392. bool isHeadSegmentZeroed;
  3393. IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
  3394. Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
  3395. IR::RegOpnd * headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
  3396. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3397. GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicMisc, m_func), instr, isHeadSegmentZeroed);
  3398. // Initialize the elements
  3399. uint i = 0;
  3400. if (ints->count > 16)
  3401. {
  3402. // Do memcpy if > 16
  3403. IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
  3404. const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
  3405. IR::Opnd * srcOpnd = IR::AddrOpnd::New(ints->elements, IR::AddrOpndKindDynamicMisc, func);
  3406. InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
  3407. GenerateMemCopy(dstElementsOpnd, srcOpnd, ints->count * sizeof(int32), instr);
  3408. i = ints->count;
  3409. }
  3410. else
  3411. {
  3412. for (; i < ints->count; i++)
  3413. {
  3414. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3415. ints->elements[i], instr, isHeadSegmentZeroed);
  3416. }
  3417. }
  3418. Assert(i == ints->count);
  3419. for (; i < size; i++)
  3420. {
  3421. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3422. Js::JavascriptNativeIntArray::MissingItem, instr, isHeadSegmentZeroed);
  3423. }
  3424. // Skip pass the helper call
  3425. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3426. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3427. instr->InsertBefore(helperLabel);
  3428. instr->InsertAfter(doneLabel);
  3429. }
// Emits the inline fast path for a profiled float-array literal (NewScFltArray):
// verifies the call site still produces native float (and not int) arrays,
// allocates the array, bulk-copies the literal's doubles, and fills the rest of
// the head-segment capacity with the missing-item sentinel. Bails to the helper
// call at `instr` via helperLabel.
void
Lowerer::GenerateProfiledNewScFloatArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef)
{
    if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }

    if (!arrayInfo->IsNativeFloatArray())
    {
        return;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);

    // If the array info hasn't mark as not int array yet, go to the helper and mark it.
    // It really is just for assert purpose in JavascriptNativeFloatArray::ToVarArray
    GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);

    // src1 holds the address of the literal's double AuxArray.
    IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
    Js::AuxArray<double> * doubles = (Js::AuxArray<double> *)elementsOpnd->m_address;
    uint32 size = doubles->count;

    // Generate code as in JavascriptArray::NewLiteral
    bool isHeadSegmentZeroed;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    // Flags and call-site index are written as one combined store; verify layout.
    Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
    IR::RegOpnd * headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
    GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isHeadSegmentZeroed);

    // Initialize the elements
    IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
    IR::Opnd * srcOpnd = IR::AddrOpnd::New(doubles->elements, IR::AddrOpndKindDynamicMisc, func);
    InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
    GenerateMemCopy(dstElementsOpnd, srcOpnd, doubles->count * sizeof(double), instr);

    // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
    // The tail [doubles->count, size) is filled in Var-sized units so the
    // sentinel writes cover the same byte range as the double elements would.
    uint const offsetStart = sizeof(Js::SparseArraySegmentBase) + doubles->count * sizeof(double);
    uint const missingItem = (size - doubles->count) * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
    for (uint i = 0; i < missingItem; i++)
    {
        GenerateMemInit(headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
            IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true), instr, isHeadSegmentZeroed);
    }

    // Skip past the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  3476. IR::Instr *
  3477. Lowerer::LowerNewScIntArray(IR::Instr *arrInstr)
  3478. {
  3479. IR::Instr *instrPrev = arrInstr->m_prev;
  3480. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScIntArray;
  3481. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3482. {
  3483. RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3484. if (weakFuncRef)
  3485. {
  3486. Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
  3487. // Technically a load of the same memory address either way.
  3488. Js::ProfileId profileId =
  3489. arrInstr->IsJitProfilingInstr()
  3490. ? arrInstr->AsJitProfilingInstr()->profileId
  3491. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3492. Js::ArrayCallSiteInfo *arrayInfo =
  3493. functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
  3494. // Only do fast-path if it isn't a JitProfiling instr and not copy-on-access array
  3495. if (arrInstr->IsProfiledInstr()
  3496. && (PHASE_OFF1(Js::Phase::CopyOnAccessArrayPhase) || arrayInfo->isNotCopyOnAccessArray) && !PHASE_FORCE1(Js::Phase::CopyOnAccessArrayPhase))
  3497. {
  3498. GenerateProfiledNewScIntArrayFastPath(arrInstr, arrayInfo, weakFuncRef);
  3499. }
  3500. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3501. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3502. helperMethod = IR::HelperScrArr_ProfiledNewScIntArray;
  3503. }
  3504. }
  3505. LoadScriptContext(arrInstr);
  3506. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3507. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3508. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3509. return instrPrev;
  3510. }
  3511. IR::Instr *
  3512. Lowerer::LowerNewScFltArray(IR::Instr *arrInstr)
  3513. {
  3514. IR::Instr *instrPrev = arrInstr->m_prev;
  3515. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScFltArray;
  3516. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3517. {
  3518. RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3519. if (weakFuncRef)
  3520. {
  3521. Js::ProfileId profileId =
  3522. arrInstr->IsJitProfilingInstr()
  3523. ? arrInstr->AsJitProfilingInstr()->profileId
  3524. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3525. Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
  3526. Js::ArrayCallSiteInfo *arrayInfo =
  3527. functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
  3528. // Only do fast-path if it isn't a JitProfiling instr
  3529. if (arrInstr->IsProfiledInstr()) {
  3530. GenerateProfiledNewScFloatArrayFastPath(arrInstr, arrayInfo, weakFuncRef);
  3531. }
  3532. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3533. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3534. helperMethod = IR::HelperScrArr_ProfiledNewScFltArray;
  3535. }
  3536. }
  3537. LoadScriptContext(arrInstr);
  3538. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3539. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3540. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3541. return instrPrev;
  3542. }
  3543. IR::Instr *
  3544. Lowerer::LowerArraySegmentVars(IR::Instr *arrayInstr)
  3545. {
  3546. IR::Instr * instrPrev;
  3547. IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperArraySegmentVars, m_func);
  3548. instrPrev = m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc2());
  3549. m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc1());
  3550. arrayInstr->m_opcode = Js::OpCode::Call;
  3551. arrayInstr->SetSrc1(opndHelper);
  3552. m_lowererMD.LowerCall(arrayInstr, 0);
  3553. return instrPrev;
  3554. }
// Lowers a profiled "new <ctor>(...)" that allocates an array (simple JIT with
// profiling). The ProfiledNewScObjArray helper checks whether Array has been
// overwritten by the user and, if it hasn't, possibly allocates a native array.
// (hasArgs is not consulted here; the ArgOut chain itself drives argument lowering.)
IR::Instr* Lowerer::LowerProfiledNewArray(IR::JitProfilingInstr* instr, bool hasArgs)
{
    // Use the special helper which checks whether Array has been overwritten by the user and if
    // it hasn't, possibly allocates a native array

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(instr);

    Assert(instr->isNewArray);
    Assert(instr->arrayProfileId != Js::Constants::NoProfileId);
    Assert(instr->profileId != Js::Constants::NoProfileId);

    bool isSpreadCall = instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread;

    // First ("this") argument is null: the runtime allocates the result object.
    // A spread call carries one extra leading argument, hence the offset of 1.
    m_lowererMD.LoadNewScObjFirstArg(instr, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindConstantVar, m_func, true), isSpreadCall ? 1 : 0);

    if (isSpreadCall)
    {
        this->LowerSpreadCall(instr, Js::CallFlags_New, true);
    }
    else
    {
        // Lower the ArgOut chain, reserving room for 4 extra helper arguments,
        // then push (reverse order, so the helper receives them as: frame pointer,
        // function object, call-site profileId, array-site profileId, user args).
        const int32 argCount = m_lowererMD.LowerCallArgs(instr, Js::CallFlags_New, 4);
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScObjArray, m_func));
        m_lowererMD.LowerCall(instr, static_cast<Js::ArgSlot>(argCount));
    }

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
  3584. ///----------------------------------------------------------------------------
  3585. ///
  3586. /// Lowerer::LowerNewScObject
  3587. ///
3588. /// Machine independent lowering of a NewScObject (object allocation + constructor call) instr.
  3589. ///
  3590. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerNewScObject(IR::Instr *newObjInstr, bool callCtor, bool hasArgs, bool isBaseClassConstructorNewScObject)
{
    // Profiled new-array instructions have their own dedicated lowering path.
    if (newObjInstr->IsJitProfilingInstr() && newObjInstr->AsJitProfilingInstr()->isNewArray)
    {
        Assert(callCtor);
        return LowerProfiledNewArray(newObjInstr->AsJitProfilingInstr(), hasArgs);
    }

    bool isSpreadCall = newObjInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread;

    Func* func = newObjInstr->m_func;

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

    IR::Opnd *ctorOpnd = newObjInstr->GetSrc1();
    IR::RegOpnd *newObjDst = newObjInstr->GetDst()->AsRegOpnd();

    Assert(!callCtor || !hasArgs || (newObjInstr->GetSrc2() != nullptr /*&& newObjInstr->GetSrc2()->IsSymOpnd()*/));

    bool skipNewScObj = false;      // set if no default new object is allocated at all (built-in ctors)
    bool returnNewScObj = false;    // set if the allocated object is known to be the result (ctor has no explicit return)
    bool emitBailOut = false;       // set if the fixed-ctor-cache guard failure should bail out instead of calling a helper

    // If we haven't yet split NewScObject into NewScObjectNoCtor and CallI, we will need a temporary register
    // to hold the result of the object allocation.
    IR::RegOpnd* createObjDst = callCtor ? IR::RegOpnd::New(TyVar, func) : newObjDst;
    IR::LabelInstr* helperOrBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ true);
    IR::LabelInstr* callCtorLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ false);

    // Try to emit the fast allocation and construction path.
    bool usedFixedCtorCache = TryLowerNewScObjectWithFixedCtorCache(newObjInstr, createObjDst, helperOrBailoutLabel, callCtorLabel, skipNewScObj, returnNewScObj, emitBailOut);

    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
    Assert(!skipNewScObj || !returnNewScObj);
    Assert(usedFixedCtorCache || !skipNewScObj);
    Assert(!usedFixedCtorCache || newObjInstr->HasFixedFunctionAddressTarget());
    Assert(!skipNewScObj || !emitBailOut);

#if DBG
    if (usedFixedCtorCache)
    {
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        Assert((ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::ErrorOnNew) == 0);
        Assert(!!(ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::SkipDefaultNewObject) == skipNewScObj);
    }
#endif

    // An empty ArgOut chain is treated the same as having no arguments at all.
    IR::Instr* startCallInstr = nullptr;
    if (callCtor && hasArgs)
    {
        hasArgs = !newObjInstr->HasEmptyArgOutChain(&startCallInstr);
    }

    // If we're not skipping the default new object, let's emit bailout or a call to NewScObject* helper
    IR::JnHelperMethod newScHelper = IR::HelperInvalid;
    IR::Instr *newScObjCall = nullptr;
    if (!skipNewScObj)
    {
        // If we emitted the fast path, this block is a helper block.
        if (usedFixedCtorCache)
        {
            newObjInstr->InsertBefore(helperOrBailoutLabel);
        }
        if (emitBailOut)
        {
            // Split the bailout onto its own instruction: the original instruction keeps
            // carrying the ctor call (transferred to a fresh instr), while the old one
            // becomes the BailOut placed after it.
            IR::Instr* bailOutInstr = newObjInstr;
            newObjInstr = IR::Instr::New(newObjInstr->m_opcode, func);
            bailOutInstr->TransferTo(newObjInstr);
            bailOutInstr->m_opcode = Js::OpCode::BailOut;
            bailOutInstr->InsertAfter(newObjInstr);
            GenerateBailOut(bailOutInstr);
        }
        else
        {
            Assert(!newObjDst->CanStoreTemp());
            // createObjDst = NewScObject...(ctorOpnd)
            // Choose the helper variant based on whether we call the ctor here, whether there
            // are arguments, and whether this is a base-class-constructor allocation.
            newScHelper = !callCtor ?
                (isBaseClassConstructorNewScObject ?
                    (hasArgs ? IR::HelperNewScObjectNoCtorFull : IR::HelperNewScObjectNoArgNoCtorFull) :
                    (hasArgs ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArgNoCtor)) :
                (hasArgs || usedFixedCtorCache ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArg);

            LoadScriptContext(newObjInstr);
            m_lowererMD.LoadHelperArgument(newObjInstr, newObjInstr->GetSrc1());

            newScObjCall = IR::Instr::New(Js::OpCode::Call, createObjDst, IR::HelperCallOpnd::New(newScHelper, func), func);
            newObjInstr->InsertBefore(newScObjCall);
            m_lowererMD.LowerCall(newScObjCall, 0);
        }
    }

    // If we call HelperNewScObjectNoArg directly, we won't be calling the constructor from here, because the helper will do it.
    // We could probably avoid this complexity by converting NewScObjectNoArg to NewScObject in the IRBuilder, once we have dedicated
    // code paths for new Object() and new Array().
    callCtor &= hasArgs || usedFixedCtorCache;
    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");

    newObjInstr->InsertBefore(callCtorLabel);

    if (callCtor && usedFixedCtorCache)
    {
        IR::JnHelperMethod ctorHelper = IR::JnHelperMethodCount;   // sentinel: no specialized helper chosen yet

        // If we have no arguments (i.e. the argument chain is empty), we can recognize a couple of common special cases, such
        // as new Object() or new Array(), for which we have optimized helpers.
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        if (!hasArgs && (ctorInfo == &Js::JavascriptObject::EntryInfo::NewInstance || ctorInfo == &Js::JavascriptArray::EntryInfo::NewInstance))
        {
            if (ctorInfo == &Js::JavascriptObject::EntryInfo::NewInstance)
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptObjectNoArg;
                callCtor = false;
            }
            else if (ctorInfo == &Js::JavascriptArray::EntryInfo::NewInstance)
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptArrayNoArg;
                callCtor = false;
            }
            if (!callCtor)
            {
                // newObjDst = HelperNewJavascript{Object|Array}NoArg(scriptContext)
                LoadScriptContext(newObjInstr);
                IR::Instr *ctorCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(ctorHelper, func), func);
                newObjInstr->InsertBefore(ctorCall);
                m_lowererMD.LowerCall(ctorCall, 0);
            }
        }
    }

    IR::AutoReuseOpnd autoReuseSavedCtorOpnd;
    if (callCtor)
    {
        // Load the first argument, which is either the object just created or null. Spread has an extra argument.
        IR::Instr * argInstr = this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, createObjDst, isSpreadCall ? 1 : 0);

        IR::Instr * insertAfterCtorInstr = newObjInstr->m_next;

        if (skipNewScObj)
        {
            // Since we skipped the default new object, we must be returning whatever the constructor returns
            // (which better be an Object), so let's just use newObjDst directly.
            // newObjDst = newObjInstr->m_src1(createObjDst, ...)
            Assert(newObjInstr->GetDst() == newObjDst);
            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }
        }
        else
        {
            // We may need to return the default new object or whatever the constructor returns. Let's stash
            // away the constructor's return in a temporary operand, and do the right check, if necessary.
            // ctorResultObjOpnd = newObjInstr->m_src1(createObjDst, ...)
            IR::RegOpnd *ctorResultObjOpnd = IR::RegOpnd::New(TyVar, func);
            newObjInstr->UnlinkDst();
            newObjInstr->SetDst(ctorResultObjOpnd);

            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }

            if (returnNewScObj)
            {
                // MOV newObjDst, createObjDst
                this->m_lowererMD.CreateAssign(newObjDst, createObjDst, insertAfterCtorInstr);
            }
            else
            {
                // Select the ctor's return value if it is an object, otherwise the allocated object.
                LowerGetNewScObjectCommon(ctorResultObjOpnd, ctorResultObjOpnd, createObjDst, insertAfterCtorInstr);
                this->m_lowererMD.CreateAssign(newObjDst, ctorResultObjOpnd, insertAfterCtorInstr);
            }
        }

        // We don't ever need to update the constructor cache, if we hard coded it. Caches requiring update after constructor
        // don't get cloned, and those that don't require update will never need one anymore.
        if (!usedFixedCtorCache)
        {
            LowerUpdateNewScObjectCache(insertAfterCtorInstr, newObjDst, ctorOpnd, false /* isCtorFunction */);
        }
    }
    else
    {
        if (newObjInstr->IsJitProfilingInstr())
        {
            Assert(m_func->IsSimpleJit());
            Assert(!Js::FunctionBody::IsNewSimpleJit());

            // This path skipped calling the Ctor, which skips calling LowerCallI with newObjInstr, meaning that the call will not be profiled.
            // So we insert it manually here.

            if(newScHelper == IR::HelperNewScObjectNoArg &&
                newObjDst &&
                ctorOpnd->IsRegOpnd() &&
                newObjDst->AsRegOpnd()->m_sym == ctorOpnd->AsRegOpnd()->m_sym)
            {
                Assert(newObjInstr->m_func->IsSimpleJit());
                Assert(createObjDst != newObjDst);

                // The function object sym is going to be overwritten, so save it in a temp for profiling
                IR::RegOpnd *const savedCtorOpnd = IR::RegOpnd::New(ctorOpnd->GetType(), newObjInstr->m_func);
                autoReuseSavedCtorOpnd.Initialize(savedCtorOpnd, newObjInstr->m_func);
                Lowerer::InsertMove(savedCtorOpnd, ctorOpnd, newObjInstr);
                ctorOpnd = savedCtorOpnd;
            }

            // It is a constructor (CallFlags_New) and therefore a single argument (this) would have been given.
            const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_New, 1, func);

            Assert(newScObjCall);
            IR::JitProfilingInstr *const newObjJitProfilingInstr = newObjInstr->AsJitProfilingInstr();
            GenerateCallProfiling(
                newObjJitProfilingInstr->profileId,
                newObjJitProfilingInstr->inlineCacheIndex,
                createObjDst,
                ctorOpnd,
                info,
                false,
                newScObjCall,
                newObjInstr);
        }

        // MOV newObjDst, createObjDst
        if (!skipNewScObj && createObjDst != newObjDst)
        {
            this->m_lowererMD.CreateAssign(newObjDst, createObjDst, newObjInstr);
        }
        newObjInstr->Remove();
    }

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
// Emits a call to a SimpleJit profiling helper, after 'insertAfter', recording the
// outcome of the machine call 'callInstr' that just happened.
//   profileId             - call-site profile id the result is recorded under
//   inlineCacheIndex      - inline cache index, or NoInlineCacheIndex for the default-index helper
//   retval                - operand holding the call's return value (may be null)
//   calleeFunctionObjOpnd - the function object that was called
//   callInfo              - CallInfo constant (flags / arg count) for the call
//   returnTypeOnly        - profile only the return type; a no-op if retval is null
// Returns the result of lowering the profiling call (or 'insertAfter' when nothing
// needed to be emitted).
IR::Instr*
Lowerer::GenerateCallProfiling(Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex, IR::Opnd* retval, IR::Opnd*calleeFunctionObjOpnd, IR::Opnd* callInfo, bool returnTypeOnly, IR::Instr*callInstr,IR::Instr*insertAfter)
{
    // This should only ever happen in profiling simplejit
    Assert(m_func->DoSimpleJitDynamicProfile());

    // Make sure they gave us the correct call instruction
#if defined(_M_IX86) || defined(_M_X64)
    Assert(callInstr->m_opcode == Js::OpCode::CALL);
#elif defined(_M_ARM)
    Assert(callInstr->m_opcode == Js::OpCode::BLX);
#endif
    Func*const func = insertAfter->m_func;

    {
        // First, we should save the implicit call flags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        const auto saveOpnd = IR::RegOpnd::New(starFlag->GetType(), func);

        IR::AutoReuseOpnd a(starFlag, func), b(saveOpnd, func);
        //Save the flags (before call) and restore them (after the call)
        this->InsertMove(saveOpnd, starFlag, callInstr);

        // Note: On arm this is slightly inefficient because it forces a reload of the memory location to a reg (whereas x86 can load straight from hard-coded memory into a reg)
        //    But it works and making it not reload the memory location would force more refactoring.
        this->InsertMove(starFlag, saveOpnd, insertAfter->m_next);
    }

    // Profile a call that just happened: push some extra info on the stack and call the helper
    if (!retval)
    {
        if (returnTypeOnly)
        {
            // If we are only supposed to profile the return type but don't use the return value, we might
            //    as well do nothing!
            return insertAfter;
        }
        // No return value to record: pass null to the helper instead.
        retval = IR::AddrOpnd::NewNull(func);
    }

    IR::Instr* profileCall = IR::Instr::New(Js::OpCode::Call, func);

    // Pick the helper variant; only the full profile-call helper takes an inline cache index.
    bool needInlineCacheIndex;
    IR::JnHelperMethod helperMethod;
    if (returnTypeOnly)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileReturnTypeCall;
    }
    else if(inlineCacheIndex == Js::Constants::NoInlineCacheIndex)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileCall_DefaultInlineCacheIndex;
    }
    else
    {
        needInlineCacheIndex = true;
        helperMethod = IR::HelperSimpleProfileCall;
    }

    profileCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));

    insertAfter->InsertAfter(profileCall);

    // Helper arguments, pushed in reverse order.
    m_lowererMD.LoadHelperArgument(profileCall, callInfo);
    m_lowererMD.LoadHelperArgument(profileCall, calleeFunctionObjOpnd);
    m_lowererMD.LoadHelperArgument(profileCall, retval);
    if(needInlineCacheIndex)
    {
        m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateInlineCacheIndexOpnd(inlineCacheIndex, func));
    }
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateProfileIdOpnd(profileId, func));

    // Push the frame pointer so that the profiling call can grab the stack layout
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateFramePointerOpnd(func));

    // No args: the helper is stdcall
    return m_lowererMD.LowerCall(profileCall, 0);
}
// Attempts to emit the fast object allocation path for NewScObject using a fixed
// (cloned, JIT-time) constructor cache. On success, code is emitted before
// 'newObjInstr' that checks the cache guard (branching to 'helperOrBailoutLabel'
// on failure), allocates the object into 'newObjDst', and jumps to 'callCtorLabel'.
// Out-params (set for the caller, LowerNewScObject):
//   skipNewScObj   - built-in ctor; no default object is allocated at all
//   returnNewScObj - ctor has no explicit return value, so the allocated object is the result
//   emitBailOut    - the instruction carries BailOutFailedCtorGuardCheck; guard failure bails out
// Returns true if the fast path was emitted, false to fall back to the generic helpers.
bool Lowerer::TryLowerNewScObjectWithFixedCtorCache(IR::Instr* newObjInstr, IR::RegOpnd* newObjDst,
    IR::LabelInstr* helperOrBailoutLabel, IR::LabelInstr* callCtorLabel, bool& skipNewScObj, bool& returnNewScObj, bool& emitBailOut)
{
    skipNewScObj = false;
    returnNewScObj = false;

    AssertMsg(!PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func) || !newObjInstr->HasBailOutInfo(),
        "Why do we have bailout on NewScObject when ObjTypeSpecNewObj is off?");

    // Both relevant optimization phases off: nothing to do.
    if (PHASE_OFF(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) && PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func))
    {
        return false;
    }

    Js::JitTimeConstructorCache* ctorCache;

    if (newObjInstr->HasBailOutInfo())
    {
        Assert(newObjInstr->IsNewScObjectInstr());
        Assert(newObjInstr->IsProfiledInstr());
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck);

        emitBailOut = true;

        ctorCache = newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId));
        Assert(ctorCache != nullptr);
        Assert(!ctorCache->skipNewScObject);
        Assert(!ctorCache->typeIsFinal || ctorCache->ctorHasNoExplicitReturnValue);

        // The guard protects downstream property operations; register it.
        LinkCtorCacheToGuardedProperties(ctorCache);
    }
    else
    {
        if (newObjInstr->m_opcode == Js::OpCode::NewScObjArray || newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
        {
            // These instr's carry a profile that indexes the array call site info, not the ctor cache.
            return false;
        }

        ctorCache = newObjInstr->IsProfiledInstr() ? newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId)) : nullptr;

        if (ctorCache == nullptr)
        {
            // No cloned cache available (or no profile): trace why and fall back.
            if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
            {
                Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
                wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(L"FixedNewObj: function %s (%s): lowering non-fixed new script object for %s, because %s.\n",
                    callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                    newObjInstr->IsProfiledInstr() ? L"constructor cache hasn't been cloned" : L"instruction is not profiled");
                Output::Flush();
            }
            return false;
        }
    }

    Assert(ctorCache != nullptr);

    // We should only have cloned if the script contexts match.
    Assert(newObjInstr->m_func->GetScriptContext() == ctorCache->scriptContext);

    // Built-in constructors don't need a default new object. Since we know which constructor we're calling, we can skip creating a default
    // object and call a specialized helper (or even constructor, directly) avoiding the checks in generic NewScObjectCommon.
    if (ctorCache->skipNewScObject)
    {
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
        {
            Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
            const Js::JavascriptFunction* ctor = ctorCache->constructor;
            Js::FunctionBody* ctorBody = ctor->GetFunctionInfo()->HasBody() ? ctor->GetFunctionInfo()->GetFunctionBody() : nullptr;
            const wchar_t* ctorName = ctorBody != nullptr ? ctorBody->GetDisplayName() : L"<unknown>";

            wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

            Output::Print(L"FixedNewObj: function %s (%s): lowering skipped new script object for %s with %s ctor <unknown> (%s %s).\n",
                callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
                ctorName, ctorBody ? ctorBody->GetDebugNumberSet(debugStringBuffer) : L"(null)");
            Output::Flush();
        }

        // All built-in constructors share a special singleton cache that is never checked and never invalidated. It cannot be used
        // as a guard to protect any property operations downstream from the constructor. If this ever becomes a performance issue,
        // we could have a dedicated cache for each built-in constructor, populate it and invalidate it as any other constructor cache.
        AssertMsg(!emitBailOut, "Can't bail out on constructor cache guard for built-in constructors.");

        skipNewScObj = true;
        // Seed newObjDst with null; the ctor call's result becomes the real value.
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        this->m_lowererMD.CreateAssign(newObjDst, zeroOpnd, newObjInstr);
        return true;
    }

    AssertMsg(ctorCache->type != nullptr, "Why did we hard-code a mismatched, invalidated or polymorphic constructor cache?");

    if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
    {
        Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
        const Js::JavascriptFunction* constructor = ctorCache->constructor;
        Js::FunctionBody* constructorBody = constructor->GetFunctionInfo()->HasBody() ? constructor->GetFunctionInfo()->GetFunctionBody() : nullptr;
        const wchar_t* constructorName = constructorBody != nullptr ? constructorBody->GetDisplayName() : L"<unknown>";

        wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
        {
            Output::Print(L"FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): type = %p, slots = %d, inlined slots = %d.\n",
                callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
                constructorName, constructorBody ? constructorBody->GetDebugNumberSet(debugStringBuffer) : L"(null)",
                ctorCache->type, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        else
        {
            Output::Print(L"FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): slots = %d, inlined slots = %d.\n",
                callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
                constructorName, debugStringBuffer, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        Output::Flush();
    }

    // If the constructor has no return statements, we can safely return the object that was created here.
    // No need to check what the constructor returned - it must be undefined.
    returnNewScObj = ctorCache->ctorHasNoExplicitReturnValue;

    // Guard check: a zero guard value means the cache was invalidated; take the
    // helper/bailout path in that case.
    Assert(Js::ConstructorCache::GetSizeOfGuardValue() == static_cast<size_t>(TySize[TyMachPtr]));
    IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(const_cast<void*>(ctorCache->runtimeCache->GetAddressOfGuardValue()), TyMachReg, this->m_func,
        IR::AddrOpndKindDynamicGuardValueRef);
    IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
    InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, helperOrBailoutLabel, newObjInstr);

    // If we are calling new on a class constructor, the contract is that we pass new.target as the 'this' argument.
    // function is the constructor on which we called new - which is new.target.
    Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
    Js::FunctionInfo* functionInfo = Js::JavascriptOperators::GetConstructorFunctionInfo(ctor, this->m_func->GetScriptContext());
    Assert(functionInfo);

    if (functionInfo->IsClassConstructor())
    {
        // MOV newObjDst, function
        this->m_lowererMD.CreateAssign(newObjDst, newObjInstr->GetSrc1(), newObjInstr);
    }
    else
    {
        const Js::DynamicType* newObjectType = ctorCache->type;
        Assert(newObjectType->GetIsShared());

        IR::AddrOpnd* typeSrc = IR::AddrOpnd::New(const_cast<void *>(reinterpret_cast<const void *>(newObjectType)), IR::AddrOpndKindDynamicType, m_func);

        // For the next call:
        //     inlineSlotSize == Number of slots to allocate beyond the DynamicObject header
        //     slotSize - inlineSlotSize == Number of aux slots to allocate
        int inlineSlotSize = ctorCache->inlineSlotCount;
        int slotSize = ctorCache->slotCount;
        if (newObjectType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
        {
            Assert(inlineSlotSize >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
            Assert(inlineSlotSize == slotSize);
            // Header-inlined slots live inside the object header itself; exclude them.
            slotSize = inlineSlotSize -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
        }
        GenerateDynamicObjectAlloc(newObjInstr, inlineSlotSize, slotSize, newObjDst, typeSrc);
    }

    // JMP $callCtor
    IR::BranchInstr *callCtorBranch = IR::BranchInstr::New(Js::OpCode::Br, callCtorLabel, m_func);
    newObjInstr->InsertBefore(callCtorBranch);
    this->m_lowererMD.LowerUncondBranch(callCtorBranch);
    return true;
}
  4020. void
  4021. Lowerer::GenerateRecyclerAllocAligned(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  4022. {
  4023. IR::LabelInstr * allocDoneLabel = nullptr;
  4024. if (!PHASE_OFF(Js::JitAllocNewObjPhase, insertionPointInstr->m_func->GetJnFunction()) && HeapInfo::IsSmallObject(allocSize))
  4025. {
  4026. IR::LabelInstr * allocHelperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4027. allocDoneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, inOpHelper);
  4028. this->m_lowererMD.GenerateFastRecyclerAlloc(allocSize, newObjDst, insertionPointInstr, allocHelperLabel, allocDoneLabel);
  4029. // $allocHelper:
  4030. insertionPointInstr->InsertBefore(allocHelperLabel);
  4031. }
  4032. // call JavascriptOperators::AllocMemForScObject(allocSize, scriptContext->GetRecycler())
  4033. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, this->LoadScriptContextValueOpnd(insertionPointInstr, ScriptContextValue::ScriptContextRecycler));
  4034. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, IR::IntConstOpnd::New((int32)allocSize, TyUint32, m_func, true));
  4035. IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(allocHelper, m_func), m_func);
  4036. insertionPointInstr->InsertBefore(newObjCall);
  4037. this->m_lowererMD.LowerCall(newObjCall, 0);
  4038. if (allocDoneLabel != nullptr)
  4039. {
  4040. // $allocDone:
  4041. insertionPointInstr->InsertBefore(allocDoneLabel);
  4042. }
  4043. }
  4044. IR::Instr *
  4045. Lowerer::LowerGetNewScObject(IR::Instr *instr)
  4046. {
  4047. Assert(instr);
  4048. Assert(instr->m_opcode == Js::OpCode::GetNewScObject);
  4049. Assert(instr->GetDst());
  4050. Assert(instr->GetSrc1());
  4051. Assert(instr->GetSrc2());
  4052. const auto instrPrev = instr->m_prev;
  4053. Assert(instrPrev);
  4054. LowerGetNewScObjectCommon(
  4055. instr->GetDst()->AsRegOpnd(),
  4056. instr->GetSrc1()->AsRegOpnd(),
  4057. instr->GetSrc2()->AsRegOpnd(),
  4058. instr);
  4059. instr->Remove();
  4060. return instrPrev;
  4061. }
// Emits, before 'insertBeforeInstr', the selection of a constructor call's final
// result: the constructor's return value if it is a JS object, otherwise the
// default object that was allocated for the call ('this').
void
Lowerer::LowerGetNewScObjectCommon(
    IR::RegOpnd *const resultObjOpnd,
    IR::RegOpnd *const constructorReturnOpnd,
    IR::RegOpnd *const newObjOpnd,
    IR::Instr *insertBeforeInstr)
{
    Assert(resultObjOpnd);
    Assert(constructorReturnOpnd);
    Assert(newObjOpnd);
    Assert(insertBeforeInstr);

    // (newObjOpnd == 'this' value passed to constructor)
    //
    // if (!IsJsObject(constructorReturnOpnd))
    //     goto notObjectLabel
    // newObjOpnd = constructorReturnOpnd
    // notObjectLabel:
    // resultObjOpnd = newObjOpnd
    if(!constructorReturnOpnd->IsEqual(newObjOpnd))
    {
        // Need to check whether the constructor returned an object

        IR::LabelInstr *notObjectLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        Assert(insertBeforeInstr->m_prev);
        IR::LabelInstr *const doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertBeforeInstr->InsertBefore(doneLabel);
        insertBeforeInstr = doneLabel;

#if defined(_M_ARM32_OR_ARM64)
        // On ARM the object check is done via the Op_IsObject helper call rather
        // than an inline test.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, constructorReturnOpnd);

        IR::Opnd * targetOpnd = IR::RegOpnd::New(StackSym::New(TyInt32,m_func), TyInt32, m_func);
        IR::Instr * callIsObjectInstr = IR::Instr::New(Js::OpCode::Call, targetOpnd, m_func);
        insertBeforeInstr->InsertBefore(callIsObjectInstr);
        this->m_lowererMD.ChangeToHelperCall(callIsObjectInstr, IR::HelperOp_IsObject);

        InsertTestBranch( targetOpnd, targetOpnd, Js::OpCode::BrEq_A, notObjectLabel,insertBeforeInstr);
#else
        m_lowererMD.GenerateIsJsObjectTest(constructorReturnOpnd, insertBeforeInstr, notObjectLabel);
#endif

        // Value returned by constructor is an object (use constructorReturnOpnd)
        if(!resultObjOpnd->IsEqual(constructorReturnOpnd))
        {
            this->m_lowererMD.CreateAssign(resultObjOpnd, constructorReturnOpnd, insertBeforeInstr);
        }
        insertBeforeInstr->InsertBefore(
            m_lowererMD.LowerUncondBranch(IR::BranchInstr::New(Js::OpCode::Br, doneLabel, m_func)));

        // Value returned by constructor is not an object (use newObjOpnd)
        insertBeforeInstr->InsertBefore(notObjectLabel);
    }
    if(!resultObjOpnd->IsEqual(newObjOpnd))
    {
        this->m_lowererMD.CreateAssign(resultObjOpnd, newObjOpnd, insertBeforeInstr);
    }

    // fall through to insertBeforeInstr or doneLabel
}
  4114. ///----------------------------------------------------------------------------
  4115. ///
  4116. /// Lowerer::LowerUpdateNewScObjectCache
  4117. ///
  4118. ///----------------------------------------------------------------------------
// Emits, before 'insertInstr', the post-constructor update of the constructor
// cache: if src1 is a function whose cache has updateAfterCtor set, call the
// UpdateNewScObjectCache helper with (src1, dst, scriptContext). 'isCtorFunction'
// lets callers that already know src1 is a function skip the type-id check.
IR::Instr *
Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR::Opnd *src1, const bool isCtorFunction)
{
    // if (!isCtorFunction)
    // {
    //     MOV r1, [src1 + offset(type)]
    //     CMP [r1 + offset(typeId)], TypeIds_Function         -- check base TypeIds_Function
    // }
    // JNE $fallThru
    // MOV r2, [src1 + offset(constructorCache)]
    // MOV r3, [r2 + offset(updateAfterCtor)]
    // TEST r3, r3                                             -- check if updateAfterCtor is 0
    // JEQ $fallThru
    // CALL UpdateNewScObjectCache(src1, dst, scriptContext)
    // $fallThru:
    IR::LabelInstr *labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // The checks below dereference src1, so it must live in a register.
    if (!src1->IsRegOpnd())
    {
        IR::RegOpnd *srcRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(srcRegOpnd, src1, insertInstr);
        src1 = srcRegOpnd;
    }

    // Check if constructor is a function if we don't already know it.
    if (!isCtorFunction)
    {
        //  MOV r1, [src1 + offset(type)]       -- check base TypeIds_Function
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        LowererMD::CreateAssign(r1, indirOpnd, insertInstr);

        //  CMP [r1 + offset(typeId)], TypeIds_Function
        //  JNE $fallThru
        indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
        IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
        InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
    }

    // Every function has a constructor cache, even if only the default blank one.
    // r2 = MOV JavascriptFunction->constructorCache
    IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
    IR::IndirOpnd *opndIndir = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfConstructorCache(), TyMachReg, this->m_func);
    IR::Instr *instr = LowererMD::CreateAssign(r2, opndIndir, insertInstr);

    // r3 = constructorCache->updateAfterCtor
    IR::RegOpnd *r3 = IR::RegOpnd::New(TyInt8, this->m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(r2, Js::ConstructorCache::GetOffsetOfUpdateAfterCtor(), TyUint8, this->m_func);
    instr = LowererMD::CreateAssign(r3, indirOpnd, insertInstr);

    // TEST r3, r3                              -- check if updateAfterCtor is 0
    // JEQ $fallThru
    InsertTestBranch(r3, r3, Js::OpCode::BrEq_A, labelFallThru, insertInstr);

    // r2 = UpdateNewScObjectCache(src1, dst, scriptContext)
    // The update is the uncommon case, so mark it as an opHelper block.
    insertInstr->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true)); // helper label for uncommon path
    IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperUpdateNewScObjectCache, m_func);
    // Helper arguments, pushed in reverse order.
    LoadScriptContext(insertInstr);
    m_lowererMD.LoadHelperArgument(insertInstr, dst);
    m_lowererMD.LoadHelperArgument(insertInstr, src1);
    instr = IR::Instr::New(Js::OpCode::Call, m_func);
    instr->SetSrc1(opndHelper);
    insertInstr->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);

    // $fallThru:
    insertInstr->InsertBefore(labelFallThru);

    return insertInstr;
}
IR::Instr *
Lowerer::LowerNewScObjArray(IR::Instr *newObjInstr)
{
    // Lowers a profiled NewScObjArray ("new Array(...)" with arguments) into a
    // call to the profiled array-constructor helper. When the call target is not
    // a known address, either defers to the generic LowerNewScObject path or
    // emits a runtime target check backed by a bailout. After the helper call,
    // the result is tagged with call-site profiling info when it is a native array.
    IR::Instr* startCallInstr;
    if (newObjInstr->HasEmptyArgOutChain(&startCallInstr))
    {
        // No actual arguments were passed; the no-arg lowering handles this shape.
        newObjInstr->FreeSrc2();
        return LowerNewScObjArrayNoArg(newObjInstr);
    }

    IR::Instr* startMarkerInstr = nullptr;
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    if (!targetOpnd->IsAddrOpnd())
    {
        if (!newObjInstr->HasBailOutInfo())
        {
            // Unknown target and no bailout available: lower as a generic "new".
            return this->LowerNewScObject(newObjInstr, true, true);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split the instruction: the clone (inserted after) keeps doing the real
        // work behind labelSkipBailOut, while the original becomes the BailOut.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), newObjInstr->UnlinkSrc2(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = nullptr;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Assert(newObjInstr->IsProfiledInstr());

    IR::RegOpnd *resultObjOpnd = newObjInstr->GetDst()->AsRegOpnd();
    IR::Instr * insertInstr = newObjInstr->m_next;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);

    // We may not have profileId if we converted a NewScObject to NewScObjArray
    if (profileId != Js::Constants::NoProfileId)
    {
        Js::FunctionBody *functionBody = func->GetJnFunction();
        arrayInfo = functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    IR::Opnd *opndSrc1 = newObjInstr->UnlinkSrc1();
    if (opndSrc1->IsImmediateOpnd())
    {
        // Small constant length: emit an inline allocation fast path that
        // branches around the helper call emitted below.
        intptr_t length = opndSrc1->GetImmediateValue();
        if (length >= 0 && length <= 8)
        {
            GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, weakFuncRef, (uint32)length);
        }
    }

    // Pass the array call-site info as the implicit first argument, then call
    // the profiled helper through the original constructor target.
    IR::Opnd *profileOpnd = IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, func);
    this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, profileOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperScrArr_ProfiledNewInstance;
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    newObjInstr = GenerateDirectCall(newObjInstr, targetOpnd, Js::CallFlags_New);

    // If the result's vtable is the plain JavascriptArray vtable, skip the
    // native-array profiling stores below.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(
        IR::IndirOpnd::New(resultObjOpnd, 0, TyMachPtr, func),
        LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptArray),
        Js::OpCode::BrEq_A,
        true,
        labelDone,
        insertInstr);

    // We know we have a native array, so store the weak ref and call site index.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfArrayCallSiteIndex(), TyUint16, func),
        IR::Opnd::CreateProfileIdOpnd(profileId, func),
        insertInstr);
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfWeakFuncRef(), TyMachReg, func),
        IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func),
        insertInstr);
    insertInstr->InsertBefore(labelDone);

    // Remove the temporary marker and return the first instruction above the
    // lowered region so the caller can resume lowering from there.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
IR::Instr *
Lowerer::LowerNewScObjArrayNoArg(IR::Instr *newObjInstr)
{
    // Lowers a profiled NewScObjArray with no arguments ("new Array()") into a
    // call to HelperScrArr_ProfiledNewInstanceNoArg, preceded by an inline
    // zero-length-array fast path. Mirrors LowerNewScObjArray's handling of
    // unknown call targets (generic lowering, or runtime check + bailout).
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;
    IR::Instr* startMarkerInstr = nullptr;

    if (!targetOpnd->IsAddrOpnd())
    {
        if (!newObjInstr->HasBailOutInfo())
        {
            // Unknown target and no bailout available: lower as a generic "new".
            return this->LowerNewScObject(newObjInstr, true, false);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split the instruction: the clone does the real work behind
        // labelSkipBailOut; the original becomes the BailOut.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    Assert(newObjInstr->IsProfiledInstr());

    RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = nullptr;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    // May have no profileId if this was converted from a NewScObject; in that
    // case arrayInfo/weakFuncRef remain null.
    if (profileId != Js::Constants::NoProfileId)
    {
        Js::FunctionBody *functionBody = func->GetJnFunction();
        arrayInfo = functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    // Inline fast path for a zero-length array; falls through to the helper
    // call on failure.
    GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, weakFuncRef, 0);

    // Stage helper arguments: weak func ref, array call-site info,
    // script context, then the constructor function.
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func));
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, func));
    LoadScriptContext(newObjInstr);
    m_lowererMD.LoadHelperArgument(newObjInstr, targetOpnd);

    newObjInstr->UnlinkSrc1();
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrArr_ProfiledNewInstanceNoArg, func));
    m_lowererMD.LowerCall(newObjInstr, 0);

    // Remove the temporary marker and return the first instruction above the
    // lowered region so the caller can resume lowering from there.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
  4341. ///----------------------------------------------------------------------------
  4342. ///
  4343. /// Lowerer::LowerPrologEpilog
  4344. ///
  4345. ///----------------------------------------------------------------------------
  4346. void
  4347. Lowerer::LowerPrologEpilog()
  4348. {
  4349. if (m_func->GetJnFunction()->IsGenerator())
  4350. {
  4351. LowerGeneratorResumeJumpTable();
  4352. }
  4353. IR::Instr * instr;
  4354. instr = m_func->m_headInstr;
  4355. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4356. m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
  4357. instr = m_func->m_exitInstr;
  4358. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4359. m_lowererMD.LowerExitInstr(instr->AsExitInstr());
  4360. }
  4361. void
  4362. Lowerer::LowerPrologEpilogAsmJs()
  4363. {
  4364. IR::Instr * instr;
  4365. instr = m_func->m_headInstr;
  4366. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4367. m_lowererMD.LowerEntryInstrAsmJs(instr->AsEntryInstr());
  4368. instr = m_func->m_exitInstr;
  4369. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4370. m_lowererMD.LowerExitInstrAsmJs(instr->AsExitInstr());
  4371. }
void
Lowerer::LowerGeneratorResumeJumpTable()
{
    // Expands the GeneratorResumeJumpTable pseudo-instruction into a chain of
    // compare+branch instructions, one per recorded yield point: the resume
    // offset (the pseudo-instruction's src1) is compared against each yield
    // offset and control dispatches to the matching resume label.
    Assert(m_func->GetJnFunction()->IsGenerator());

    IR::Instr * jumpTableInstr = m_func->m_headInstr;
    AssertMsg(jumpTableInstr->IsEntryInstr(), "First instr isn't an EntryInstr...");

    // Hope to do away with this linked list scan by moving this lowering to a post-prolog-epilog/pre-encoder phase that is common to all architectures (currently such phase is only available on amd64/arm)
    while (jumpTableInstr->m_opcode != Js::OpCode::GeneratorResumeJumpTable)
    {
        jumpTableInstr = jumpTableInstr->m_next;
    }

    IR::Opnd * srcOpnd = jumpTableInstr->UnlinkSrc1();

    m_func->MapYieldOffsetResumeLabels([&](int i, const YieldOffsetResumeLabel& yorl)
    {
        uint32 offset = yorl.First();
        IR::LabelInstr * label = yorl.Second();

        if (label != nullptr && label->m_hasNonBranchRef)
        {
            // Also fix up the bailout at the label with the jump to epilog that was not emitted in GenerateBailOut()
            Assert(label->m_prev->HasBailOutInfo());
            GenerateJumpToEpilogForBailOut(label->m_prev->GetBailOutInfo(), label->m_prev);
        }
        else if (label == nullptr)
        {
            // No resume label was recorded for this yield point; dispatch to
            // the common no-save bailout label instead.
            label = m_func->m_bailOutNoSaveLabel;
        }

        // For each offset label pair, insert a compare of the offset and branch if equal to the label
        InsertCompareBranch(srcOpnd, IR::IntConstOpnd::New(offset, TyUint32, m_func), Js::OpCode::BrSrEq_A, label, jumpTableInstr);
    });

    // The pseudo-instruction has been fully expanded; remove it.
    jumpTableInstr->Remove();
}
  4403. void
  4404. Lowerer::DoInterruptProbes()
  4405. {
  4406. this->m_func->SetHasInstrNumber(true);
  4407. uint instrCount = 1;
  4408. FOREACH_INSTR_IN_FUNC(instr, this->m_func)
  4409. {
  4410. instr->SetNumber(instrCount++);
  4411. if (instr->IsLabelInstr())
  4412. {
  4413. IR::LabelInstr *labelInstr = instr->AsLabelInstr();
  4414. if (labelInstr->m_isLoopTop)
  4415. {
  4416. // For every loop top label, insert the following:
  4417. // cmp sp, ThreadContext::stackLimitForCurrentThread
  4418. // bgt $continue
  4419. // $helper:
  4420. // call JavascriptOperators::ScriptAbort
  4421. // b $exit
  4422. // $continue:
  4423. IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4424. labelInstr->InsertAfter(newLabel);
  4425. this->InsertOneLoopProbe(newLabel, newLabel);
  4426. }
  4427. }
  4428. }
  4429. NEXT_INSTR_IN_FUNC;
  4430. }
// Insert an interrupt probe at each loop back branch. (Currently uncalled, since we're inserting
// probes at loop tops instead of back edges, but kept around because it may prove useful.)
uint
Lowerer::DoLoopProbeAndNumber(IR::BranchInstr *branchInstr)
{
    // Returns the next available instruction number after numbering the probe
    // sequence inserted at this branch (or branchInstr's own number + 1 when
    // the branch is not a loop back edge).
    IR::LabelInstr *labelInstr = branchInstr->GetTarget();
    if (labelInstr == nullptr || labelInstr->GetNumber() == 0)
    {
        // Forward branch (possibly an indirect jump after try-catch-finally); nothing to do.
        return branchInstr->GetNumber() + 1;
    }

    Assert(labelInstr->m_isLoopTop);

    // Insert a stack probe at this branch. Number all the instructions we insert
    // and return the next instruction number.
    uint number = branchInstr->GetNumber();
    // Capture the neighbors so the newly inserted range can be numbered below.
    IR::Instr *instrPrev = branchInstr->m_prev;
    IR::Instr *instrNext = branchInstr->m_next;

    if (branchInstr->IsUnconditional())
    {
        // The probe sequence replaces the unconditional back branch entirely:
        // B $loop   ==>
        //     cmp [], 0
        //     beq $loop
        // $helper:
        //     call abort
        //     b $exit
        this->InsertOneLoopProbe(branchInstr, labelInstr);
        branchInstr->Remove();
    }
    else
    {
        // Conditional back branch: invert it to skip the probe when the loop
        // is not taken.
        // Bcc $loop ==>
        // Binv $notloop
        //     cmp [], 0
        //     beq $loop
        // $helper:
        //     call abort
        //     b $exit
        // $notloop:
        IR::LabelInstr *loopExitLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        branchInstr->SetTarget(loopExitLabel);
        LowererMD::InvertBranch(branchInstr);
        branchInstr->InsertAfter(loopExitLabel);
        this->InsertOneLoopProbe(loopExitLabel, labelInstr);
    }

    // Number everything that was inserted between the original neighbors.
    FOREACH_INSTR_IN_RANGE(instr, instrPrev->m_next, instrNext->m_prev)
    {
        instr->SetNumber(number++);
    }
    NEXT_INSTR_IN_RANGE;

    return number;
}
void
Lowerer::InsertOneLoopProbe(IR::Instr *insertInstr, IR::LabelInstr *loopLabel)
{
    // Insert one interrupt probe at the given instruction. Probe the stack and call the abort helper
    // directly if the probe fails.
    //
    // Emitted shape (before insertInstr):
    //     cmp sp, [stackLimitForCurrentThread]
    //     bgt $loopLabel
    // $helper:
    //     call HelperScriptAbort
    //     b $exit            (unreachable; tells the register allocator
    //                         nothing is live after the abort call)
    IR::Opnd *memRefOpnd = IR::MemRefOpnd::New(
        this->m_func->GetScriptContext()->GetThreadContext()->GetAddressOfStackLimitForCurrentThread(),
        TyMachReg, this->m_func);
    IR::RegOpnd *regStackPointer = IR::RegOpnd::New(
        NULL, this->m_lowererMD.GetRegStackPointer(), TyMachReg, this->m_func);
    InsertCompareBranch(regStackPointer, memRefOpnd, Js::OpCode::BrGt_A, loopLabel, insertInstr);

    // Mark the abort path as a helper (cold) block.
    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    insertInstr->InsertBefore(helperLabel);

    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScriptAbort, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(helperOpnd);
    insertInstr->InsertBefore(instr);
    this->m_lowererMD.LowerCall(instr, 0);

    // Jump to the exit after the helper call. This instruction will never be reached, but the jump
    // indicates that nothing is live after the call (to avoid useless spills in code that will
    // be executed).
    // Reuse a label just before the exit instruction if one exists; otherwise create one.
    instr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (instr->IsLabelInstr())
    {
        helperLabel = instr->AsLabelInstr();
    }
    else
    {
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(helperLabel);
    }

    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, helperLabel, this->m_func);
    insertInstr->InsertBefore(instr);
}
  4516. ///----------------------------------------------------------------------------
  4517. ///
  4518. /// Lowerer::LoadPropertySymAsArgument
  4519. ///
  4520. /// Generate code to pass a fieldSym as argument to a helper.
  4521. ///----------------------------------------------------------------------------
  4522. IR::Instr *
  4523. Lowerer::LoadPropertySymAsArgument(IR::Instr *instr, IR::Opnd *fieldSrc)
  4524. {
  4525. IR::Instr * instrPrev;
  4526. AssertMsg(fieldSrc->IsSymOpnd() && fieldSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as src of LdFld");
  4527. IR::SymOpnd *symOpnd = fieldSrc->AsSymOpnd();
  4528. PropertySym * fieldSym = symOpnd->m_sym->AsPropertySym();
  4529. IR::IntConstOpnd * indexOpnd = IR::IntConstOpnd::New(fieldSym->m_propertyId, TyInt32, m_func, /*dontEncode*/true);
  4530. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4531. IR::RegOpnd * instanceOpnd = symOpnd->CreatePropertyOwnerOpnd(m_func);
  4532. m_lowererMD.LoadHelperArgument(instr, instanceOpnd);
  4533. return instrPrev;
  4534. }
  4535. ///----------------------------------------------------------------------------
  4536. ///
  4537. /// Lowerer::LoadFunctionBodyAsArgument
  4538. ///
  4539. /// Special case: the "property ID" is a key into the ScriptContext's FunctionBody map
  4540. ///----------------------------------------------------------------------------
  4541. IR::Instr *
  4542. Lowerer::LoadFunctionBodyAsArgument(IR::Instr *instr, IR::IntConstOpnd * functionBodySlotOpnd, IR::RegOpnd * envOpnd)
  4543. {
  4544. IR::Instr * instrPrev;
  4545. // We need to pass in the function reference, we can't embed the pointer to the function proxy here.
  4546. // The function proxy may be deferred parsed/serialize, and may 'progress' to a real function body after it is undeferred
  4547. // At which point the deferred function proxy may be collect.
  4548. // Just pass it the address where we will find the function proxy/body
  4549. Js::FunctionProxyPtrPtr proxyRef = instr->m_func->GetJnFunction()->GetNestedFuncReference((uint)functionBodySlotOpnd->GetValue());
  4550. AssertMsg(proxyRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
  4551. AssertMsg(*proxyRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
  4552. IR::AddrOpnd * indexOpnd = IR::AddrOpnd::New((Js::Var)proxyRef, IR::AddrOpndKindDynamicMisc, m_func);
  4553. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4554. m_lowererMD.LoadHelperArgument(instr, envOpnd);
  4555. return instrPrev;
  4556. }
IR::Instr *
Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
{
    // Lowers a field load under JIT profiling: stages the arguments for the
    // appropriate profiling helper (selected by opcode) and converts the
    // instruction into the helper call. Returns the instruction preceding the
    // lowered region so the caller can resume from there.
    const auto instrPrev = ldFldInstr->m_prev;

    auto src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    IR::JnHelperMethod helper;
    switch (ldFldInstr->m_opcode)
    {
        // The first group only differs in which helper is called; all share the
        // same argument sequence (ldFldCommon below).
        case Js::OpCode::LdFld:
            helper = IR::HelperProfiledLdFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootFld:
            helper = IR::HelperProfiledLdRootFld;
            goto ldFldCommon;
        case Js::OpCode::LdMethodFld:
            helper = IR::HelperProfiledLdMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootMethodFld:
            helper = IR::HelperProfiledLdRootMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdFldForCallApplyTarget:
            helper = IR::HelperProfiledLdFld_CallApplyTarget;
            goto ldFldCommon;
        case Js::OpCode::LdFldForTypeOf:
            helper = IR::HelperProfiledLdFldForTypeOf;
            goto ldFldCommon;
        case Js::OpCode::LdRootFldForTypeOf:
            helper = IR::HelperProfiledLdRootFldForTypeOf;
            goto ldFldCommon;

ldFldCommon:
        {
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);

            /*
                Var ProfilingHelpers::ProfiledLdFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer)
            */
            // Arguments are staged in reverse: frame pointer, cache index,
            // then (via LoadPropertySymAsArgument) property ID and instance.
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);
            break;
        }

        case Js::OpCode::LdSuperFld:
        {
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
            IR::Opnd * src2 = nullptr;

            /*
                Var ProfilingHelpers::ProfiledLdSuperFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer,
                    const Var thisInstance)
            */
            // Same as ldFldCommon, plus the 'this' instance (src2) as an extra
            // trailing argument.
            src2 = ldFldInstr->UnlinkSrc2();

            m_lowererMD.LoadHelperArgument(ldFldInstr, src2 );
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);
            helper = IR::HelperProfiledLdSuperFld;
            break;
        }

        case Js::OpCode::LdLen_A:
            // If we want to profile this call, then push some extra args and call the profiling version
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateProfileIdOpnd(ldFldInstr->profileId, m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, CreateFunctionBodyOpnd(ldFldInstr->m_func));
            helper = IR::HelperSimpleProfiledLdLen;
            break;

        default:
            Assert(false);
            __assume(false);
    }

    // Replace src1 with the selected helper and lower the call itself.
    ldFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(ldFldInstr, 0);

    return instrPrev;
}
  4641. ///----------------------------------------------------------------------------
  4642. ///
  4643. /// Lowerer::LowerLdFld
  4644. ///
  4645. /// Lower an instruction (LdFld, ScopedLdFld) that takes a property
  4646. /// reference as a source and puts a result in a register.
  4647. ///
  4648. ///----------------------------------------------------------------------------
  4649. IR::Instr *
  4650. Lowerer::LowerLdFld(
  4651. IR::Instr * ldFldInstr,
  4652. IR::JnHelperMethod helperMethod,
  4653. IR::JnHelperMethod polymorphicHelperMethod,
  4654. bool useInlineCache,
  4655. IR::LabelInstr *labelBailOut,
  4656. bool isHelper)
  4657. {
  4658. if (ldFldInstr->IsJitProfilingInstr())
  4659. {
  4660. // If we want to profile then do something completely different
  4661. return this->LowerProfiledLdFld(ldFldInstr->AsJitProfilingInstr());
  4662. }
  4663. IR::Opnd *src;
  4664. IR::Instr *instrPrev = ldFldInstr->m_prev;
  4665. src = ldFldInstr->UnlinkSrc1();
  4666. if (ldFldInstr->m_opcode == Js::OpCode::LdSuperFld)
  4667. {
  4668. IR::Opnd * src2 = nullptr;
  4669. src2 = ldFldInstr->UnlinkSrc2();
  4670. m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
  4671. }
  4672. AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
  4673. if (useInlineCache)
  4674. {
  4675. IR::Opnd * inlineCacheOpnd;
  4676. AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
  4677. if (src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
  4678. {
  4679. Js::PolymorphicInlineCache * polymorphicInlineCache = src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
  4680. helperMethod = polymorphicHelperMethod;
  4681. inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
  4682. }
  4683. else
  4684. {
  4685. // Need to load runtime inline cache opnd first before loading any helper argument
  4686. // because LoadRuntimeInlineCacheOpnd may create labels marked as helper,
  4687. // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
  4688. inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd(), isHelper);
  4689. }
  4690. this->LoadPropertySymAsArgument(ldFldInstr, src);
  4691. this-> m_lowererMD.LoadHelperArgument(
  4692. ldFldInstr,
  4693. IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
  4694. this->m_lowererMD.LoadHelperArgument(ldFldInstr, inlineCacheOpnd);
  4695. this->m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
  4696. }
  4697. else
  4698. {
  4699. LoadScriptContext(ldFldInstr);
  4700. this->LoadPropertySymAsArgument(ldFldInstr, src);
  4701. }
  4702. // Do we need to reload the type and slot array after the helper returns?
  4703. // (We do if there's a propertySymOpnd downstream that needs it, i.e., the type is not dead.)
  4704. IR::RegOpnd *opndBase = src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
  4705. m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod, labelBailOut, opndBase, src->AsSymOpnd()->IsPropertySymOpnd() ? src->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);
  4706. return instrPrev;
  4707. }
  4708. bool
  4709. Lowerer::GenerateLdFldWithCachedType(IR::Instr * instrLdFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
  4710. {
  4711. IR::Instr *instr;
  4712. IR::Opnd *opnd;
  4713. IR::LabelInstr *labelObjCheckFailed = nullptr;
  4714. IR::LabelInstr *labelTypeCheckFailed = nullptr;
  4715. IR::LabelInstr *labelDone = nullptr;
  4716. Assert(continueAsHelperOut != nullptr);
  4717. *continueAsHelperOut = false;
  4718. Assert(labelHelperOut != nullptr);
  4719. *labelHelperOut = nullptr;
  4720. Assert(typeOpndOut != nullptr);
  4721. *typeOpndOut = nullptr;
  4722. Assert(instrLdFld->GetSrc1()->IsSymOpnd());
  4723. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  4724. {
  4725. return false;
  4726. }
  4727. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  4728. if (!propertySymOpnd->IsTypeCheckSeqCandidate())
  4729. {
  4730. return false;
  4731. }
  4732. AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
  4733. if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
  4734. {
  4735. return false;
  4736. }
  4737. Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind())));
  4738. // In the backwards pass we only add guarded property operations to instructions that are not already
  4739. // protected by an upstream type check.
  4740. Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
  4741. PHASE_PRINT_TESTTRACE(
  4742. Js::ObjTypeSpecPhase,
  4743. this->m_func,
  4744. L"Field load: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
  4745. Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
  4746. this->m_func->GetScriptContext()->GetPropertyNameLocked(
  4747. propertySymOpnd->m_sym->AsPropertySym()->m_propertyId)->GetBuffer(),
  4748. this->m_func->GetJnFunction()->GetDisplayName(),
  4749. propertySymOpnd->m_inlineCacheIndex,
  4750. propertySymOpnd->GetCacheLayoutString(),
  4751. propertySymOpnd->IsTypeChecked() ? L"true" : L"false");
  4752. if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
  4753. {
  4754. propertySymOpnd->UpdateSlotForFinalType();
  4755. }
  4756. // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
  4757. // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
  4758. bool hasTypeCheckBailout = instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind());
  4759. // If the hard-coded type is not available here, do a type check, and branch to the helper if the check fails.
  4760. // In the prototype case, we have to check the type even if it was checked upstream, to cover the case where
  4761. // the property has been added locally. Note that this is not necessary if the proto chain has been checked,
  4762. // because then we know there's been no store of the property since the type was checked.
  4763. bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
  4764. bool emitLocalTypeCheck = propertySymOpnd->NeedsLocalTypeCheck();
  4765. bool emitLoadFromProtoTypeCheck = propertySymOpnd->NeedsLoadFromProtoTypeCheck();
  4766. if (emitPrimaryTypeCheck || emitLocalTypeCheck || emitLoadFromProtoTypeCheck)
  4767. {
  4768. if (emitLoadFromProtoTypeCheck)
  4769. {
  4770. propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
  4771. propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
  4772. }
  4773. labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4774. labelObjCheckFailed = hasTypeCheckBailout ? labelTypeCheckFailed : IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4775. *typeOpndOut = this->GenerateCachedTypeCheck(instrLdFld, propertySymOpnd, labelObjCheckFailed, labelTypeCheckFailed);
  4776. }
  4777. IR::Opnd *opndSlotArray;
  4778. if (propertySymOpnd->IsLoadedFromProto())
  4779. {
  4780. opndSlotArray = this->LoadSlotArrayWithCachedProtoType(instrLdFld, propertySymOpnd);
  4781. }
  4782. else
  4783. {
  4784. opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrLdFld, propertySymOpnd);
  4785. }
  4786. // Load the value from the slot, getting the slot ID from the cache.
  4787. uint16 index = propertySymOpnd->GetSlotIndex();
  4788. Assert(index != -1);
  4789. if (opndSlotArray->IsRegOpnd())
  4790. {
  4791. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, this->m_func);
  4792. }
  4793. else
  4794. {
  4795. Assert(opndSlotArray->IsMemRefOpnd());
  4796. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, this->m_func, IR::AddrOpndKindDynamicPropertySlotRef);
  4797. }
  4798. Lowerer::InsertMove(instrLdFld->GetDst(), opnd, instrLdFld);
  4799. // We eliminate the helper, or the type check succeeds, or we bail out before the operation.
  4800. // Either delete the original instruction or replace it with a bailout.
  4801. if (!emitPrimaryTypeCheck && !emitLocalTypeCheck && !emitLoadFromProtoTypeCheck)
  4802. {
  4803. Assert(labelTypeCheckFailed == nullptr);
  4804. AssertMsg(!instrLdFld->HasBailOutInfo(), "Why does a direct field load have bailout?");
  4805. instrLdFld->Remove();
  4806. return true;
  4807. }
  4808. // Otherwise, branch around the bailout or helper.
  4809. labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4810. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
  4811. instrLdFld->InsertBefore(instr);
  4812. // Insert the bailout or helper label here.
  4813. instrLdFld->InsertBefore(labelTypeCheckFailed);
  4814. instrLdFld->InsertAfter(labelDone);
  4815. if (hasTypeCheckBailout)
  4816. {
  4817. AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
  4818. "Why does a field load have a type check bailout, if its type is dead?");
  4819. // Convert the original instruction to a bailout.
  4820. if (instrLdFld->GetBailOutInfo()->bailOutInstr != instrLdFld)
  4821. {
  4822. // Set the cache index in the bailout info so that the bailout code will write it into the
  4823. // bailout record at runtime.
  4824. instrLdFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
  4825. }
  4826. instrLdFld->FreeDst();
  4827. instrLdFld->FreeSrc1();
  4828. instrLdFld->m_opcode = Js::OpCode::BailOut;
  4829. this->GenerateBailOut(instrLdFld);
  4830. return true;
  4831. }
  4832. else
  4833. {
  4834. *continueAsHelperOut = true;
  4835. Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelTypeCheckFailed);
  4836. *labelHelperOut = labelObjCheckFailed;
  4837. return false;
  4838. }
  4839. }
  4840. template<bool isRoot>
  4841. IR::Instr* Lowerer::GenerateCompleteLdFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
  4842. IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath)
  4843. {
  4844. if(instr->CallsAccessor() && instr->HasBailOutInfo())
  4845. {
  4846. IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
  4847. Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
  4848. }
  4849. IR::Instr* prevInstr = instr->m_prev;
  4850. IR::LabelInstr* labelHelper = nullptr;
  4851. IR::LabelInstr* labelBailOut = nullptr;
  4852. bool isHelper = false;
  4853. IR::RegOpnd* typeOpnd = nullptr;
  4854. if (isRoot)
  4855. {
  4856. // Don't do the fast path here if emitFastPath is false, even if we can.
  4857. if (emitFastPath && (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd) || this->GenerateNonConfigurableLdRootFld(instr)))
  4858. {
  4859. Assert(labelHelper == nullptr);
  4860. return prevInstr;
  4861. }
  4862. }
  4863. else
  4864. {
  4865. if (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
  4866. {
  4867. Assert(labelHelper == nullptr);
  4868. return prevInstr;
  4869. }
  4870. }
  4871. if (emitFastPath)
  4872. {
  4873. if (!GenerateFastLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper))
  4874. {
  4875. if (labelHelper != nullptr)
  4876. {
  4877. labelHelper->isOpHelper = isHelper;
  4878. instr->InsertBefore(labelHelper);
  4879. }
  4880. prevInstr = LowerLdFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper);
  4881. }
  4882. }
  4883. else
  4884. {
  4885. if (labelHelper != nullptr)
  4886. {
  4887. labelHelper->isOpHelper = isHelper;
  4888. instr->InsertBefore(labelHelper);
  4889. }
  4890. prevInstr = LowerLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper);
  4891. }
  4892. return prevInstr;
  4893. }
// Lowers a fixed-field check instruction: emits the runtime checks (type check and/or
// property guard check) needed to guarantee that a value captured as "fixed" at JIT time
// is still valid, and converts the original instruction into a bailout taken on failure.
// Returns true: the instruction is always fully consumed (removed or turned into BailOut).
bool
Lowerer::GenerateCheckFixedFld(IR::Instr * instrChkFld)
{
    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instrChkFld->m_func->GetJnFunction()) ||
        !PHASE_OFF(Js::UseFixedDataPropsPhase, instrChkFld->m_func->GetJnFunction()), "Lowering a check fixed field with fixed data/method phase disabled?");

    Assert(instrChkFld->GetSrc1()->IsSymOpnd() && instrChkFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkFld->GetSrc1()->AsPropertySymOpnd();

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    // For the non-configurable properties on the global object we do not need a type check. Otherwise,
    // we need a type check and bailout here unless this operation is part of the type check sequence and
    // is protected by a type check upstream.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();

    // In addition, we may also need a local type check in case the property comes from the prototype and
    // it may have been overwritten on the instance after the primary type check upstream. If the property
    // comes from the instance, we must still protect against its value changing after the type check, but
    // for this a cheaper guard check is sufficient (see below).
    bool emitFixedFieldTypeCheck = propertySymOpnd->NeedsCheckFixedFieldTypeCheck() &&
        (!propertySymOpnd->IsTypeChecked() || propertySymOpnd->IsLoadedFromProto());

    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;

    OUTPUT_TRACE_FUNC(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Fixed field check: %s, property: %s, cache ID: %u, cloned cache: true, layout: %s, redundant check: %s count of props: %u \n",
        Js::OpCodeUtil::GetOpCodeName(instrChkFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), propertySymOpnd->IsTypeChecked() ? L"true" : L"false",
        propertySymOpnd->GetGuardedPropOps() ? propertySymOpnd->GetGuardedPropOps()->Count() : 0);

    if (emitPrimaryTypeCheck || emitFixedFieldTypeCheck)
    {
        labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        if(emitFixedFieldTypeCheck && propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
        {
            AssertMsg(!propertySymOpnd->GetGuardedPropOps() || propertySymOpnd->GetGuardedPropOps()->IsEmpty(), "This property Guard is used only for one property");
            //We need only cheaper Guard check, if the property belongs to the GlobalObject.
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        else
        {
            if (emitFixedFieldTypeCheck)
            {
                // Record this operation as guarded by the type check we're about to emit.
                propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
                propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
            }
            this->GenerateCachedTypeCheck(instrChkFld, propertySymOpnd, labelBailOut, labelBailOut);
        }
    }

    // We may still need this guard if we didn't emit the write protect type check above. This situation arises if we have
    // a fixed field from the instance (not proto) and a property of the same name has been written somewhere between the
    // primary type check and here. Note that we don't need a type check, because we know the fixed field exists on the
    // object even if it has been written since primary type check, but we need to verify the fixed value didn't get overwritten.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && !propertySymOpnd->IsWriteGuardChecked())
    {
        if (!PHASE_OFF(Js::FixedFieldGuardCheckPhase, this->m_func))
        {
            Assert(labelBailOut == nullptr);
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
    }

    // Note that a type handler holds only a weak reference to the singleton instance it represents, so
    // it is possible that the instance gets collected before the type and handler do. Hence, the upstream
    // type check may succeed, even as the original instance no longer exists. However, this would happen
    // only if another instance reached the same type (otherwise we wouldn't ever pass the type check
    // upstream). In that case we would have invalidated all fixed fields on that type, and so the type
    // check (or property guard check, if necessary) above would fail. All in all, we would never attempt
    // to access a fixed field from an instance that has been collected.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && propertySymOpnd->IsWriteGuardChecked())
    {
        // Fully protected upstream: no runtime check needed, so the instruction disappears.
        Assert(labelBailOut == nullptr);
        AssertMsg(!instrChkFld->HasBailOutInfo(), "Why does a direct fixed field check have bailout?");
        instrChkFld->Remove();
        return true;
    }

    // On success, branch around the bailout that replaces the original instruction below.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkFld->InsertBefore(instr);

    // Insert the helper label here.
    instrChkFld->InsertBefore(labelBailOut);
    instrChkFld->InsertAfter(labelDone);

    // Convert the original instruction to a bailout.
    Assert(instrChkFld->HasBailOutInfo());

    if (instrChkFld->GetBailOutInfo()->bailOutInstr != instrChkFld)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkFld->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }

    instrChkFld->FreeSrc1();
    instrChkFld->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkFld);

    return true;
}
  4994. void
  4995. Lowerer::GenerateCheckObjType(IR::Instr * instrChkObjType)
  4996. {
  4997. Assert(instrChkObjType->GetSrc1()->IsSymOpnd() && instrChkObjType->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
  4998. IR::PropertySymOpnd *propertySymOpnd = instrChkObjType->GetSrc1()->AsPropertySymOpnd();
  4999. // Why do we have an explicit type check if the cached type has been checked upstream? The dead store pass should have
  5000. // removed this instruction.
  5001. Assert(propertySymOpnd->IsTypeCheckSeqCandidate() && !propertySymOpnd->IsTypeChecked());
  5002. // Why do we have an explicit type check on a non-configurable root field load?
  5003. Assert(!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad());
  5004. PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
  5005. uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
  5006. PHASE_PRINT_TESTTRACE(
  5007. Js::ObjTypeSpecPhase,
  5008. this->m_func,
  5009. L"Object type check: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
  5010. Js::OpCodeUtil::GetOpCodeName(instrChkObjType->m_opcode),
  5011. this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
  5012. this->m_func->GetJnFunction()->GetDisplayName(),
  5013. inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), L"false");
  5014. IR::LabelInstr* labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5015. this->GenerateCachedTypeCheck(instrChkObjType, propertySymOpnd, labelBailOut, labelBailOut);
  5016. IR::LabelInstr* labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5017. IR::Instr* instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
  5018. instrChkObjType->InsertBefore(instr);
  5019. // Insert the bailout label here.
  5020. instrChkObjType->InsertBefore(labelBailOut);
  5021. instrChkObjType->InsertAfter(labelDone);
  5022. // Convert the original instruction to a bailout.
  5023. Assert(instrChkObjType->HasBailOutInfo());
  5024. if (instrChkObjType->GetBailOutInfo()->bailOutInstr != instrChkObjType)
  5025. {
  5026. // Set the cache index in the bailout info so that the bailout code will write it into the
  5027. // bailout record at runtime.
  5028. instrChkObjType->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
  5029. }
  5030. instrChkObjType->FreeSrc1();
  5031. instrChkObjType->m_opcode = Js::OpCode::BailOut;
  5032. this->GenerateBailOut(instrChkObjType);
  5033. }
  5034. void
  5035. Lowerer::LowerAdjustObjType(IR::Instr * instrAdjustObjType)
  5036. {
  5037. IR::AddrOpnd *finalTypeOpnd = instrAdjustObjType->UnlinkDst()->AsAddrOpnd();
  5038. IR::AddrOpnd *initialTypeOpnd = instrAdjustObjType->UnlinkSrc2()->AsAddrOpnd();
  5039. IR::RegOpnd *baseOpnd = instrAdjustObjType->UnlinkSrc1()->AsRegOpnd();
  5040. this->GenerateAdjustBaseSlots(
  5041. instrAdjustObjType, baseOpnd, (Js::Type*)initialTypeOpnd->m_address, (Js::Type*)finalTypeOpnd->m_address);
  5042. this->m_func->PinTypeRef(finalTypeOpnd->m_address);
  5043. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrAdjustObjType->m_func);
  5044. this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrAdjustObjType);
  5045. initialTypeOpnd->Free(instrAdjustObjType->m_func);
  5046. instrAdjustObjType->Remove();
  5047. }
  5048. bool
  5049. Lowerer::GenerateNonConfigurableLdRootFld(IR::Instr * instrLdFld)
  5050. {
  5051. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  5052. {
  5053. return false;
  5054. }
  5055. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  5056. if (!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
  5057. {
  5058. return false;
  5059. }
  5060. Assert(!PHASE_OFF(Js::RootObjectFldFastPathPhase, this->m_func->GetJnFunction()));
  5061. Assert(!instrLdFld->HasBailOutInfo());
  5062. IR::Opnd * srcOpnd;
  5063. Js::RootObjectBase * rootObject = this->m_func->GetJnFunction()->GetRootObject();
  5064. if (propertySymOpnd->UsesAuxSlot())
  5065. {
  5066. IR::RegOpnd * auxSlotOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  5067. this->InsertMove(auxSlotOpnd, IR::MemRefOpnd::New((byte *)rootObject + Js::DynamicObject::GetOffsetOfAuxSlots(),
  5068. TyMachPtr, this->m_func), instrLdFld);
  5069. srcOpnd = IR::IndirOpnd::New(auxSlotOpnd, propertySymOpnd->GetSlotIndex() * sizeof(Js::Var *),
  5070. TyVar, this->m_func);
  5071. }
  5072. else
  5073. {
  5074. srcOpnd = IR::MemRefOpnd::New((Js::Var *)rootObject + propertySymOpnd->GetSlotIndex(),
  5075. TyVar, this->m_func);
  5076. }
  5077. instrLdFld->ReplaceSrc1(srcOpnd);
  5078. instrLdFld->m_opcode = Js::OpCode::Ld_A;
  5079. LowererMD::ChangeToAssign(instrLdFld);
  5080. return true;
  5081. }
  5082. IR::Instr *
  5083. Lowerer::LowerDelFld(IR::Instr *delFldInstr, IR::JnHelperMethod helperMethod, bool useInlineCache, bool strictMode)
  5084. {
  5085. IR::Instr *instrPrev;
  5086. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5087. if (strictMode)
  5088. {
  5089. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5090. }
  5091. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5092. LowerLdFld(delFldInstr, helperMethod, helperMethod, useInlineCache);
  5093. return instrPrev;
  5094. }
  5095. IR::Instr *
  5096. Lowerer::LowerIsInst(IR::Instr * isInstInstr, IR::JnHelperMethod helperMethod)
  5097. {
  5098. IR::Instr * instrPrev;
  5099. IR::Instr * instrArg;
  5100. IR::RegOpnd * argOpnd;
  5101. // inlineCache
  5102. instrPrev = m_lowererMD.LoadHelperArgument(isInstInstr, LoadIsInstInlineCacheOpnd(isInstInstr, isInstInstr->GetSrc1()->AsIntConstOpnd()->AsUint32()));
  5103. isInstInstr->FreeSrc1();
  5104. argOpnd = isInstInstr->UnlinkSrc2()->AsRegOpnd();
  5105. Assert(argOpnd->m_sym->m_isSingleDef);
  5106. instrArg = argOpnd->m_sym->m_instrDef;
  5107. argOpnd->Free(m_func);
  5108. // scriptContext
  5109. LoadScriptContext(isInstInstr);
  5110. // instance goes last, so remember it now
  5111. IR::Opnd * instanceOpnd = instrArg->UnlinkSrc1();
  5112. argOpnd = instrArg->UnlinkSrc2()->AsRegOpnd();
  5113. Assert(argOpnd->m_sym->m_isSingleDef);
  5114. instrArg->Remove();
  5115. instrArg = argOpnd->m_sym->m_instrDef;
  5116. argOpnd->Free(m_func);
  5117. // function
  5118. IR::Opnd *opnd = instrArg->UnlinkSrc1();
  5119. m_lowererMD.LoadHelperArgument(isInstInstr, opnd);
  5120. Assert(instrArg->GetSrc2() == NULL);
  5121. instrArg->Remove();
  5122. // instance
  5123. m_lowererMD.LoadHelperArgument(isInstInstr, instanceOpnd);
  5124. m_lowererMD.ChangeToHelperCall(isInstInstr, helperMethod);
  5125. return instrPrev;
  5126. }
  5127. void
  5128. Lowerer::GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionProxyPtrPtr nestedProxy)
  5129. {
  5130. Func * func = this->m_func;
  5131. Assert(func->HasAnyStackNestedFunc());
  5132. Assert(nextStackFunctionOpnd);
  5133. IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
  5134. IR::RegOpnd * addressOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5135. const IR::AutoReuseOpnd autoReuseAddressOpnd(addressOpnd, func);
  5136. InsertLea(addressOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertBeforeInstr);
  5137. // Currently we don't initialize the environment until we actually allocate the function, we also
  5138. // walk the list of stack function when we need to box them. so we should use initialize it to NullFrameDisplay
  5139. GenerateStackScriptFunctionInit(addressOpnd, nestedProxy,
  5140. IR::AddrOpnd::New((Js::Var)&Js::NullFrameDisplay, IR::AddrOpndKindDynamicMisc, func), insertBeforeInstr);
  5141. // Establish the next link
  5142. InsertMove(nextStackFunctionOpnd, addressOpnd, insertBeforeInstr);
  5143. this->nextStackFunctionOpnd = IR::SymOpnd::New(stackSym, sizeof(Js::StackScriptFunction), TyMachPtr, func);
  5144. }
  5145. void
  5146. Lowerer::GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
  5147. Js::FunctionProxyPtrPtr nestedProxy, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr, bool isZeroed)
  5148. {
  5149. Func * func = this->m_func;
  5150. IR::Opnd * functionProxyOpnd;
  5151. Js::FunctionProxy * functionProxy = *nestedProxy;
  5152. IR::Opnd * typeOpnd = nullptr;
  5153. bool doCheckTypeOpnd = true;
  5154. if (functionProxy->IsDeferred())
  5155. {
  5156. functionProxyOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5157. InsertMove(functionProxyOpnd, IR::MemRefOpnd::New((Js::FunctionProxy**) nestedProxy, TyMachPtr, func), insertBeforeInstr);
  5158. typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5159. InsertMove(typeOpnd, IR::IndirOpnd::New(functionProxyOpnd->AsRegOpnd(), Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(),
  5160. TyMachPtr, func), insertBeforeInstr);
  5161. }
  5162. else
  5163. {
  5164. Js::FunctionBody * functionBody = functionProxy->GetFunctionBody();
  5165. functionProxyOpnd = CreateFunctionBodyOpnd(functionBody);
  5166. Js::ScriptFunctionType * type = functionProxy->GetDeferredPrototypeType();
  5167. if (type != nullptr)
  5168. {
  5169. typeOpnd = IR::AddrOpnd::New(type, IR::AddrOpndKindDynamicType, func);
  5170. doCheckTypeOpnd = false;
  5171. }
  5172. else
  5173. {
  5174. typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5175. InsertMove(typeOpnd,
  5176. IR::MemRefOpnd::New(((byte *)functionBody) + Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(), TyMachPtr, func),
  5177. insertBeforeInstr);
  5178. }
  5179. }
  5180. if (doCheckTypeOpnd)
  5181. {
  5182. IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  5183. InsertTestBranch(typeOpnd, typeOpnd, Js::OpCode::BrEq_A, labelHelper, insertBeforeInstr);
  5184. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  5185. InsertBranch(Js::OpCode::Br, labelDone, insertBeforeInstr);
  5186. insertBeforeInstr->InsertBefore(labelHelper);
  5187. m_lowererMD.LoadHelperArgument(insertBeforeInstr, functionProxyOpnd);
  5188. IR::Instr * callHelperInstr = IR::Instr::New(Js::OpCode::Call, typeOpnd,
  5189. IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperEnsureFunctionProxyDeferredPrototypeType, func), func);
  5190. insertBeforeInstr->InsertBefore(callHelperInstr);
  5191. m_lowererMD.LowerCall(callHelperInstr, 0);
  5192. insertBeforeInstr->InsertBefore(labelDone);
  5193. }
  5194. GenerateMemInit(regOpnd, 0, vtableAddressOpnd, insertBeforeInstr, isZeroed);
  5195. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfType(), typeOpnd, insertBeforeInstr, isZeroed);
  5196. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfAuxSlots(), insertBeforeInstr, isZeroed);
  5197. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfObjectArray(), insertBeforeInstr, isZeroed);
  5198. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfConstructorCache(),
  5199. LoadLibraryValueOpnd(insertBeforeInstr, LibraryValue::ValueConstructorCacheDefaultInstance),
  5200. insertBeforeInstr, isZeroed);
  5201. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfFunctionInfo(), functionProxyOpnd, insertBeforeInstr, isZeroed);
  5202. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfEnvironment(), envOpnd, insertBeforeInstr, isZeroed);
  5203. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), insertBeforeInstr, isZeroed);
  5204. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), insertBeforeInstr, isZeroed);
  5205. }
  5206. void
  5207. Lowerer::GenerateStackScriptFunctionInit(IR::RegOpnd * regOpnd, Js::FunctionProxyPtrPtr nestedProxy, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr)
  5208. {
  5209. Func * func = this->m_func;
  5210. GenerateScriptFunctionInit(regOpnd,
  5211. LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction),
  5212. nestedProxy, envOpnd, insertBeforeInstr);
  5213. InsertMove(IR::IndirOpnd::New(regOpnd, Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func),
  5214. IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5215. }
  5216. void
  5217. Lowerer::EnsureStackFunctionListStackSym()
  5218. {
  5219. Func * func = this->m_func;
  5220. Assert(func->HasAnyStackNestedFunc());
  5221. #if defined(_M_IX86) || defined(_M_X64)
  5222. Assert(func->m_localStackHeight == (func->HasArgumentSlot()? MachArgsSlotOffset : 0));
  5223. StackSym * stackFunctionListStackSym = StackSym::New(TyMachPtr, func);
  5224. func->StackAllocate(stackFunctionListStackSym, sizeof(Js::ScriptFunction *));
  5225. nextStackFunctionOpnd = IR::SymOpnd::New(stackFunctionListStackSym, TyMachPtr, func);
  5226. #else
  5227. Assert(func->m_localStackHeight == 0);
  5228. nextStackFunctionOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(NULL, FRAME_REG, TyMachReg, func),
  5229. -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachPtr, func);
  5230. #endif
  5231. }
// Reserves stack slots for the local frame display and the local closure so they
// can live on the stack. NOTE: allocation order determines the slots' offsets in
// the frame; do not reorder these calls.
void
Lowerer::AllocStackClosure()
{
    m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
    m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
}
  5238. void
  5239. Lowerer::EnsureZeroLastStackFunctionNext()
  5240. {
  5241. Assert(nextStackFunctionOpnd != nullptr);
  5242. Func * func = this->m_func;
  5243. IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
  5244. InsertMove(nextStackFunctionOpnd, IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5245. }
  5246. IR::Instr *
  5247. Lowerer::GenerateNewStackScFunc(IR::Instr * newScFuncInstr)
  5248. {
  5249. Assert(newScFuncInstr->m_func->DoStackNestedFunc());
  5250. Func * func = newScFuncInstr->m_func;
  5251. uint index = newScFuncInstr->GetSrc1()->AsIntConstOpnd()->AsUint32();
  5252. Assert(index < func->GetJnFunction()->GetNestedCount());
  5253. Js::FunctionProxyPtrPtr nestedProxy = func->GetJnFunction()->GetNestedFuncReference(index);
  5254. // the stackAllocate Call below for this sym is passing a size that is not represented by any IRType and hence passing TyMisc for the constructor
  5255. StackSym * stackSym = StackSym::New(TyMisc, func);
  5256. // ScriptFunction and it's next pointer
  5257. this->m_func->StackAllocate(stackSym, sizeof(Js::StackScriptFunction) + sizeof(Js::StackScriptFunction *));
  5258. IR::Opnd * envOpnd = newScFuncInstr->GetSrc2();
  5259. GenerateStackScriptFunctionInit(stackSym, nestedProxy);
  5260. IR::LabelInstr * labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  5261. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
  5262. InsertTestBranch(IR::MemRefOpnd::New(func->GetJnFunction()->GetAddressOfFlags(), TyInt8, func),
  5263. IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, func, true),
  5264. Js::OpCode::BrEq_A, labelNoStackFunc, newScFuncInstr);
  5265. InsertMove(IR::SymOpnd::New(stackSym, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
  5266. envOpnd,
  5267. newScFuncInstr);
  5268. IR::Instr * lea =
  5269. InsertLea(newScFuncInstr->GetDst()->AsRegOpnd(), IR::SymOpnd::New(stackSym, TyMachPtr, func), newScFuncInstr);
  5270. InsertBranch(Js::OpCode::Br, labelDone, newScFuncInstr);
  5271. newScFuncInstr->InsertBefore(labelNoStackFunc);
  5272. newScFuncInstr->InsertAfter(labelDone);
  5273. return lea;
  5274. }
  5275. IR::Instr *
  5276. Lowerer::LowerNewScFunc(IR::Instr * newScFuncInstr)
  5277. {
  5278. IR::Instr *stackNewScFuncInstr = nullptr;
  5279. if (newScFuncInstr->m_func->DoStackNestedFunc())
  5280. {
  5281. stackNewScFuncInstr = GenerateNewStackScFunc(newScFuncInstr);
  5282. }
  5283. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5284. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5285. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5286. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScFunc );
  5287. return stackNewScFuncInstr == nullptr? instrPrev : stackNewScFuncInstr;
  5288. }
  5289. IR::Instr *
  5290. Lowerer::LowerNewScGenFunc(IR::Instr * newScFuncInstr)
  5291. {
  5292. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5293. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5294. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5295. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScGenFunc );
  5296. return instrPrev;
  5297. }
  5298. ///----------------------------------------------------------------------------
  5299. ///
  5300. /// Lowerer::LowerScopedLdFld
  5301. ///
  5302. /// Lower a load instruction that takes an additional instance to use as a
  5303. /// a default if the scope chain provided doesn't contain the property.
  5304. ///
  5305. ///----------------------------------------------------------------------------
  5306. IR::Instr *
  5307. Lowerer::LowerScopedLdFld(IR::Instr * ldFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache)
  5308. {
  5309. IR::Opnd *src;
  5310. IR::Instr *instrPrev = ldFldInstr->m_prev;
  5311. if(!withInlineCache)
  5312. {
  5313. LoadScriptContext(ldFldInstr);
  5314. }
  5315. src = ldFldInstr->UnlinkSrc2();
  5316. AssertMsg(src->IsRegOpnd(), "Expected reg opnd as src2");
  5317. instrPrev = m_lowererMD.LoadHelperArgument(ldFldInstr, src);
  5318. src = ldFldInstr->UnlinkSrc1();
  5319. AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
  5320. this->LoadPropertySymAsArgument(ldFldInstr, src);
  5321. if (withInlineCache)
  5322. {
  5323. AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
  5324. m_lowererMD.LoadHelperArgument(
  5325. ldFldInstr,
  5326. IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
  5327. // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
  5328. this->m_lowererMD.LoadHelperArgument(ldFldInstr, this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd()));
  5329. m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
  5330. }
  5331. m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod);
  5332. return instrPrev;
  5333. }
  5334. ///----------------------------------------------------------------------------
  5335. ///
  5336. /// Lowerer::LowerScopedLdInst
  5337. ///
/// Lower a load instruction that takes an additional instance to use as
/// a default if the scope chain provided doesn't contain the property.
  5340. ///
  5341. ///----------------------------------------------------------------------------
// Lowers a scoped load with an out-param result: pushes (last to first) the
// scriptContext, the address of a stack slot the helper writes the result into,
// the function's root object address, and the property sym, then calls the
// helper and copies the out value from the stack slot into dstSym's register.
IR::Instr *
Lowerer::LowerScopedLdInst(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    IR::Opnd *src;
    IR::Instr *instrPrev;

    // last argument is the scriptContext
    instrPrev = LoadScriptContext(instr);

    src = instr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected Reg opnd as src2");

    // __out Var*. The StackSym is allocated in irbuilder, and here we need to insert a lea
    StackSym* dstSym = src->GetStackSym();
    IR::Instr *load = this->m_lowererMD.LoadStackAddress(dstSym);
    instr->InsertBefore(load);
    IR::Opnd* tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instr, tempOpnd);

    // now 3rd last argument is the rootObject of the function. Need to add addrOpnd to
    // pass in the address of the rootObject.
    IR::Opnd * srcOpnd;
    Js::RootObjectBase * rootObject = this->m_func->GetJnFunction()->GetRootObject();
    srcOpnd = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, instr->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, srcOpnd);

    // no change, the property field built from irbuilder.
    src = instr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(instr, src);

    instrPrev = m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    // After the call, copy the helper's out value from the stack slot into the register.
    IR::RegOpnd* regOpnd = IR::RegOpnd::New(dstSym, TyVar, this->m_func);
    IR::SymOpnd*symOpnd = IR::SymOpnd::New(dstSym, TyVar, this->m_func);
    this->m_lowererMD.CreateAssign(regOpnd, symOpnd, instrPrev);

    return instrPrev;
}
  5373. IR::Instr *
  5374. Lowerer::LowerScopedDelFld(IR::Instr * delFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache, bool strictMode)
  5375. {
  5376. IR::Instr *instrPrev;
  5377. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5378. if (strictMode)
  5379. {
  5380. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5381. }
  5382. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5383. LowerScopedLdFld(delFldInstr, helperMethod, withInlineCache);
  5384. return instrPrev;
  5385. }
// Lowers a profiling field store (Init/St/StSuper/StRoot variants) into a
// call to the matching ProfilingHelpers::*_Jit helper. Returns the
// instruction preceding the lowered sequence.
IR::Instr *
Lowerer::LowerProfiledStFld(IR::JitProfilingInstr *stFldInstr, Js::PropertyOperationFlags flags)
{
    Assert(stFldInstr->profileId == Js::Constants::NoProfileId);

    IR::Instr *const instrPrev = stFldInstr->m_prev;

    /*
        void ProfilingHelpers::ProfiledInitFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStSuperFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer,
            const Var thisInstance)
        {
    */

    // Load the helper arguments; see the signatures above. StSuperFld carries
    // the 'this' instance as src2 and gets one extra argument.
    m_lowererMD.LoadHelperArgument(stFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }
    m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc1());
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    m_lowererMD.LoadHelperArgument(
        stFldInstr,
        IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
    LoadPropertySymAsArgument(stFldInstr, dst);

    // Select the profiling helper: opcode decides Init/StSuper, otherwise the
    // root-object and strict-mode flags pick one of four StFld helpers.
    IR::JnHelperMethod helper;
    switch (stFldInstr->m_opcode)
    {
        case Js::OpCode::InitFld:
        case Js::OpCode::InitRootFld:
            helper = IR::HelperProfiledInitFld;
            break;

        case Js::OpCode::StSuperFld:
            helper = IR::HelperProfiledStSuperFld;
            break;

        default:
            helper =
                flags & Js::PropertyOperation_Root
                    ? flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStRootFld_Strict : IR::HelperProfiledStRootFld
                    : flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStFld_Strict : IR::HelperProfiledStFld;
            break;
    }

    stFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(stFldInstr, 0);

    return instrPrev;
}
  5446. ///----------------------------------------------------------------------------
  5447. ///
  5448. /// Lowerer::LowerStFld
  5449. ///
  5450. ///----------------------------------------------------------------------------
// Lowers a field store to a helper call, optionally passing inline-cache
// arguments (withInlineCache) and property-operation flags (withPutFlags).
// If a runtime polymorphic inline cache is present and a distinct polymorphic
// helper was supplied, that helper/cache pair is used instead. Profiling
// instructions are diverted to LowerProfiledStFld. Returns the instruction
// preceding the lowered sequence.
IR::Instr *
Lowerer::LowerStFld(
    IR::Instr * stFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool withInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper,
    bool withPutFlags,
    Js::PropertyOperationFlags flags)
{
    if (stFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different.
        return this->LowerProfiledStFld(stFldInstr->AsJitProfilingInstr(), flags);
    }

    IR::Instr *instrPrev = stFldInstr->m_prev;
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");

    IR::Opnd * inlineCacheOpnd = nullptr;
    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // Prefer the polymorphic helper with its runtime polymorphic cache.
            Js::PolymorphicInlineCache * polymorphicInlineCache = dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed.
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd(), isHelper);
        }
    }
    if (withPutFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    // Load the value (and, for StSuperFld, the 'this' instance from src2),
    // then the property sym (owner + propertyId).
    IR::Opnd *src = stFldInstr->UnlinkSrc1();
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }
    m_lowererMD.LoadHelperArgument(stFldInstr, src);
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        Assert(inlineCacheOpnd != nullptr);

        // Inline-cache helpers additionally take the cache index, the cache
        // itself, and the function body.
        this->m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        this->m_lowererMD.LoadHelperArgument(stFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    IR::RegOpnd *opndBase = dst->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod, labelBailOut, opndBase, dst->AsSymOpnd()->IsPropertySymOpnd() ? dst->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
// Lowers a StFld end-to-end: tries the custom-property fast path, then the
// cached-type (object type specialization) path, then the generic fast path
// (when emitFastPath allows), falling back to the appropriate helper call
// with the mono/poly helper pair for the path taken. Returns the instruction
// preceding the lowered sequence.
IR::Instr* Lowerer::GenerateCompleteStFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    // A store that calls an accessor must not carry an implicit-call bailout kind.
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }

    IR::Instr* prevInstr = instr->m_prev;

    IR::LabelInstr* labelBailOut = nullptr;
    IR::LabelInstr* labelHelper = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;

    if(emitFastPath && GenerateFastStFldForCustomProperty(instr, &labelHelper))
    {
        // Custom-property fast path emitted. If it produced a helper label,
        // the slow path still needs the full helper lowering behind it;
        // otherwise the store was handled entirely and can be removed.
        if(labelHelper)
        {
            Assert(labelHelper->isOpHelper);
            instr->InsertBefore(labelHelper);
            prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
        else
        {
            instr->Remove();
            return prevInstr;
        }
    }
    else if (this->GenerateStFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
    {
        // Store fully lowered via cached type info; nothing left to emit.
        Assert(labelHelper == nullptr);
        return prevInstr;
    }
    else if (emitFastPath)
    {
        // Generic inline-cache fast path; on failure fall back to the
        // "after fast path" helpers behind the helper label.
        if (!GenerateFastStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper, withPutFlags, flags))
        {
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = this->LowerStFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
    }
    else
    {
        // No fast path requested: straight helper call (labelHelper may have
        // been produced by GenerateStFldWithCachedType returning false).
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, monoHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
    }

    return prevInstr;
}
  5568. void
  5569. Lowerer::GenerateDirectFieldStore(IR::Instr* instrStFld, IR::PropertySymOpnd* propertySymOpnd)
  5570. {
  5571. Func* func = instrStFld->m_func;
  5572. IR::Opnd *opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrStFld, propertySymOpnd);
  5573. // Store the value to the slot, getting the slot index from the cache.
  5574. uint16 index = propertySymOpnd->GetSlotIndex();
  5575. Assert(index != -1);
  5576. #ifdef RECYCLER_RECYCLER_WRITE_BARRIER_JIT
  5577. if (opndSlotArray->IsRegOpnd())
  5578. {
  5579. IR::IndirOpnd * opndDst = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  5580. LowererMD::GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  5581. }
  5582. else
  5583. {
  5584. Assert(opndSlotArray->IsMemRefOpnd());
  5585. IR::MemRefOpnd * opndDst = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  5586. LowererMD::GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  5587. }
  5588. #else
  5589. IR::Opnd *opnd;
  5590. if (opndSlotArray->IsRegOpnd())
  5591. {
  5592. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  5593. }
  5594. else
  5595. {
  5596. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  5597. }
  5598. this->m_lowererMD.CreateAssign(opnd, instrStFld->GetSrc1(), instrStFld);
  5599. #endif
  5600. }
// Attempts to lower a field store using cached object-type information
// (object type specialization). Returns true when the store was fully lowered
// here (direct slot store, possibly with type checks / type-check bailout).
// Returns false when the caller must continue lowering; in that case
// *continueAsHelperOut and *labelHelperOut may tell the caller to continue
// under a helper label, and *typeOpndOut may hold the register containing the
// object's type if a type check was emitted here.
bool
Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
{
    IR::Instr *instr;
    IR::RegOpnd *typeOpnd = nullptr;
    IR::LabelInstr* labelObjCheckFailed  = nullptr;
    IR::LabelInstr *labelTypeCheckFailed = nullptr;
    IR::LabelInstr *labelBothTypeChecksFailed = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    // Initialize all out-params up front so every early return is well-defined.
    Assert(continueAsHelperOut != nullptr);
    *continueAsHelperOut = false;

    Assert(labelHelperOut != nullptr);
    *labelHelperOut = nullptr;

    Assert(typeOpndOut != nullptr);
    *typeOpndOut = nullptr;

    Assert(instrStFld->GetDst()->IsSymOpnd());
    if (!instrStFld->GetDst()->AsSymOpnd()->IsPropertySymOpnd() || !instrStFld->GetDst()->AsPropertySymOpnd()->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    IR::PropertySymOpnd *propertySymOpnd = instrStFld->GetDst()->AsPropertySymOpnd();

    // If we have any object type spec info, we better not believe this is a load from prototype, since this is a store
    // and we never share inline caches between loads and stores.
    Assert(!propertySymOpnd->HasObjTypeSpecFldInfo() || !propertySymOpnd->IsLoadedFromProto());

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");

    if (!propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
    {
        return false;
    }

    Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind())));

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Field store: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySymOpnd->m_sym->AsPropertySym()->m_propertyId)->GetBuffer(),
        this->m_func->GetJnFunction()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(),
        propertySymOpnd->IsTypeChecked() ? L"true" : L"false");

    if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
    {
        propertySymOpnd->UpdateSlotForFinalType();
    }

    Func* func = instrStFld->m_func;

    // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
    // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.

    bool hasTypeCheckBailout = instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind());

    // If the type hasn't been checked upstream, see if it makes sense to check it here.
    bool isTypeChecked = propertySymOpnd->IsTypeChecked();
    if (!isTypeChecked)
    {
        // If the initial type has been checked, we can do a hard coded type transition without any type checks
        // (see GenerateStFldWithCachedFinalType), which is always worth doing, even if the type is not needed
        // downstream. We're not introducing any additional bailouts.
        if (propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType() && !propertySymOpnd->IsTypeDead())
        {
            // We have a final type in hand, so we can JIT (most of) the type transition work.
            return this->GenerateStFldWithCachedFinalType(instrStFld, propertySymOpnd);
        }

        if (propertySymOpnd->HasTypeMismatch())
        {
            // So we have a type mismatch, which happens when the type (and the type without property if ObjTypeSpecStore
            // is on) on this instruction didn't match the live type value according to the flow. We must have hit some
            // stale inline cache (perhaps inlined from a different function, or on a code path not taken for a while).
            // Either way, we know exactly what type the object must have at this point (fully determined by flow), but
            // we don't know whether that type already has the property we're storing here. All in all, we know exactly
            // what shape the object will have after this operation, but we're not sure what label (type) to give this
            // shape. Thus we can simply let the fast path do its thing based on the live inline cache. The downstream
            // instructions relying only on this shape (loads and stores) are safe, and those that need the next type
            // (i.e. adds) will do the same thing as this instruction.
            return false;
        }

        // If we're still here then we must need a primary type check on this instruction to protect
        // a sequence of field operations downstream, or a local type check for an isolated field store.
        Assert(propertySymOpnd->NeedsPrimaryTypeCheck() || propertySymOpnd->NeedsLocalTypeCheck());

        labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelBothTypeChecksFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        // With a type-check bailout, a failed object check goes straight to the
        // shared bailout label; otherwise it gets its own label so the caller
        // can continue as helper from there.
        labelObjCheckFailed = hasTypeCheckBailout ? labelBothTypeChecksFailed : IR::LabelInstr::New(Js::OpCode::Label, func, true);
        typeOpnd = this->GenerateCachedTypeCheck(instrStFld, propertySymOpnd, labelObjCheckFailed, labelBothTypeChecksFailed, labelTypeCheckFailed);
        *typeOpndOut = typeOpnd;
    }

    // Either we are protected by a type check upstream or we just emitted a type check above,
    // now it's time to store the field value.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);

    // If we are protected by a type check upstream, we don't need a bailout or helper here, delete the instruction
    // and return "true" to indicate that we succeeded in eliminating it.
    if (isTypeChecked)
    {
        Assert(labelTypeCheckFailed == nullptr && labelBothTypeChecksFailed == nullptr);
        AssertMsg(!instrStFld->HasBailOutInfo(), "Why does a direct field store have bailout?");
        instrStFld->Remove();
        return true;
    }

    // Otherwise, branch around the helper on successful type check.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
    instrStFld->InsertBefore(instr);

    // On failed type check, try the type without property if we've got one.
    instrStFld->InsertBefore(labelTypeCheckFailed);

    // Caution, this is one of the dusty corners of the JIT. We only get here if this is an isolated StFld which adds a property, or
    // ObjTypeSpecStore is off. In the former case no downstream operations depend on the final type produced here, and we can fall
    // back on live cache and helper if the type doesn't match. In the latter we may have a cache with type transition, which must
    // produce a value for the type after transition, because that type is consumed downstream. Thus, if the object's type doesn't
    // match either the type with or the type without the property we're storing, we must bail out here.
    bool emitAddProperty = propertySymOpnd->IsMono() && propertySymOpnd->HasInitialType();

    if (emitAddProperty)
    {
        GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, typeOpnd, labelBothTypeChecksFailed);
        GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetType());
        instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
        instrStFld->InsertBefore(instr);
    }

    instrStFld->InsertBefore(labelBothTypeChecksFailed);
    instrStFld->InsertAfter(labelDone);

    if (hasTypeCheckBailout)
    {
        AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
            "Why does a field store have a type check bailout, if its type is dead?");

        if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld)
        {
            // Set the cache index in the bailout info so that the generated code will write it into the
            // bailout record at runtime.
            instrStFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
        }
        else
        {
            Assert(instrStFld->GetBailOutInfo()->polymorphicCacheIndex == propertySymOpnd->m_inlineCacheIndex);
        }

        // Convert the store itself into the bailout.
        instrStFld->m_opcode = Js::OpCode::BailOut;
        instrStFld->FreeSrc1();
        instrStFld->FreeDst();
        this->GenerateBailOut(instrStFld);
        return true;
    }
    else
    {
        // No bailout available: let the caller emit the helper path behind the
        // failed-object-check label.
        *continueAsHelperOut = true;
        Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelBothTypeChecksFailed);
        *labelHelperOut = labelObjCheckFailed;
        return false;
    }
}
// Emits the cached type check for an object-type-specialized operation:
// an object (non-tagged-value) test, then either a direct type-pointer
// comparison, an indirect comparison through a property guard's value, or an
// equivalent-type check via the HelperCheckIfTypeIsEquivalent helper call.
// Returns the register holding the object's loaded type so downstream checks
// can reuse it.
IR::RegOpnd *
Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr* labelObjCheckFailed, IR::LabelInstr *labelTypeCheckFailed, IR::LabelInstr *labelSecondChance)
{
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());

    Func* func = instrChk->m_func;
    IR::RegOpnd *regOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(func);
    regOpnd->SetValueType(propertySymOpnd->GetPropertyOwnerValueType());

    // Only emit the tagged-value test if the owner isn't already known to be an object.
    if (!regOpnd->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(regOpnd, instrChk, labelObjCheckFailed);
    }

    IR::Opnd *expectedTypeOpnd;
    bool emitDirectCheck = true;

    // Note: don't attempt equivalent type check if we're doing a final type optimization or if we have a monomorphic
    // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks
    // repeatedly and never rejit.
    bool doEquivTypeCheck =
        propertySymOpnd->HasEquivalentTypeSet() &&
        !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) &&
        !propertySymOpnd->MustDoMonoCheck() &&
        (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut());
    Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut());

    Js::Type* type = doEquivTypeCheck ? propertySymOpnd->GetFirstEquivalentType() : propertySymOpnd->GetType();

    // A guard (if one is created) lets invalidation be observed through its
    // value slot; without one, compare against the type pointer directly.
    Js::PropertyGuard* typeCheckGuard = doEquivTypeCheck ?
        (Js::PropertyGuard*)CreateEquivalentTypeGuardAndLinkToGuardedProperties(type, propertySymOpnd) :
        (Js::PropertyGuard*)CreateTypePropertyGuardForGuardedProperties(type, propertySymOpnd);

    if (typeCheckGuard == nullptr)
    {
        Assert(type != nullptr);
        expectedTypeOpnd = IR::AddrOpnd::New(type, IR::AddrOpndKindDynamicType, func, true);
    }
    else
    {
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typeCheckGuard->GetAddressOfValue()), TyMachPtr, func, IR::AddrOpndKindDynamicGuardValueRef);
        emitDirectCheck = false;
    }

    if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func))
    {
        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, L"Emitted %s type check for type 0x%p",
            emitDirectCheck ? L"direct" : propertySymOpnd->IsPoly() ? L"equivalent" : L"indirect", type);
#if DBG
        if (propertySymOpnd->GetGuardedPropOps() != nullptr)
        {
            Output::Print(L" guarding operations:\n    ");
            propertySymOpnd->GetGuardedPropOps()->Dump();
        }
        else
        {
            Output::Print(L"\n");
        }
#else
        Output::Print(L"\n");
#endif
        Output::Flush();
    }

    IR::RegOpnd* typeOpnd = IR::RegOpnd::New(TyMachReg, func);

    // Load the object's type: from memory directly if the owner is a known
    // (non-numeric) constant object, otherwise through the owner register.
    IR::Opnd *sourceType;
    if (regOpnd->m_sym->IsConst() && !regOpnd->m_sym->IsIntConst() && !regOpnd->m_sym->IsFloatConst())
    {
        sourceType = IR::MemRefOpnd::New((BYTE*)regOpnd->m_sym->GetConstAddress() +
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func, IR::AddrOpndKindDynamicObjectTypeRef);
    }
    else
    {
        sourceType = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
    }
    m_lowererMD.CreateAssign(typeOpnd, sourceType, instrChk);

    if (doEquivTypeCheck)
    {
        // TODO (ObjTypeSpec): For isolated equivalent type checks it would be good to emit a check if the cache is still valid, and
        // if not go straight to live polymorphic cache. This way we wouldn't have to bail out and re-JIT, and also wouldn't continue
        // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
        // type in the equivalent type cache.
        IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        // Fast case: the type matches the guard value exactly; otherwise fall
        // into the equivalence-check helper call.
        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);

        IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
        InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);

        instrChk->InsertBefore(labelCheckEquivalentType);

        this->m_lowererMD.LoadHelperArgument(instrChk, IR::AddrOpnd::New((Js::Var)typeCheckGuard, IR::AddrOpndKindDynamicTypeCheckGuard, func, true));
        this->m_lowererMD.LoadHelperArgument(instrChk, typeOpnd);

        IR::RegOpnd* equivalentTypeCheckResultOpnd = IR::RegOpnd::New(TyUint8, func);
        IR::HelperCallOpnd* equivalentTypeCheckHelperCallOpnd = IR::HelperCallOpnd::New(IR::HelperCheckIfTypeIsEquivalent, func);
        IR::Instr* equivalentTypeCheckCallInstr = IR::Instr::New(Js::OpCode::Call, equivalentTypeCheckResultOpnd, equivalentTypeCheckHelperCallOpnd, func);
        instrChk->InsertBefore(equivalentTypeCheckCallInstr);
        this->m_lowererMD.LowerCall(equivalentTypeCheckCallInstr, 0);

        InsertTestBranch(equivalentTypeCheckResultOpnd, equivalentTypeCheckResultOpnd, Js::OpCode::BrEq_A, labelTypeCheckFailed, instrChk);

        // TODO (ObjTypeSpec): Consider emitting a shared bailout to which a specific bailout kind is written at runtime. This would allow us to distinguish
        // between non-equivalent type and other cases, such as invalidated guard (due to fixed field overwrite, perhaps) or too much thrashing on the
        // equivalent type cache. We could determine bailout kind based on the value returned by the helper. In the case of cache thrashing we could just
        // turn off the whole optimization for a given function.

        instrChk->InsertBefore(labelTypeCheckSucceeded);
    }
    else
    {
        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
    }

    // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
    // as long as there are other objects with types equivalent on the properties referenced by this code. The type is kept alive until entry point
    // installation by the JIT transfer data, and after that by the equivalent type cache, so it will stay alive unless or until it gets evicted
    // from the cache.
    if (!doEquivTypeCheck)
    {
        PinTypeRef(type, type, instrChk, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
    }

    return typeOpnd;
}
  5857. void
  5858. Lowerer::PinTypeRef(Js::Type* type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
  5859. {
  5860. this->m_func->PinTypeRef(typeRef);
  5861. if (PHASE_TRACE(Js::TracePinnedTypesPhase, this->m_func))
  5862. {
  5863. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  5864. Output::Print(L"PinnedTypes: function %s(%s) instr %s property %s(#%u) pinned %s reference 0x%p to type 0x%p.\n",
  5865. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  5866. Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId,
  5867. typeRef == type ? L"strong" : L"weak", typeRef, type);
  5868. Output::Flush();
  5869. }
  5870. }
  5871. void
  5872. Lowerer::GenerateCachedTypeWithoutPropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::Opnd *typeOpnd, IR::LabelInstr *labelTypeCheckFailed)
  5873. {
  5874. Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
  5875. Assert(propertySymOpnd->HasInitialType());
  5876. Js::Type* typeWithoutProperty = propertySymOpnd->GetInitialType();
  5877. // We should never add properties to objects of static types.
  5878. Assert(Js::DynamicType::Is(typeWithoutProperty->GetTypeId()));
  5879. if (typeOpnd == nullptr)
  5880. {
  5881. // No opnd holding the type was passed in, so we have to load the type here.
  5882. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  5883. if (!baseOpnd->IsNotTaggedValue())
  5884. {
  5885. m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelTypeCheckFailed);
  5886. }
  5887. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  5888. typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  5889. m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);
  5890. }
  5891. Js::JitTypePropertyGuard* typePropertyGuard = CreateTypePropertyGuardForGuardedProperties(typeWithoutProperty, propertySymOpnd);
  5892. IR::Opnd *expectedTypeOpnd;
  5893. if (typePropertyGuard)
  5894. {
  5895. bool emitDirectCheck = true;
  5896. Assert(typePropertyGuard != nullptr);
  5897. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  5898. expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typePropertyGuard->GetAddressOfValue()), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
  5899. emitDirectCheck = false;
  5900. OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, L"Emitted %s type check for type 0x%p.\n",
  5901. emitDirectCheck ? L"direct" : L"indirect", typeWithoutProperty);
  5902. }
  5903. else
  5904. {
  5905. expectedTypeOpnd = IR::AddrOpnd::New(typeWithoutProperty, IR::AddrOpndKindDynamicType, m_func, true);
  5906. }
  5907. InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelTypeCheckFailed, instrInsert);
  5908. // Technically, it should be enough to pin the final type, because it should keep all of its predecessors alive, but
  5909. // just to be extra cautious, let's pin the initial type as well.
  5910. PinTypeRef(typeWithoutProperty, typeWithoutProperty, instrInsert, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
  5911. }
// Thin wrapper: a fixed-field guard check is emitted as an ordinary property
// guard check, branching to labelBailOut when the guard fails.
void
Lowerer::GenerateFixedFieldGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    GeneratePropertyGuardCheck(insertPointInstr, propertySymOpnd, labelBailOut);
}
// Creates (at most) one single-type property guard for the given type and
// links it to every guarded property of propertySymOpnd, provided the entry
// point supports shared property guards. Under lazy fixed-type bailout the
// properties are recorded for lazy bailout instead and no guard is created.
// Returns the guard, or nullptr if none was created.
Js::JitTypePropertyGuard*
Lowerer::CreateTypePropertyGuardForGuardedProperties(Js::Type* type, IR::PropertySymOpnd* propertySymOpnd)
{
    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->GetGuardedPropOps() != nullptr);

    Js::JitTypePropertyGuard* guard = nullptr;

    Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();

    if (entryPointInfo->HasSharedPropertyGuards())
    {
        // Consider (ObjTypeSpec): Because we allocate these guards from the JIT thread we can't share guards for the same type across multiple functions.
        // This leads to proliferation of property guards on the thread context. The alternative would be to pre-allocate shared (by value) guards
        // from the thread context during work item creation. We would create too many of them (because some types aren't actually used as guards),
        // but we could share a guard for a given type between functions. This may ultimately be better.
        LinkGuardToGuardedProperties(entryPointInfo, propertySymOpnd->GetGuardedPropOps(), [this, type, &guard](Js::PropertyId propertyId)
        {
            if (DoLazyFixedTypeBailout(this->m_func))
            {
                // Lazy path: just remember the property; no guard object.
                this->m_func->lazyBailoutProperties.Item(propertyId);
            }
            else
            {
                // Create the guard lazily on the first guarded property.
                if (guard == nullptr)
                {
                    guard = this->m_func->GetOrCreateSingleTypeGuard(type);
                }

                if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
                {
                    wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    wchar_t workItemName[256];
                    this->m_func->m_workItem->GetDisplayName(workItemName, _countof(workItemName));
                    Output::Print(L"ObjTypeSpec: function %s(%s) registered guard 0x%p with value 0x%p for property %s (%u).\n",
                        workItemName, this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
                        guard, guard->GetValue(), this->GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
                    Output::Flush();
                }

                this->m_func->EnsurePropertyGuardsByPropertyId();
                this->m_func->LinkGuardToPropertyId(propertyId, guard);
            }
        });
    }

    return guard;
}
  5959. Js::JitEquivalentTypeGuard*
  5960. Lowerer::CreateEquivalentTypeGuardAndLinkToGuardedProperties(Js::Type* type, IR::PropertySymOpnd* propertySymOpnd)
  5961. {
  5962. // We should always have a list of guarded properties.
  5963. Assert(propertySymOpnd->HasObjTypeSpecFldInfo() && propertySymOpnd->HasEquivalentTypeSet() && propertySymOpnd->GetGuardedPropOps());
  5964. Js::JitEquivalentTypeGuard* guard = this->m_func->CreateEquivalentTypeGuard(type, propertySymOpnd->GetObjTypeSpecFldId());
  5965. Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
  5966. if (entryPointInfo->HasSharedPropertyGuards())
  5967. {
  5968. LinkGuardToGuardedProperties(entryPointInfo, propertySymOpnd->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
  5969. {
  5970. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
  5971. {
  5972. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  5973. Output::Print(L"ObjTypeSpec: function %s(%s) registered equivalent type spec guard 0x%p with value 0x%p for property %s (%u).\n",
  5974. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  5975. guard, guard->GetValue(), GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
  5976. Output::Flush();
  5977. }
  5978. this->m_func->EnsurePropertyGuardsByPropertyId();
  5979. this->m_func->LinkGuardToPropertyId(propertyId, guard);
  5980. });
  5981. }
  5982. Assert(guard->GetCache() != nullptr);
  5983. Js::EquivalentTypeCache* cache = guard->GetCache();
  5984. // TODO (ObjTypeSpec): If we delayed populating the types until encoder, we could bulk allocate all equivalent type caches
  5985. // in one block from the heap. This would allow us to not allocate them from the native code data allocator and free them
  5986. // when no longer needed. However, we would need to store the global property operation ID in the guard, so we can look up
  5987. // the info in the encoder. Perhaps we could overload the cache pointer to be the ID until encoder.
  5988. // Copy types from the type set to the guard's cache
  5989. Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
  5990. uint16 cachedTypeCount = typeSet->GetCount() < EQUIVALENT_TYPE_CACHE_SIZE ? typeSet->GetCount() : EQUIVALENT_TYPE_CACHE_SIZE;
  5991. for (uint16 ti = 0; ti < cachedTypeCount; ti++)
  5992. {
  5993. cache->types[ti] = typeSet->GetType(ti);
  5994. }
  5995. // Populate property ID and slot index arrays on the guard's cache. We iterate over the
  5996. // bit vector of property operations protected by this guard, but some property operations
  5997. // may be referring to the same property ID (but not share the same cache). We skip
  5998. // redundant entries by maintaining a hash set of property IDs we've already encountered.
  5999. auto propOps = propertySymOpnd->GetGuardedPropOps();
  6000. uint propOpCount = propOps->Count();
  6001. bool isTypeStatic = Js::StaticType::Is(type->GetTypeId());
  6002. JsUtil::BaseDictionary<Js::PropertyId, Js::EquivalentPropertyEntry*, JitArenaAllocator> propIds(this->m_alloc, propOpCount);
  6003. Js::EquivalentPropertyEntry* properties = AnewArray(this->m_alloc, Js::EquivalentPropertyEntry, propOpCount);
  6004. uint propIdCount = 0;
  6005. FOREACH_BITSET_IN_SPARSEBV(propOpId, propOps)
  6006. {
  6007. Js::ObjTypeSpecFldInfo* propOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propOpId);
  6008. Js::PropertyId propertyId = propOpInfo->GetPropertyId();
  6009. Js::PropertyIndex propOpIndex = Js::Constants::NoSlot;
  6010. bool hasFixedValue = propOpInfo->HasFixedValue();
  6011. if (hasFixedValue)
  6012. {
  6013. cache->SetHasFixedValue();
  6014. }
  6015. bool isLoadedFromProto = propOpInfo->IsLoadedFromProto();
  6016. if (isLoadedFromProto)
  6017. {
  6018. cache->SetIsLoadedFromProto();
  6019. }
  6020. else
  6021. {
  6022. propOpIndex = propOpInfo->GetSlotIndex();
  6023. }
  6024. bool propOpUsesAuxSlot = propOpInfo->UsesAuxSlot();
  6025. AssertMsg(!isTypeStatic || !propOpInfo->IsBeingStored(), "Why are we storing a field to an object of static type?");
  6026. Js::EquivalentPropertyEntry* entry;
  6027. if (propIds.TryGetValue(propertyId, &entry))
  6028. {
  6029. if (propOpIndex == entry->slotIndex && propOpUsesAuxSlot == entry->isAuxSlot)
  6030. {
  6031. entry->mustBeWritable |= propOpInfo->IsBeingStored();
  6032. }
  6033. else
  6034. {
  6035. // Due to inline cache sharing we have the same property accessed using different caches
  6036. // with inconsistent info. This means a guaranteed bailout on the equivalent type check.
  6037. // We'll just let it happen and turn off the optimization for this function. We could avoid
  6038. // this problem by tracking property information on the value type in glob opt.
  6039. if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->m_func))
  6040. {
  6041. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6042. Js::FunctionBody* topFunctionBody = this->m_func->GetJnFunction();
  6043. Js::ScriptContext* scriptContext = topFunctionBody->GetScriptContext();
  6044. Output::Print(L"EquivObjTypeSpec: top function %s (%s): duplicate property clash on %s(#%d) \n",
  6045. topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer), propertyId, scriptContext->GetPropertyNameLocked(propertyId)->GetBuffer());
  6046. Output::Flush();
  6047. }
  6048. Assert(propIdCount < propOpCount);
  6049. __analysis_assume(propIdCount < propOpCount);
  6050. entry = &properties[propIdCount++];
  6051. entry->propertyId = propertyId;
  6052. entry->slotIndex = propOpIndex;
  6053. entry->isAuxSlot = propOpUsesAuxSlot;
  6054. entry->mustBeWritable = propOpInfo->IsBeingStored();
  6055. }
  6056. }
  6057. else
  6058. {
  6059. Assert(propIdCount < propOpCount);
  6060. __analysis_assume(propIdCount < propOpCount);
  6061. entry = &properties[propIdCount++];
  6062. entry->propertyId = propertyId;
  6063. entry->slotIndex = propOpIndex;
  6064. entry->isAuxSlot = propOpUsesAuxSlot;
  6065. entry->mustBeWritable = propOpInfo->IsBeingStored();
  6066. propIds.AddNew(propertyId, entry);
  6067. }
  6068. }
  6069. NEXT_BITSET_IN_SPARSEBV;
  6070. cache->record.propertyCount = propIdCount;
  6071. cache->record.properties = NativeCodeDataNewArray(this->m_func->GetNativeCodeDataAllocator(), Js::EquivalentPropertyEntry, propIdCount);
  6072. memcpy(cache->record.properties, properties, propIdCount * sizeof(Js::EquivalentPropertyEntry));
  6073. return guard;
  6074. }
  6075. bool
  6076. Lowerer::LinkCtorCacheToGuardedProperties(Js::JitTimeConstructorCache* ctorCache)
  6077. {
  6078. // We do not always have guarded properties. If the constructor is empty and the subsequent code doesn't load or store any of
  6079. // the constructed object's properties, or if all inline caches are empty then this ctor cache doesn't guard any properties.
  6080. if (ctorCache->GetGuardedPropOps() == nullptr)
  6081. {
  6082. return false;
  6083. }
  6084. bool linked = false;
  6085. Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
  6086. if (entryPointInfo->HasSharedPropertyGuards())
  6087. {
  6088. linked = LinkGuardToGuardedProperties(entryPointInfo, ctorCache->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
  6089. {
  6090. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
  6091. {
  6092. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6093. Output::Print(L"ObjTypeSpec: function %s(%s) registered ctor cache 0x%p with value 0x%p for property %s (%u).\n",
  6094. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  6095. ctorCache->runtimeCache, ctorCache->type, GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
  6096. Output::Flush();
  6097. }
  6098. this->m_func->EnsureCtorCachesByPropertyId();
  6099. this->m_func->LinkCtorCacheToPropertyId(propertyId, ctorCache);
  6100. });
  6101. }
  6102. return linked;
  6103. }
  6104. template<typename LinkFunc>
  6105. bool
  6106. Lowerer::LinkGuardToGuardedProperties(Js::EntryPointInfo* entryPointInfo, const BVSparse<JitArenaAllocator>* guardedPropOps, LinkFunc link)
  6107. {
  6108. Assert(entryPointInfo != nullptr);
  6109. Assert(entryPointInfo->HasSharedPropertyGuards());
  6110. Assert(guardedPropOps != nullptr);
  6111. bool linked = false;
  6112. // For every entry in the bit vector, register the guard for the corresponding property ID.
  6113. FOREACH_BITSET_IN_SPARSEBV(propertyOpId, guardedPropOps)
  6114. {
  6115. Js::ObjTypeSpecFldInfo* propertyOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propertyOpId);
  6116. Js::PropertyId propertyId = propertyOpInfo->GetPropertyId();
  6117. // It's okay for an equivalent type check to be registered as a guard against a property becoming read-only. This transpires if, there is
  6118. // a different monomorphic type check upstream, which guarantees the actual type of the object needed for the hard-coded type transition,
  6119. // but it is later followed by a sequence of polymorphic inline caches, which do not have that type in the type set. At the beginning of
  6120. // that sequence we'll emit an equivalent type check to verify that the actual type has relevant properties on appropriate slots. Then in
  6121. // the dead store pass we'll walk upwards and encounter this check first, thus we'll drop the guarded properties accumulated thus far
  6122. // (including the one being added) on that check.
  6123. // AssertMsg(!propertyOpInfo->IsBeingAdded() || !isEquivalentTypeGuard, "Why do we have an equivalent type check protecting a property add?");
  6124. if (propertyOpInfo->IsBeingAdded() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->HasFixedValue())
  6125. {
  6126. // Equivalent object type spec only supports fixed fields on prototypes. This is to simplify the slow type equivalence check.
  6127. // See JavascriptOperators::CheckIfTypeIsEquivalent.
  6128. Assert(!propertyOpInfo->IsPoly() || (!propertyOpInfo->HasFixedValue() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->UsesAccessor()));
  6129. if (entryPointInfo->HasSharedPropertyGuard(propertyId))
  6130. {
  6131. link(propertyId);
  6132. linked = true;
  6133. }
  6134. else
  6135. {
  6136. #if TRUE
  6137. AssertMsg(false, "Did we fail to create a shared property guard for a guarded property?");
  6138. #else
  6139. if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
  6140. {
  6141. if (!this->m_func->m_workItem->GetEntryPoint()->HasSharedPropertyGuard(propertyId))
  6142. {
  6143. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6144. Output::Print(L"ObjTypeStore: function %s(%s): no shared property guard for property % (%u).\n",
  6145. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  6146. GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
  6147. Output::Flush();
  6148. }
  6149. }
  6150. #endif
  6151. }
  6152. }
  6153. }
  6154. NEXT_BITSET_IN_SPARSEBV;
  6155. return linked;
  6156. }
  6157. void
  6158. Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
  6159. {
  6160. Js::PropertyGuard* guard = propertySymOpnd->GetPropertyGuard();
  6161. Assert(guard != nullptr);
  6162. if (!DoLazyFixedDataBailout(this->m_func))
  6163. {
  6164. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  6165. IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
  6166. IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New((void*)guard->GetAddressOfValue(), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
  6167. InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
  6168. }
  6169. else
  6170. {
  6171. this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId());
  6172. }
  6173. }
IR::Instr*
Lowerer::GeneratePropertyGuardCheckBailoutAndLoadType(IR::Instr *insertInstr)
{
    // Emits, in order: a property guard check (bails out if invalidated), an
    // object test + type load into the dst (falling back to the static number
    // type for non-objects), and finally converts insertInstr itself into the
    // bailout. Returns the instruction preceding the expanded sequence.
    IR::Instr* instrPrev = insertInstr->m_prev;

    IR::Opnd* numberTypeOpnd = IR::AddrOpnd::New(insertInstr->m_func->GetScriptContext()->GetLibrary()->GetNumberTypeStatic(), IR::AddrOpndKindDynamicType, insertInstr->m_func);
    IR::PropertySymOpnd* propertySymOpnd = insertInstr->GetSrc1()->AsPropertySymOpnd();

    // Helper-path labels are created with isOpHelper == true.
    IR::LabelInstr* labelBailout = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);
    IR::LabelInstr* labelContinue = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func);
    IR::LabelInstr* loadNumberTypeLabel = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);

    // Guard check first: jumps to labelBailout if the guard has been invalidated.
    GeneratePropertyGuardCheck(insertInstr, propertySymOpnd, labelBailout);

    // Load the object's type into dst; non-objects branch to loadNumberTypeLabel.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    GenerateObjectTestAndTypeLoad(insertInstr, baseOpnd, insertInstr->GetDst()->AsRegOpnd(), loadNumberTypeLabel);

    // Main path: skip over the number-type load and the bailout.
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Non-object path: dst = static number type, then skip the bailout.
    insertInstr->InsertBefore(loadNumberTypeLabel);
    this->m_lowererMD.CreateAssign(insertInstr->GetDst(), numberTypeOpnd, insertInstr);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Bailout path: insertInstr itself becomes the BailOut instruction.
    insertInstr->InsertBefore(labelBailout);
    insertInstr->InsertAfter(labelContinue);
    insertInstr->FreeSrc1();
    insertInstr->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(insertInstr);

    return instrPrev;
}
void
Lowerer::GenerateNonWritablePropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    IR::Opnd *opnd;
    IR::Instr *instr;

    // Generate a check for non-writable properties, on the model of the work done by PatchPutValueetc.
    // Inline the check on the bit in the prototype object's type. If that check fails, call the helper.
    // If the helper finds a non-writable property, bail out, as we're counting on being able to add the property.

    Js::Type *typeWithoutProperty = propertySymOpnd->GetInitialType();
    Assert(typeWithoutProperty);
    Js::RecyclableObject *protoObject = typeWithoutProperty->GetPrototype();
    Assert(protoObject);

    // s1 = MOV [proto->type].ptr
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    opnd = IR::MemRefOpnd::New((char*)protoObject + Js::RecyclableObject::GetOffsetOfType(), TyMachReg,
        this->m_func, IR::AddrOpndKindDynamicObjectTypeRef);
    m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);

    // Fast path: if the type's "all writable data properties" bit is set, no
    // helper call is needed.
    //      TEST [s1->areThisAndPrototypesEnsuredToHaveOnlyWritableDataProperties].u8, 1
    //      JNE $continue
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    opnd = IR::IndirOpnd::New(typeOpnd, (int32)Js::Type::OffsetOfWritablePropertiesFlag(), TyUint8, this->m_func);
    InsertTestBranch(opnd, IR::IntConstOpnd::New(1, TyUint8, this->m_func), Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // $Lhelper:
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instrInsert->InsertBefore(labelHelper);

    // Slow path: call the helper to walk the prototype chain; a zero return
    // means a non-writable property was found.
    //      s2 = CALL DoProtoCheck, prototype
    opnd = IR::AddrOpnd::New(protoObject, IR::AddrOpndKindDynamicVar, this->m_func, true);
    m_lowererMD.LoadHelperArgument(instrInsert, opnd);

    opnd = IR::HelperCallOpnd::New(IR::HelperCheckProtoHasNonWritable, this->m_func);
    instr = IR::Instr::New(Js::OpCode::Call, IR::RegOpnd::New(TyUint8, this->m_func), opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    opnd = instr->GetDst();
    m_lowererMD.LowerCall(instr, 0);

    // Bail out if the helper returned zero (non-writable property present).
    InsertTestBranch(opnd, opnd, Js::OpCode::BrEq_A, labelBailOut, instrInsert);

    // $Lcontinue:
    instrInsert->InsertBefore(labelContinue);
}
  6234. void
  6235. Lowerer::GenerateAdjustSlots(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, Js::Type* initialType, Js::Type* finalType)
  6236. {
  6237. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  6238. bool adjusted = this->GenerateAdjustBaseSlots(instrInsert, baseOpnd, initialType, finalType);
  6239. if (!adjusted)
  6240. {
  6241. baseOpnd->Free(m_func);
  6242. }
  6243. }
  6244. bool
  6245. Lowerer::GenerateAdjustBaseSlots(IR::Instr *instrInsert, IR::RegOpnd *baseOpnd, Js::Type* initialType, Js::Type* finalType)
  6246. {
  6247. // Possibly allocate new slot capacity to accommodate a type transition.
  6248. Js::DynamicType *oldType = static_cast<Js::DynamicType*>(initialType);
  6249. Assert(oldType);
  6250. Js::DynamicType *newType = static_cast<Js::DynamicType*>(finalType);
  6251. Assert(newType);
  6252. AssertMsg(Js::DynamicObject::IsTypeHandlerCompatibleForObjectHeaderInlining(oldType->GetTypeHandler(), newType->GetTypeHandler()),
  6253. "Incompatible typeHandler transition?");
  6254. int oldCount = oldType->GetTypeHandler()->GetSlotCapacity();
  6255. int newCount = newType->GetTypeHandler()->GetSlotCapacity();
  6256. Js::PropertyIndex inlineSlotCapacity = oldType->GetTypeHandler()->GetInlineSlotCapacity();
  6257. Js::PropertyIndex newInlineSlotCapacity = newType->GetTypeHandler()->GetInlineSlotCapacity();
  6258. if (oldCount >= newCount || newCount <= inlineSlotCapacity)
  6259. {
  6260. // Already have enough slot capacity. Do nothing.
  6261. return false;
  6262. }
  6263. // Call AdjustSlots using the new counts. Because AdjustSlots uses the "no dispose" flavor of alloc,
  6264. // no implicit calls are possible, and we don't need an implicit call check and bailout.
  6265. // CALL AdjustSlots, instance, newInlineSlotCapacity, newAuxSlotCapacity
  6266. //3rd Param
  6267. Assert(newCount > newInlineSlotCapacity);
  6268. const int newAuxSlotCapacity = newCount - newInlineSlotCapacity;
  6269. m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newAuxSlotCapacity, TyInt32, this->m_func));
  6270. //2nd Param
  6271. m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newInlineSlotCapacity, TyUint16, this->m_func));
  6272. //1st Param (instance)
  6273. m_lowererMD.LoadHelperArgument(instrInsert, baseOpnd);
  6274. //CALL HelperAdjustSlots
  6275. IR::Opnd *opnd = IR::HelperCallOpnd::New(IR::HelperAdjustSlots, this->m_func);
  6276. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  6277. instr->SetSrc1(opnd);
  6278. instrInsert->InsertBefore(instr);
  6279. m_lowererMD.LowerCall(instr, 0);
  6280. return true;
  6281. }
  6282. void
  6283. Lowerer::GenerateFieldStoreWithTypeChange(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd, Js::Type* initialType, Js::Type* finalType)
  6284. {
  6285. // Adjust instance slots, if necessary.
  6286. this->GenerateAdjustSlots(instrStFld, propertySymOpnd, initialType, finalType);
  6287. // We should never add properties to objects of static types.
  6288. Assert(Js::DynamicType::Is(finalType->GetTypeId()));
  6289. // Let's pin the final type to be sure its alive when we try to do the type transition.
  6290. PinTypeRef(finalType, finalType, instrStFld, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
  6291. IR::Opnd *finalTypeOpnd = IR::AddrOpnd::New(finalType, IR::AddrOpndKindDynamicType, instrStFld->m_func, true);
  6292. // Set the new type.
  6293. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(instrStFld->m_func);
  6294. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrStFld->m_func);
  6295. this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrStFld);
  6296. // Now do the store.
  6297. GenerateDirectFieldStore(instrStFld, propertySymOpnd);
  6298. }
bool
Lowerer::GenerateStFldWithCachedFinalType(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd)
{
    // This function tries to treat a sequence of add-property stores as a single type transition.
    Assert(propertySymOpnd == instrStFld->GetDst()->AsPropertySymOpnd());
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasFinalType());
    Assert(propertySymOpnd->HasInitialType());

    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;

    AssertMsg(!propertySymOpnd->IsTypeChecked(), "Why are we doing a type transition when we have the type we want?");

    // If the initial type must be checked here, do it.
    Assert(instrStFld->HasBailOutInfo());
    labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    GeneratePropertyGuardCheckBailoutAndLoadType is not used here; instead verify
    // NOTE(review): the check below verifies the object still has the cached
    // initial type (without the property) and bails out otherwise.
    GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, nullptr/*typeOpnd*/, labelBailOut);

    // Do the type transition: adjust slots, set the final type, and store the field.
    GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetFinalType());
    instrStFld->FreeSrc1();
    instrStFld->FreeDst();

    // Insert the bailout and let the main path branch around it.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrStFld->InsertBefore(instr);

    if (instrStFld->HasBailOutInfo())
    {
        // instrStFld itself becomes the BailOut instruction on the guard-failure path.
        Assert(labelBailOut != nullptr);
        instrStFld->InsertBefore(labelBailOut);
        instrStFld->InsertAfter(labelDone);
        instrStFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrStFld);
    }
    else
    {
        // No bailout info: the original store instruction is simply discarded.
        instrStFld->InsertAfter(labelDone);
        instrStFld->Remove();
    }

    return true;
}
  6337. ///----------------------------------------------------------------------------
  6338. ///
  6339. /// Lowerer::LowerScopedStFld
  6340. ///
  6341. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedStFld(IR::Instr * stFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache,
    bool withPropertyOperationFlags, Js::PropertyOperationFlags flags)
{
    // Lowers a scoped field store to a helper call. Helper arguments are loaded
    // last-to-first; the optional flags/inline-cache arguments bracket the core
    // (default instance, value, property sym) arguments. Returns the instruction
    // preceding the expansion.
    IR::Instr *instrPrev = stFldInstr->m_prev;

    if (withPropertyOperationFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    // Without an inline cache the helper takes the script context instead.
    if(!withInlineCache)
    {
        LoadScriptContext(stFldInstr);
    }

    // Pass the default instance
    IR::Opnd *src = stFldInstr->UnlinkSrc2();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the value to store
    src = stFldInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the property sym to store to
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(stFldInstr, this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd()));

        m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod);

    return instrPrev;
}
  6379. ///----------------------------------------------------------------------------
  6380. ///
  6381. /// Lowerer::LowerLoadVar
  6382. ///
  6383. ///----------------------------------------------------------------------------
  6384. IR::Instr *
  6385. Lowerer::LowerLoadVar(IR::Instr *instr, IR::Opnd *opnd)
  6386. {
  6387. instr->SetSrc1(opnd);
  6388. return m_lowererMD.ChangeToAssign(instr);
  6389. }
  6390. IR::Instr *
  6391. Lowerer::LoadHelperTemp(IR::Instr * instr, IR::Instr * instrInsert)
  6392. {
  6393. IR::Opnd *tempOpnd;
  6394. IR::Opnd *dst = instr->GetDst();
  6395. AssertMsg(dst != nullptr, "Always expect a dst for these.");
  6396. AssertMsg(instr->dstIsTempNumber, "Should only be loading temps here");
  6397. Assert(dst->IsRegOpnd());
  6398. StackSym * tempNumberSym = this->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
  6399. IR::Instr *load = this->m_lowererMD.LoadStackAddress(tempNumberSym);
  6400. instrInsert->InsertBefore(load);
  6401. tempOpnd = load->GetDst();
  6402. m_lowererMD.LoadHelperArgument(instrInsert, tempOpnd);
  6403. return load;
  6404. }
  6405. void
  6406. Lowerer::LoadArgumentCount(IR::Instr *const instr)
  6407. {
  6408. Assert(instr);
  6409. Assert(instr->GetDst());
  6410. Assert(!instr->GetSrc1());
  6411. Assert(!instr->GetSrc2());
  6412. if(instr->m_func->IsInlinee())
  6413. {
  6414. // Argument count including 'this'
  6415. instr->SetSrc1(IR::IntConstOpnd::New(instr->m_func->actualCount, TyUint32, instr->m_func, true));
  6416. LowererMD::ChangeToAssign(instr);
  6417. }
  6418. else if (instr->m_func->GetJnFunction()->IsGenerator())
  6419. {
  6420. IR::SymOpnd* symOpnd = LoadCallInfo(instr);
  6421. instr->SetSrc1(symOpnd);
  6422. LowererMD::ChangeToAssign(instr);
  6423. }
  6424. else
  6425. {
  6426. m_lowererMD.LoadArgumentCount(instr);
  6427. }
  6428. }
  6429. void
  6430. Lowerer::LoadStackArgPtr(IR::Instr *const instr)
  6431. {
  6432. Assert(instr);
  6433. Assert(instr->GetDst());
  6434. Assert(!instr->GetSrc1());
  6435. Assert(!instr->GetSrc2());
  6436. if(instr->m_func->IsInlinee())
  6437. {
  6438. // Address of argument after 'this'
  6439. const auto firstRealArgStackSym = instr->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
  6440. this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr);
  6441. instr->SetSrc1(IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, instr->m_func));
  6442. LowererMD::ChangeToLea(instr);
  6443. }
  6444. else
  6445. {
  6446. m_lowererMD.LoadStackArgPtr(instr);
  6447. }
  6448. }
  6449. void
  6450. Lowerer::LoadArgumentsFromFrame(IR::Instr *const instr)
  6451. {
  6452. Assert(instr);
  6453. Assert(instr->GetDst());
  6454. Assert(!instr->GetSrc1());
  6455. Assert(!instr->GetSrc2());
  6456. if(instr->m_func->IsInlinee())
  6457. {
  6458. // Use the inline object meta arg slot for the arguments object
  6459. instr->SetSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
  6460. LowererMD::ChangeToAssign(instr);
  6461. }
  6462. else
  6463. {
  6464. m_lowererMD.LoadArgumentsFromFrame(instr);
  6465. }
  6466. }
  6467. IR::Instr *
  6468. Lowerer::LowerUnaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  6469. {
  6470. IR::Instr *instrPrev;
  6471. IR::Opnd *src1 = instr->UnlinkSrc1();
  6472. instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
  6473. m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, opndBailoutArg);
  6474. return instrPrev;
  6475. }
  6476. // helper takes memory context as second argument
  6477. IR::Instr *
  6478. Lowerer::LowerUnaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  6479. {
  6480. IR::Instr *instrPrev;
  6481. instrPrev = LoadScriptContext(instr);
  6482. return this->LowerUnaryHelper(instr, helperMethod, opndBailoutArg);
  6483. }
  6484. IR::Instr *
  6485. Lowerer::LowerUnaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6486. {
  6487. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  6488. return this->LowerUnaryHelperMem(instr, helperMethod);
  6489. }
  6490. IR::Instr *
  6491. Lowerer::LowerBinaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6492. {
  6493. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2, "Expected a binary instruction...");
  6494. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  6495. return this->LowerBinaryHelperMem(instr, helperMethod);
  6496. }
  6497. IR::Instr *
  6498. Lowerer::LowerUnaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6499. {
  6500. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  6501. IR::Instr * instrFirst;
  6502. IR::Opnd * tempOpnd;
  6503. if (instr->dstIsTempNumber)
  6504. {
  6505. instrFirst = this->LoadHelperTemp(instr, instr);
  6506. }
  6507. else
  6508. {
  6509. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  6510. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  6511. }
  6512. this->LowerUnaryHelperMem(instr, helperMethod);
  6513. return instrFirst;
  6514. }
  6515. IR::Instr *
  6516. Lowerer::LowerUnaryHelperMemWithTemp2(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp)
  6517. {
  6518. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  6519. if (instr->dstIsTempNumber)
  6520. {
  6521. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  6522. this->LowerUnaryHelperMem(instr, helperMethodWithTemp);
  6523. return instrFirst;
  6524. }
  6525. return this->LowerUnaryHelperMem(instr, helperMethod);
  6526. }
  6527. IR::Instr *
  6528. Lowerer::LowerUnaryHelperMemWithBoolReference(IR::Instr *instr, IR::JnHelperMethod helperMethod, bool useBoolForBailout)
  6529. {
  6530. if (!this->m_func->tempSymBool)
  6531. {
  6532. this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
  6533. this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
  6534. }
  6535. IR::SymOpnd * boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
  6536. IR::RegOpnd * boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  6537. InsertLea(boolRefOpnd, boolOpnd, instr);
  6538. m_lowererMD.LoadHelperArgument(instr, boolRefOpnd);
  6539. return this->LowerUnaryHelperMem(instr, helperMethod, useBoolForBailout ? boolOpnd : nullptr);
  6540. }
  6541. ///----------------------------------------------------------------------------
  6542. ///
  6543. /// Lowerer::LowerBinaryHelper
  6544. ///
  6545. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBinaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    // Lowers a binary opcode into a call to the given helper: src2 is pushed as a
    // helper argument first, then src1, and the instruction itself is converted
    // into the call. Returns the first instruction generated (the src2 argument
    // load) so the caller can resume lowering from before the expansion.
    //
    // The only case where this would still be null when we return is when
    // helperMethod == HelperOP_CmSrEq_EmptyString; in which case we ignore
    // instrPrev.
    IR::Instr *instrPrev = nullptr;
    AssertMsg((Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1 && !instr->GetDst()) ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2 ||
        Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementU ||
        instr->m_opcode == Js::OpCode::InvalCachedScope, "Expected a binary instruction...");
    IR::Opnd *src2 = instr->UnlinkSrc2();
    // For HelperOP_CmSrEq_EmptyString, src2 is unlinked but not passed to the
    // helper (presumably the empty-string constant is implied by the helper
    // itself — confirm against the helper's signature).
    if (helperMethod != IR::HelperOP_CmSrEq_EmptyString)
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src2);
    IR::Opnd *src1 = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, src1);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    return instrPrev;
}
// Helper takes the memory context (script context) as its third argument.
  6568. IR::Instr *
  6569. Lowerer::LowerBinaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6570. {
  6571. IR::Instr *instrPrev;
  6572. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  6573. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  6574. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2, "Expected a binary instruction...");
  6575. instrPrev = LoadScriptContext(instr);
  6576. return this->LowerBinaryHelper(instr, helperMethod);
  6577. }
  6578. IR::Instr *
  6579. Lowerer::LowerBinaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6580. {
  6581. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  6582. IR::Instr * instrFirst;
  6583. IR::Opnd * tempOpnd;
  6584. if (instr->dstIsTempNumber)
  6585. {
  6586. instrFirst = this->LoadHelperTemp(instr, instr);
  6587. }
  6588. else
  6589. {
  6590. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  6591. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  6592. }
  6593. this->LowerBinaryHelperMem(instr, helperMethod);
  6594. return instrFirst;
  6595. }
  6596. IR::Instr *
  6597. Lowerer::LowerBinaryHelperMemWithTemp2(
  6598. IR::Instr *instr,
  6599. IR::JnHelperMethod helperMethod,
  6600. IR::JnHelperMethod helperMethodWithTemp
  6601. )
  6602. {
  6603. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  6604. if (instr->dstIsTempNumber && instr->GetDst() && instr->GetDst()->GetValueType().HasBeenNumber())
  6605. {
  6606. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  6607. this->LowerBinaryHelperMem(instr, helperMethodWithTemp);
  6608. return instrFirst;
  6609. }
  6610. return this->LowerBinaryHelperMem(instr, helperMethod);
  6611. }
IR::Instr *
Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
{
    // Fast path for string concatenation when the left operand is dead after
    // this add: if the left is a CompoundString with room in its last block and
    // the right is a finalized one-character string, append the character
    // directly into the left's buffer in place. All guard failures jump to
    // labelHelper, which falls into the generic Op_AddLeftDead helper call.
    IR::Opnd * opndLeft;
    IR::Opnd * opndRight;
    opndLeft = instr->GetSrc1();
    opndRight = instr->GetSrc2();
    Assert(opndLeft && opndRight);
    bool generateFastPath = this->m_func->DoFastPaths();
    // Fast path only applies when: fast paths are enabled, all operands are
    // registers, both sources are likely strings, the dst aliases the left
    // operand, and left != right (in-place append would corrupt a shared opnd).
    if (!generateFastPath
        || !opndLeft->IsRegOpnd()
        || !opndRight->IsRegOpnd()
        || !instr->GetDst()->IsRegOpnd()
        || !opndLeft->GetValueType().IsLikelyString()
        || !opndRight->GetValueType().IsLikelyString()
        || !opndLeft->IsEqual(instr->GetDst()->AsRegOpnd())
        || opndLeft->IsEqual(opndRight))
    {
        return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
    }
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel(false);
    IR::LabelInstr *insertBeforeInstr = labelHelper;
    instr->InsertBefore(labelHelper);
    // Left must be an object pointer (not a tagged int/float).
    if (!opndLeft->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
    }
    // Left must be exactly a CompoundString (vtable compare).
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // Right must be a string.
    GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);
    // left->m_charLength <= JavascriptString::MaxCharLength
    IR::IndirOpnd *indirLeftCharLengthOpnd = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func);
    IR::RegOpnd *regLeftCharLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(regLeftCharLengthOpnd, indirLeftCharLengthOpnd, insertBeforeInstr);
    InsertCompareBranch(
        regLeftCharLengthOpnd,
        IR::IntConstOpnd::New(Js::JavascriptString::MaxCharLength, TyUint32, m_func),
        Js::OpCode::BrGt_A,
        labelHelper,
        insertBeforeInstr);
    // left->m_pszValue == NULL (!left->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // right->m_pszValue != NULL (right->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if ownsLastBlock != 0 (left must own its last block to append in place)
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfOwnsLastBlock(), TyUint8, m_func),
        IR::IntConstOpnd::New(0, TyUint8, m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if right->m_charLength == 1 (only single-character appends are handled)
    InsertCompareBranch(IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
        IR::IntConstOpnd::New(1, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if left->m_directCharLength == -1
    InsertCompareBranch(IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfDirectCharLength(), TyUint32, m_func),
        IR::IntConstOpnd::New(UINT32_MAX, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if lastBlockInfo.charLength < lastBlockInfo.charCapacity (room to append)
    IR::IndirOpnd *indirCharLength = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo()+ (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharLength(), TyMachPtr, m_func);
    IR::RegOpnd *charLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charLengthOpnd, indirCharLength, insertBeforeInstr);
    InsertCompareBranch(charLengthOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharCapacity(), TyMachPtr, m_func), Js::OpCode::BrGe_A, labelHelper, insertBeforeInstr);
    // load c = right->m_pszValue[0]
    IR::RegOpnd *pszValue0Opnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd *indirRightPszOpnd = IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(pszValue0Opnd, indirRightPszOpnd, insertBeforeInstr);
    IR::RegOpnd *charResultOpnd = IR::RegOpnd::New(TyUint16, this->m_func);
    InsertMove(charResultOpnd, IR::IndirOpnd::New(pszValue0Opnd, 0, TyUint16, this->m_func), insertBeforeInstr);
    // lastBlockInfo.buffer[blockCharLength] = c;
    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(baseOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoBuffer(), TyMachPtr, m_func), insertBeforeInstr);
    // Scale the index by sizeof(wchar_t) via the indir's scale factor.
    IR::IndirOpnd *indirBufferToStore = IR::IndirOpnd::New(baseOpnd, charLengthOpnd, (byte)Math::Log2(sizeof(wchar_t)), TyUint16, m_func);
    InsertMove(indirBufferToStore, charResultOpnd, insertBeforeInstr);
    // left->m_charLength++
    InsertAdd(false, indirLeftCharLengthOpnd, regLeftCharLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // lastBlockInfo.charLength++
    InsertAdd(false, indirCharLength, indirCharLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // Fast path done; skip over the helper call.
    InsertBranch(Js::OpCode::Br, labelFallThrough, insertBeforeInstr);
    // Slow path (reached via labelHelper): generic left-dead add helper.
    return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
}
  6709. IR::Instr *
  6710. Lowerer::LowerBinaryHelperMemWithTemp3(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp, IR::JnHelperMethod helperMethodLeftDead)
  6711. {
  6712. IR::Opnd *src1 = instr->GetSrc1();
  6713. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_isTempLastUse && !src1->GetValueType().IsNotString())
  6714. {
  6715. Assert(helperMethodLeftDead == IR::HelperOp_AddLeftDead);
  6716. return LowerAddLeftDeadForString(instr);
  6717. }
  6718. else
  6719. {
  6720. return this->LowerBinaryHelperMemWithTemp2(instr, helperMethod, helperMethodWithTemp);
  6721. }
  6722. }
  6723. StackSym *
  6724. Lowerer::GetTempNumberSym(IR::Opnd * opnd, bool isTempTransferred)
  6725. {
  6726. AssertMsg(opnd->IsRegOpnd(), "Expected regOpnd");
  6727. if (isTempTransferred)
  6728. {
  6729. StackSym * tempNumberSym = StackSym::New(TyMisc, m_func);
  6730. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  6731. return tempNumberSym;
  6732. }
  6733. StackSym * stackSym = opnd->AsRegOpnd()->m_sym;
  6734. StackSym * tempNumberSym = stackSym->m_tempNumberSym;
  6735. if (tempNumberSym == nullptr)
  6736. {
  6737. tempNumberSym = StackSym::New(TyMisc, m_func);
  6738. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  6739. stackSym->m_tempNumberSym = tempNumberSym;
  6740. }
  6741. return tempNumberSym;
  6742. }
void Lowerer::LowerProfiledLdElemI(IR::JitProfilingInstr *const instr)
{
    // Lowers a profiled element load into a direct call to
    // ProfilingHelpers::ProfiledLdElem, passing the function body and profile id
    // so the runtime can record profile data.
    Assert(instr);
    /*
        Var ProfilingHelpers::ProfiledLdElem(
            const Var base,
            const Var varIndex,
            FunctionBody *const functionBody,
            const ProfileId profileId)
    */
    Func *const func = instr->m_func;
    // Arguments are pushed in reverse of the signature order (last first).
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    IR::IndirOpnd *const indir = instr->UnlinkSrc1()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    // If there is no index operand, the indir's constant offset is the element
    // index; it must be non-negative and fit in a tagged int so it can be
    // passed to the helper as a Var.
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledLdElem, func));
    m_lowererMD.LowerCall(instr, 0);
}
void Lowerer::LowerProfiledStElemI(IR::JitProfilingInstr *const instr, const Js::PropertyOperationFlags flags)
{
    // Lowers a profiled element store into a call to
    // ProfilingHelpers::ProfiledStElem. When the flags are the defaults, a
    // flag-less helper variant is used so the flags argument can be omitted.
    Assert(instr);
    /*
        void ProfilingHelpers::ProfiledStElem(
            const Var base,
            const Var varIndex,
            const Var value,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            const PropertyOperationFlags flags)
    */
    Func *const func = instr->m_func;
    IR::JnHelperMethod helper;
    if(flags == Js::PropertyOperation_None)
    {
        // Default flags: call the variant that doesn't take a flags argument.
        helper = IR::HelperProfiledStElem_DefaultFlags;
    }
    else
    {
        helper = IR::HelperProfiledStElem;
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(flags, TyInt32, func, true));
    }
    // Remaining arguments are pushed in reverse of the signature order.
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    IR::IndirOpnd *const indir = instr->UnlinkDst()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    // No index operand means the indir's constant offset is the element index;
    // it must be non-negative and representable as a tagged int.
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(helper, func));
    m_lowererMD.LowerCall(instr, 0);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStElemI
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStElemI(IR::Instr * instr, Js::PropertyOperationFlags flags, bool isHelper, IR::JnHelperMethod helperMethod)
{
    // Lowers an element store (base[index] = src1) into a call to the
    // appropriate store-element helper. Handles the JIT-profiling variant,
    // boxing of stack floats for likely-typed-array stores (non-FLOATVAR
    // builds), per-kind bailout lowering, and selection of the helper variant
    // matching the index type (int32/uint32/Var) and source type
    // (Var / native int / native float). Returns the instruction preceding the
    // original so the caller can continue lowering from there.
    IR::Instr *instrPrev = instr->m_prev;
    if (instr->IsJitProfilingInstr())
    {
        Assert(!isHelper);
        LowerProfiledStElemI(instr->AsJitProfilingInstr(), flags);
        return instrPrev;
    }
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *newDst = nullptr;
    IRType srcType = src1->GetType();
    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
#if !FLOATVAR
    if (dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray() && src1->IsRegOpnd())
    {
        // We allow the source of typedArray StElem to be marked as temp, since we just need the value,
        // however if the array turns out to be a non-typed array, or the index isn't valid (the value is then stored as a property)
        // the temp needs to be boxed if it is a float. The BoxStackNumber helper will box JavascriptNumbers
        // which are on the stack.
        // regVar = BoxStackNumber(src1, scriptContext)
        IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
        newInstr->SetDst(regVar);
        newInstr->SetSrc1(src1);
        instr->InsertBefore(newInstr);
        LowerUnaryHelperMem(newInstr, IR::HelperBoxStackNumber);
        // MOV src1, regVar
        newInstr = IR::Instr::New(Js::OpCode::Ld_A, src1, regVar, this->m_func);
        instr->InsertBefore(m_lowererMD.ChangeToAssign(newInstr));
    }
#endif
    if(instr->HasBailOutInfo())
    {
        // Lower each array-related bailout kind separately, clearing its bit as
        // it is handled so the remaining kinds can be checked by the asserts.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if(bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutConvertedNativeArray)
        {
            // The helper call's result is captured in newDst (set as the call's
            // dst below); zero means no conversion happened, so skip the bailout.
            IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instr->InsertAfter(labelSkipBailOut);
            LowerOneBailOutKind(instr, IR::BailOutConvertedNativeArray, isHelper);
            newDst = IR::RegOpnd::New(TyMachReg, m_func);
            InsertTestBranch(newDst, newDst, Js::OpCode::BrEq_A, labelSkipBailOut, instr->m_next);
        }
    }
    instr->UnlinkDst();
    instr->UnlinkSrc1();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    Assert(
        helperMethod == IR::HelperOP_InitElemGetter ||
        helperMethod == IR::HelperOP_InitElemSetter ||
        helperMethod == IR::HelperOP_InitComputedProperty ||
        helperMethod == IR::HelperOp_SetElementI ||
        helperMethod == IR::HelperOp_InitClassMemberComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberGetComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberSetComputedName
        );
    // Select a typed helper variant when the index is a native int32/uint32.
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_Int32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_Int32 :
                IR::HelperOp_SetNativeFloatElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_UInt32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_UInt32 :
                IR::HelperOp_SetNativeFloatElementI_UInt32;
        }
        else
        {
            Assert(FALSE);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)dst->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (srcType != TyVar)
        {
            helperMethod =
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI : IR::HelperOp_SetNativeFloatElementI;
        }
    }
    // Push helper arguments (last argument first). Float sources go through the
    // dedicated double-argument path.
    if (srcType == TyFloat64)
    {
        m_lowererMD.LoadDoubleHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr,
        IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    LoadScriptContext(instr);
    if (srcType != TyFloat64)
    {
        m_lowererMD.LoadHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    dst->Free(this->m_func);
    if (newDst)
    {
        // Capture the helper's result for the converted-native-array check above.
        instr->SetDst(newDst);
    }
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerLdElemI
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdElemI(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    // Lowers an element load (dst = base[index]) into a call to the given
    // get-element helper, choosing the variant that matches the index type
    // (int32/uint32/Var) and the destination type (Var / native int / native
    // float), and—outside loop bodies—the "expecting" variants that update
    // array creation-site profile data. Returns the instruction preceding the
    // original so the caller can continue lowering from there.
    IR::Instr *instrPrev = instr->m_prev;
    if(instr->IsJitProfilingInstr())
    {
        Assert(helperMethod == IR::HelperOp_GetElementI);
        Assert(!isHelper);
        LowerProfiledLdElemI(instr->AsJitProfilingInstr());
        return instrPrev;
    }
    if (!isHelper && instr->DoStackArgsOpt(this->m_func))
    {
        IR::LabelInstr * labelLdElem = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
        // Pass in null for labelFallThru to only generate the LdHeapArgument call
        GenerateFastArgumentsLdElemI(instr, labelLdElem, nullptr);
        instr->InsertBefore(labelLdElem);
    }
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd");
    IR::IndirOpnd *indirOpnd = src1->AsIndirOpnd();
    bool loadScriptContext = true;
    IRType dstType = instr->GetDst()->GetType();
    IR::Opnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    // Native (non-Var) index: pick the matching typed helper variant.
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        Assert(indexOpnd->GetType() == TyUint32 || indexOpnd->GetType() == TyInt32);
        switch (helperMethod)
        {
        case IR::HelperOp_GetElementI:
            if (indexOpnd->GetType() == TyUint32)
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_UInt32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_UInt32 :
                    IR::HelperOp_GetNativeFloatElementI_UInt32;
            }
            else
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_Int32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_Int32 :
                    IR::HelperOp_GetNativeFloatElementI_Int32;
            }
            break;
        case IR::HelperOp_GetMethodElement:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_GetMethodElement_UInt32 : IR::HelperOp_GetMethodElement_Int32;
            break;
        case IR::HelperOp_TypeofElem:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_TypeofElem_UInt32 : IR::HelperOp_TypeofElem_Int32;
            break;
        default:
            Assert(false);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (dstType != TyVar)
        {
            // Native-typed dst helpers don't take a script context argument.
            loadScriptContext = false;
            helperMethod =
                dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI : IR::HelperOp_GetNativeFloatElementI;
        }
    }
    // Jitted loop bodies have volatile information about values created outside the loop, so don't update array creation site
    // profile data from jitted loop bodies
    if(!m_func->IsLoopBody())
    {
        const ValueType baseValueType(indirOpnd->GetBaseOpnd()->GetValueType());
        if( baseValueType.IsLikelyObject() &&
            baseValueType.GetObjectType() == ObjectType::Array &&
            !baseValueType.HasIntElements())
        {
            // Swap in the "expecting" variants, which record whether the array's
            // element kind matched the expectation.
            switch(helperMethod)
            {
            case IR::HelperOp_GetElementI:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_UInt32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_UInt32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_UInt32_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_Int32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_Int32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_Int32_ExpectingVarArray;
                break;
            }
        }
    }
    // Push helper arguments (last argument first): [scriptContext,] index, base.
    if (loadScriptContext)
    {
        LoadScriptContext(instr);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = indirOpnd->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    src1->Free(this->m_func);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
void Lowerer::LowerLdLen(IR::Instr *const instr, const bool isHelper)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdLen_A);
    // LdLen has persisted to this point for the sake of pre-lower opts.
    // Turn it into a LdFld of the "length" property.
    // This is normally a load of the internal "length" of an Array, so it probably doesn't benefit
    // from inline caching.
    // Changing the opcode to LdFld is done in LowerLdFld and needs to remain that way to take into
    // account ProfiledLdLen_A
    IR::RegOpnd * baseOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    // (uint32)-1 / (uint)-1 appear to be "no index" sentinels for the property
    // id index and inline cache index — confirm against PropertySym::FindOrCreate.
    PropertySym* fieldSym = PropertySym::FindOrCreate(baseOpnd->m_sym->m_id, Js::PropertyIds::length, (uint32)-1, (uint)-1, PropertyKindData, m_func);
    baseOpnd->Free(this->m_func);
    instr->SetSrc1(IR::SymOpnd::New(fieldSym, TyVar, m_func));
    LowerLdFld(instr, IR::HelperOp_GetProperty, IR::HelperOp_GetProperty, false, nullptr, isHelper);
}
IR::Instr *
Lowerer::LowerLdArrViewElem(IR::Instr * instr)
{
    // Lowers an asm.js typed-array-view load. Constant offsets below 0x1000000
    // are emitted as a direct unchecked load; otherwise a bounds check against
    // src2 (the array size) is generated and out-of-bounds loads produce 0.
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdFloat32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdFloat64ArrViewElem);
    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * indexOpnd = instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd();
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Instr * done;
    if (indexOpnd || (uint32)src1->AsIndirOpnd()->GetOffset() >= 0x1000000)
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $load
        // $helper:
        // MOV dst, 0
        // JMP $done
        // $load:
        // MOV dst, src1([arrayBuffer + indexOpnd])
        // $done:
        Assert(!dst->IsFloat32() || src1->IsFloat32());
        Assert(!dst->IsFloat64() || src1->IsFloat64());
        done = m_lowererMD.LowerAsmJsLdElemHelper(instr);
    }
    else
    {
        // any access below 0x1000000 is safe
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        if (src2)
        {
            instr->FreeSrc2();
        }
        done = instr;
    }
    // Emit the actual load before the join point (or before the dead original
    // instruction in the unchecked case), then drop the original instruction.
    InsertMove(dst, src1, done);
    instr->Remove();
    return instrPrev;
}
IR::Instr *
Lowerer::LowerMemset(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    // Lowers a Memset opcode to a call of the Op_Memset helper.
    // helperRet receives the helper's result, which the caller (LowerMemOp)
    // tests for the BailOutOnMemOpError check. Returns the ToVar instruction
    // inserted to box a native source value, or nullptr if none was needed.
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src1 = instr->UnlinkSrc1();
    Assert(dst->IsIndirOpnd());
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
    Assert(baseOpnd);
    Assert(sizeOpnd);
    Assert(indexOpnd);
    IR::JnHelperMethod helperMethod = IR::HelperOp_Memset;
    IR::Instr *instrPrev = nullptr;
    if (src1->IsRegOpnd() && !src1->IsVar())
    {
        // The helper takes a Var value: box a native source with ToVar first.
        IR::RegOpnd* varOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        instrPrev = IR::Instr::New(Js::OpCode::ToVar, varOpnd, src1, instr->m_func);
        instr->InsertBefore(instrPrev);
        src1 = varOpnd;
    }
    instr->SetDst(helperRet);
    // Push arguments last-first: scriptContext, size, value, index, base.
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    dst->Free(m_func);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerMemcopy(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    // Lowers a Memcopy opcode to a call of the Op_Memcopy helper.
    // helperRet receives the helper's result, which the caller (LowerMemOp)
    // tests for the BailOutOnMemOpError check. Always returns nullptr (no
    // additional instructions are inserted before the call sequence).
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src = instr->UnlinkSrc1();
    Assert(dst->IsIndirOpnd());
    Assert(src->IsIndirOpnd());
    IR::Opnd *dstBaseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *dstIndexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    IR::Opnd *srcBaseOpnd = src->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *srcIndexOpnd = src->AsIndirOpnd()->UnlinkIndexOpnd();
    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
    Assert(sizeOpnd);
    Assert(dstBaseOpnd);
    Assert(dstIndexOpnd);
    Assert(srcBaseOpnd);
    Assert(srcIndexOpnd);
    IR::JnHelperMethod helperMethod = IR::HelperOp_Memcopy;
    instr->SetDst(helperRet);
    // Push arguments last-first:
    // scriptContext, size, srcIndex, srcBase, dstIndex, dstBase.
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcBaseOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstBaseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    dst->Free(m_func);
    src->Free(m_func);
    return nullptr;
}
IR::Instr *
Lowerer::LowerMemOp(IR::Instr * instr)
{
    // Lowers a Memset/Memcopy opcode: peels off its array bailout kinds, wires
    // the BailOutOnMemOpError check to the helper's int8 return value, then
    // dispatches to LowerMemset / LowerMemcopy for the actual call.
    Assert(instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    IR::Instr *instrPrev = instr->m_prev;
    // helperRet will hold the helper's success (non-zero) / failure (zero) result.
    IR::RegOpnd* helperRet = IR::RegOpnd::New(TyInt8, instr->m_func);
    const bool isHelper = false;
    AssertMsg(instr->HasBailOutInfo(), "Expected bailOut on MemOp instruction");
    if (instr->HasBailOutInfo())
    {
        // Lower each bailout kind separately, clearing its bit as it's handled.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if (bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if (bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        AssertMsg(bailOutKind & IR::BailOutOnMemOpError, "Expected BailOutOnMemOpError on MemOp instruction");
        if (bailOutKind & IR::BailOutOnMemOpError)
        {
            // Insert or get continue label
            IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isHelper);
            Func *const func = instr->m_func;
            LowerOneBailOutKind(instr, IR::BailOutOnMemOpError, isHelper);
            IR::Instr *const insertBeforeInstr = instr->m_next;
            // test helperRet, helperRet
            // jz $skipBailOut
            InsertCompareBranch(
                helperRet,
                IR::IntConstOpnd::New(0, TyInt8, func),
                Js::OpCode::BrNeq_A,
                skipBailOutLabel,
                insertBeforeInstr);
            // (Bail out with IR::BailOutOnMemOpError)
            // $skipBailOut:
            bailOutKind ^= IR::BailOutOnMemOpError;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        // All kinds handled; the call itself must not carry bailout info.
        instr->ClearBailOutInfo();
    }
    IR::Instr* newInstrPrev = nullptr;
    if (instr->m_opcode == Js::OpCode::Memset)
    {
        newInstrPrev = LowerMemset(instr, helperRet);
    }
    else if (instr->m_opcode == Js::OpCode::Memcopy)
    {
        newInstrPrev = LowerMemcopy(instr, helperRet);
    }
    // Prefer the lowered call's own first instruction when one was reported.
    if (newInstrPrev != nullptr)
    {
        instrPrev = newInstrPrev;
    }
    return instrPrev;
}
IR::Instr *
Lowerer::LowerStArrViewElem(IR::Instr * instr)
{
    // Lower an asm.js typed-array-view store (St*ArrViewElem).
    // dst is an indir opnd addressing the heap buffer (its type is the element
    // type of the array view), src1 is the value to store, and src2 (when
    // present) carries the array size used by the helper's bounds check.
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::StUInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::StInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::StUInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::StInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::StUInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::StFloat32ArrViewElem ||
        instr->m_opcode == Js::OpCode::StFloat64ArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    // type of dst is the type of array
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());

    IR::Instr * done;
    // A dynamic index, or a constant offset >= 0x1000000, needs the
    // bounds-checked helper path. (A negative offset wraps to a large uint32
    // under the cast and also lands on the helper path.)
    if (indexOpnd || (uint32)dst->AsIndirOpnd()->GetOffset() >= 0x1000000)
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $store
        // $helper:
        // JMP $done
        // $store:
        // MOV dst([arrayBuffer + indexOpnd]), src1
        // $done:
        done = m_lowererMD.LowerAsmJsStElemHelper(instr);
    }
    else
    {
        // any constant access below 0x1000000 is safe, as that is the min heap size
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        done = instr;
        if (src2)
        {
            instr->FreeSrc2();
        }
    }
    // Emit the actual store before the continuation point, then discard the
    // original (now emptied) instruction.
    InsertMove(dst, src1, done);
    instr->Remove();
    return instrPrev;
}
IR::Instr *
Lowerer::LowerArrayDetachedCheck(IR::Instr * instr)
{
    // Guard an asm.js buffer access against a detached ArrayBuffer: test the
    // "isDetached" memory flag and, when set, call the OutOfMemoryError helper.
    //
    // TEST isDetached, isDetached
    // JE Done
    // Helper:
    // CALL Js::Throw::OutOfMemory
    // Done:
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    IR::Instr * instrPrev = instr->m_prev;

    IR::Opnd * isDetachedOpnd = instr->UnlinkSrc1();
    Assert(isDetachedOpnd->IsIndirOpnd() || isDetachedOpnd->IsMemRefOpnd());

    // Done label goes after the original instruction; the helper label goes
    // right before it, so the instruction itself becomes the cold helper call.
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::LabelInstr * helperLabel = InsertLabel(true, instr);
    InsertTestBranch(isDetachedOpnd, isDetachedOpnd, Js::OpCode::BrNotNeq_A, doneLabel, helperLabel);
    m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_OutOfMemoryError);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerDeleteElemI
///
///     Lower DeleteElementI (delete obj[index]) to a helper call, picking an
///     index-type-specialized helper (int32/uint32) when the index operand's
///     machine type allows it.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerDeleteElemI(IR::Instr * instr, bool strictMode)
{
    IR::Instr *instrPrev;
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on DeleteElementI");

    // Strict mode is communicated to the helper through the
    // PropertyOperation flags argument.
    Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
    if (strictMode)
    {
        propertyOperationFlag = Js::PropertyOperation_StrictMode;
    }
    instrPrev = instr->m_prev;

    IR::JnHelperMethod helperMethod = IR::HelperOp_DeleteElementI;
    IR::Opnd *indexOpnd = src1->AsIndirOpnd()->UnlinkIndexOpnd();
    if (indexOpnd)
    {
        // Specialize on the index's machine type; anything else must be a Var.
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod = IR::HelperOp_DeleteElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod = IR::HelperOp_DeleteElementI_UInt32;
        }
        else
        {
            Assert(indexOpnd->GetType() == TyVar);
        }
    }
    else
    {
        // No index; the offset identifies the element.
        IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
        indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
    }

    // Helper args are pushed in reverse order:
    // flags, script context, index, base object.
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = src1->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    src1->Free(this->m_func);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBrBReturn - lower 1-operand (boolean) conditional branch
///     (BrOnEmpty / BrOnNotEmpty): call a helper on the single source and
///     branch on the helper's boolean result.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrBReturn(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);

    opndSrc = instr->UnlinkSrc1();
    instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to convert the unknown operand to boolean.
    // The branch's own dst receives the helper's return value and then doubles
    // as the branch condition below.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    opndDst = instr->UnlinkDst();
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call: BrOnNotEmpty branches when true,
    // BrOnEmpty when false.
    instr->m_opcode = (instr->m_opcode == Js::OpCode::BrOnNotEmpty? Js::OpCode::BrTrue_A : Js::OpCode::BrFalse_A);
    instr->SetSrc1(opndDst);
    IR::Instr *loweredInstr;
    loweredInstr = this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, isHelper);

#if DBG
    // DBG-only: when lowering inside a helper block, mark the resulting branch
    // as an expected helper-to-non-helper transition.
    if (isHelper)
    {
        if (!loweredInstr->IsBranchInstr())
        {
            loweredInstr = loweredInstr->GetNextBranchOrLabel();
        }
        if (loweredInstr->IsBranchInstr())
        {
            loweredInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
        }
    }
#endif
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerMultiBr
///     - Lowers the instruction for dictionary look up (string case arms)
///
///----------------------------------------------------------------------------
IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");

    // Push the args in reverse order.
    // The end and start labels for the function are used to guarantee
    // that the dictionary jump destinations haven't been tampered with, so we
    // will always jump to some location within this function
    IR::LabelOpnd * endFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncEndLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, endFuncOpnd);
    IR::LabelOpnd * startFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncStartLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, startFuncOpnd);

    // Load the address of the dictionary pair - Js::StringDictionaryWrapper
    IR::AddrOpnd* nativestringDictionaryOpnd = IR::AddrOpnd::New(instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary(), IR::AddrOpndKindDynamicMisc, this->m_func);
    m_lowererMD.LoadHelperArgument(instr, nativestringDictionaryOpnd);

    // Load the String passed in the Switch expression for look up - JavascriptString
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call for dictionary lookup; its result is the native
    // target address that the multi-branch jumps through.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachPtr,this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachPtr, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    instr->SetSrc1(instrCall->GetDst());
    m_lowererMD.LowerMultiBranch(instr);
    return instrPrev;
}
void
Lowerer::LowerJumpTableMultiBranch(IR::MultiBranchInstr * multiBrInstr, IR::RegOpnd * indexOpnd)
{
    // Lower a multi-branch through an embedded native jump table: load the
    // table's data-label address into a register, index it by the case value,
    // and branch indirectly through the loaded entry.
    Func * func = this->m_func;
    IR::Opnd * opndDst = IR::RegOpnd::New(TyMachPtr, func);

    // Move the native address of the jump table to a register
    IR::LabelInstr * nativeJumpTableLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    nativeJumpTableLabel->m_isDataLabel = true;
    IR::LabelOpnd * nativeJumpTable = IR::LabelOpnd::New(nativeJumpTableLabel, m_func);
    IR::RegOpnd * nativeJumpTableReg = IR::RegOpnd::New(TyMachPtr, func);
    m_lowererMD.CreateAssign(nativeJumpTableReg, nativeJumpTable, multiBrInstr);

    // Record the data label on the jump-table wrapper (it must not have been
    // assigned yet); presumably consumed later when the table data is emitted.
    BranchJumpTableWrapper * branchJumpTable = multiBrInstr->GetBranchJumpTable();
    AssertMsg(branchJumpTable->labelInstr == nullptr, "Should not be already assigned");
    branchJumpTable->labelInstr = nativeJumpTableLabel;

    // Indirect addressing @ target location in the jump table.
    // MOV eax, [nativeJumpTableReg + (offset * indirScale)]
    BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
    IR::Opnd * opndSrc = IR::IndirOpnd::New(nativeJumpTableReg, indexOpnd, indirScale, TyMachReg, this->m_func);
    IR::Instr * indirInstr = m_lowererMD.CreateAssign(opndDst, opndSrc, multiBrInstr);

    // MultiBr eax
    multiBrInstr->SetSrc1(indirInstr->GetDst());

    // Jump to the address at the target location in the jump table
    m_lowererMD.LowerMultiBranch(multiBrInstr);
}
  7486. ///----------------------------------------------------------------------------
  7487. ///
  7488. /// Lowerer::LowerMultiBr
  7489. /// - Lowers the instruction for jump table(consecutive integer case arms)
  7490. ///
  7491. ///----------------------------------------------------------------------------
  7492. IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr)
  7493. {
  7494. IR::Instr * instrPrev = instr->m_prev;
  7495. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
  7496. AssertMsg(instr->IsBranchInstr() && instr->AsBranchInstr()->IsMultiBranch(), "Bad Instruction Lowering Call to LowerMultiBr()");
  7497. IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
  7498. IR::RegOpnd * offset = instr->UnlinkSrc1()->AsRegOpnd();
  7499. LowerJumpTableMultiBranch(multiBrInstr, offset);
  7500. return instrPrev;
  7501. }
  7502. IR::Instr* Lowerer::LowerBrBMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  7503. {
  7504. IR::Instr * instrPrev;
  7505. IR::Instr * instrCall;
  7506. IR::HelperCallOpnd * opndHelper;
  7507. IR::Opnd * opndSrc;
  7508. IR::Opnd * opndDst;
  7509. StackSym * symDst;
  7510. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  7511. instrPrev = LoadScriptContext(instr);
  7512. opndSrc = instr->UnlinkSrc1();
  7513. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  7514. // Generate helper call to convert the unknown operand to boolean
  7515. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  7516. symDst = StackSym::New(TyVar, this->m_func);
  7517. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  7518. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  7519. instr->InsertBefore(instrCall);
  7520. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  7521. // Branch on the result of the call
  7522. instr->SetSrc1(opndDst);
  7523. m_lowererMD.LowerCondBranch(instr);
  7524. return instrPrev;
  7525. }
  7526. IR::Instr* Lowerer::LowerBrOnObject(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  7527. {
  7528. IR::Instr * instrPrev;
  7529. IR::Instr * instrCall;
  7530. IR::HelperCallOpnd * opndHelper;
  7531. IR::Opnd * opndSrc;
  7532. IR::Opnd * opndDst;
  7533. StackSym * symDst;
  7534. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  7535. opndSrc = instr->UnlinkSrc1();
  7536. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  7537. // Generate helper call to check if the operand's type is object
  7538. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  7539. symDst = StackSym::New(TyVar, this->m_func);
  7540. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  7541. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  7542. instr->InsertBefore(instrCall);
  7543. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  7544. // Branch on the result of the call
  7545. instr->SetSrc1(opndDst);
  7546. m_lowererMD.LowerCondBranch(instr);
  7547. return instrPrev;
  7548. }
  7549. IR::Instr * Lowerer::LowerBrOnClassConstructor(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  7550. {
  7551. IR::Instr * instrPrev;
  7552. IR::Instr * instrCall;
  7553. IR::HelperCallOpnd * opndHelper;
  7554. IR::Opnd * opndSrc;
  7555. IR::Opnd * opndDst;
  7556. StackSym * symDst;
  7557. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  7558. opndSrc = instr->UnlinkSrc1();
  7559. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  7560. // Generate helper call to check if the operand's type is object
  7561. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  7562. symDst = StackSym::New(TyVar, this->m_func);
  7563. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  7564. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  7565. instr->InsertBefore(instrCall);
  7566. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  7567. // Branch on the result of the call
  7568. instr->SetSrc1(opndDst);
  7569. m_lowererMD.LowerCondBranch(instr);
  7570. return instrPrev;
  7571. }
IR::Instr *
Lowerer::LowerBrCMem(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isHelper)
{
    // Lower a 2-operand comparison branch to a helper call plus BrTrue/BrFalse,
    // optionally emitting an inline fast path first.
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    bool inverted = false;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() != nullptr, "Expected 2 src opnds on BrC");

    // Try the inline fast path first; if it fully handles the branch there is
    // nothing left to lower here.
    if (!noMathFastPath && !this->GenerateFastCondBranch(instr->AsBranchInstr(), &isHelper))
    {
        return instrPrev;
    }

    // Push the args in reverse order.
    // The string-equality helpers take no script context, and the
    // empty-string variant also takes no second operand.
    const bool loadScriptContext = !(helperMethod == IR::HelperOp_StrictEqualString || helperMethod == IR::HelperOp_StrictEqualEmptyString);
    const bool loadArg2 = !(helperMethod == IR::HelperOp_StrictEqualEmptyString);

    if (helperMethod == IR::HelperOp_NotEqual)
    {
        // Op_NotEqual() returns !Op_Equal(). It is faster to call Op_Equal() directly.
        helperMethod = IR::HelperOp_Equal;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    else if(helperMethod == IR::HelperOp_NotStrictEqual)
    {
        // Op_NotStrictEqual() returns !Op_StrictEqual(). It is faster to call Op_StrictEqual() directly.
        helperMethod = IR::HelperOp_StrictEqual;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }

    if (loadScriptContext)
        LoadScriptContext(instr);

    opndSrc = instr->UnlinkSrc2();
    if (loadArg2)
        m_lowererMD.LoadHelperArgument(instr, opndSrc);
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to compare the source operands.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachReg, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        // The bailout machinery must know this branch's sense was flipped.
        if (instr->HasBailOutInfo())
        {
            instr->GetBailOutInfo()->isInvertedBranch = true;
        }
        break;

    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotLe_A:
    case Js::OpCode::BrNotLt_A:
        // NOTE(review): these opcodes appear to map to helpers computing the
        // positive relation, hence branch-on-false — confirm against the
        // helper selection in the caller.
        inverted = true;
        break;
    }

    // Branch if the result is "true" (or "false" when inverted above).
    instr->SetSrc1(opndDst);
    instr->m_opcode = (inverted ? Js::OpCode::BrFalse_A : Js::OpCode::BrTrue_A);
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, !noMathFastPath && isHelper);
    return instrPrev;
}
  7642. IR::Instr *
  7643. Lowerer::LowerBrFncApply(IR::Instr * instr, IR::JnHelperMethod helperMethod) {
  7644. IR::Instr * instrPrev = instr->m_prev;
  7645. IR::Instr * instrCall;
  7646. IR::HelperCallOpnd * opndHelper;
  7647. IR::Opnd * opndSrc;
  7648. IR::Opnd * opndDst;
  7649. StackSym * symDst;
  7650. AssertMsg(instr->GetSrc1() != nullptr, "Expected 1 src opnd on BrFncApply");
  7651. LoadScriptContext(instr);
  7652. opndSrc = instr->UnlinkSrc1();
  7653. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  7654. // Generate helper call to compare the source operands.
  7655. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  7656. symDst = StackSym::New(TyMachReg, this->m_func);
  7657. opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
  7658. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  7659. instr->InsertBefore(instrCall);
  7660. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  7661. // Branch if the result is "true".
  7662. instr->SetSrc1(opndDst);
  7663. instr->m_opcode = Js::OpCode::BrTrue_A;
  7664. m_lowererMD.LowerCondBranch(instr);
  7665. return instrPrev;
  7666. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBrProperty - lower branch-on-has/no-property
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrProperty(IR::Instr * instr, IR::JnHelperMethod helper)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;

    opndSrc = instr->UnlinkSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected propertySym as src of BrProperty");

    // Helper args in reverse order: script context, then the property
    // (owner object + property id).
    instrPrev = LoadScriptContext(instr);
    this->LoadPropertySymAsArgument(instr, opndSrc);

    opndHelper = IR::HelperCallOpnd::New(helper, this->m_func);
    opndDst = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call: "has" branches on true,
    // "no" branches on false.
    instr->SetSrc1(opndDst);
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrOnHasProperty:
        instr->m_opcode = Js::OpCode::BrTrue_A;
        break;
    case Js::OpCode::BrOnNoProperty:
        instr->m_opcode = Js::OpCode::BrFalse_A;
        break;
    default:
        AssertMsg(0, "Unknown opcode on BrProperty branch");
        break;
    }
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, false);
    return instrPrev;
}
  7707. ///----------------------------------------------------------------------------
  7708. ///
  7709. /// Lowerer::LowerElementUndefined
  7710. ///
  7711. ///----------------------------------------------------------------------------
  7712. IR::Instr *
  7713. Lowerer::LowerElementUndefined(IR::Instr * instr, IR::JnHelperMethod helper)
  7714. {
  7715. IR::Opnd *dst = instr->UnlinkDst();
  7716. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined");
  7717. // Pass the property sym to store to
  7718. this->LoadPropertySymAsArgument(instr, dst);
  7719. m_lowererMD.ChangeToHelperCall(instr, helper);
  7720. return instr;
  7721. }
  7722. IR::Instr *
  7723. Lowerer::LowerElementUndefinedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  7724. {
  7725. // Pass script context
  7726. IR::Instr * instrPrev = LoadScriptContext(instr);
  7727. this->LowerElementUndefined(instr, helper);
  7728. return instrPrev;
  7729. }
  7730. IR::Instr *
  7731. Lowerer::LowerLdElemUndef(IR::Instr * instr)
  7732. {
  7733. if (this->m_func->GetJnFunction()->IsEval())
  7734. {
  7735. return LowerElementUndefinedMem(instr, IR::HelperOp_LdElemUndefDynamic);
  7736. }
  7737. else
  7738. {
  7739. return LowerElementUndefined(instr, IR::HelperOp_LdElemUndef);
  7740. }
  7741. }
  7742. ///----------------------------------------------------------------------------
  7743. ///
  7744. /// Lowerer::LowerElementUndefinedScoped
  7745. ///
  7746. ///----------------------------------------------------------------------------
  7747. IR::Instr *
  7748. Lowerer::LowerElementUndefinedScoped(IR::Instr * instr, IR::JnHelperMethod helper)
  7749. {
  7750. IR::Instr * instrPrev = instr->m_prev;
  7751. // Pass the default instance
  7752. IR::Opnd *src = instr->UnlinkSrc1();
  7753. m_lowererMD.LoadHelperArgument(instr, src);
  7754. // Pass the property sym to store to
  7755. IR::Opnd * dst = instr->UnlinkDst();
  7756. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined Scoped");
  7757. this->LoadPropertySymAsArgument(instr, dst);
  7758. m_lowererMD.ChangeToHelperCall(instr, helper);
  7759. return instrPrev;
  7760. }
  7761. IR::Instr *
  7762. Lowerer::LowerElementUndefinedScopedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  7763. {
  7764. // Pass script context
  7765. IR::Instr * instrPrev = LoadScriptContext(instr);
  7766. this->LowerElementUndefinedScoped(instr, helper);
  7767. return instrPrev;
  7768. }
  7769. void
  7770. Lowerer::LowerStLoopBodyCount(IR::Instr* instr)
  7771. {
  7772. Js::LoopHeader *header = ((JsLoopBodyCodeGen*)m_func->m_workItem)->loopHeader;
  7773. IR::MemRefOpnd *loopBodyCounterOpnd = IR::MemRefOpnd::New((BYTE*)(header) + header->GetOffsetOfProfiledLoopCounter(), TyUint32, this->m_func);
  7774. instr->SetDst(loopBodyCounterOpnd);
  7775. instr->ReplaceSrc1(instr->GetSrc1()->AsRegOpnd()->UseWithNewType(TyUint32, this->m_func));
  7776. IR::AutoReuseOpnd(loopBodyCounterOpnd, this->m_func);
  7777. m_lowererMD.ChangeToAssign(instr);
  7778. return;
  7779. }
#if !FLOATVAR
IR::Instr *
Lowerer::LowerStSlotBoxTemp(IR::Instr *stSlot)
{
    // regVar = BoxStackNumber(src, scriptContext)
    // Box a possibly stack-allocated number before it escapes into a slot,
    // then lower the store itself as a regular StSlot.
    IR::RegOpnd * regSrc = stSlot->UnlinkSrc1()->AsRegOpnd();
    IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
    instr->SetDst(regVar);
    instr->SetSrc1(regSrc);
    stSlot->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperBoxStackNumber);
    // The boxed value replaces the original source of the slot store.
    stSlot->SetSrc1(regVar);
    return this->LowerStSlot(stSlot);
}
#endif
IR::Opnd *
Lowerer::CreateOpndForSlotAccess(IR::Opnd * opnd)
{
    // Translate a symbolic slot access (PropertySym) into a concrete operand:
    // either a direct stack-sym access (stack closure case) or an
    // [owner + offset] indir into the slot array.
    IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
    PropertySym * dstSym = symOpnd->m_sym->AsPropertySym();
    if (!m_func->IsLoopBody() &&
        m_func->DoStackFrameDisplay() &&
        (dstSym->m_stackSym == m_func->GetLocalClosureSym() || dstSym->m_stackSym == m_func->GetLocalFrameDisplaySym()))
    {
        // Stack closure syms are made to look like slot accesses for the benefit of GlobOpt, so that it can do proper
        // copy prop and implicit call bailout. But what we really want is local stack load/store.
        // Don't do this for loop body, though, since we don't have the value saved on the stack.
        return IR::SymOpnd::New(dstSym->m_stackSym, 0, TyMachReg, this->m_func);
    }
    // The property id serves as the slot index; scale it by the element size
    // except in asm.js mode (where the id is used unscaled — presumably
    // already a byte offset; confirm against the asm.js slot layout).
    int32 offset = dstSym->m_propertyId;
    if (!m_func->GetJnFunction()->GetIsAsmJsFunction())
    {
        offset = offset * TySize[opnd->GetType()];
    }
    // TJ (template JIT) loop bodies bias the offset by the function's total
    // in/out size.
    if (m_func->IsTJLoopBody())
    {
        offset = offset - m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetTotalSizeinBytes();
    }
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(symOpnd->CreatePropertyOwnerOpnd(m_func),
        offset , opnd->GetType(), this->m_func);
    return indirOpnd;
}
  7823. IR::Instr *
  7824. Lowerer::LowerStSlot(IR::Instr *instr)
  7825. {
  7826. // StSlot stores the nth Var in the buffer pointed to by the property sym's stack sym.
  7827. IR::Opnd * dstOpnd = instr->UnlinkDst();
  7828. AssertMsg(dstOpnd, "Expected dst opnd on StSlot");
  7829. IR::Opnd * dstNew = this->CreateOpndForSlotAccess(dstOpnd);
  7830. dstOpnd->Free(this->m_func);
  7831. instr->SetDst(dstNew);
  7832. m_lowererMD.ChangeToWriteBarrierAssign(instr);
  7833. return instr;
  7834. }
IR::Instr *
Lowerer::LowerStSlotChkUndecl(IR::Instr *instrStSlot)
{
    // Lower a slot store that must first verify the slot is not still holding
    // the "undeclared" sentinel (let/const use-before-declaration).
    Assert(instrStSlot->GetSrc2() != nullptr);
    // Src2 is required only to avoid dead store false positives during GlobOpt.
    instrStSlot->FreeSrc2();
    // Build the slot-access opnd BEFORE LowerStSlot consumes (and frees) the
    // instruction's dst; the check below reads the slot's current contents
    // and is inserted ahead of the store.
    IR::Opnd *dstOpnd = this->CreateOpndForSlotAccess(instrStSlot->GetDst());
    IR::Instr *instr = this->LowerStSlot(instrStSlot);
    this->GenUndeclChk(instr, dstOpnd);
    return instr;
}
void Lowerer::LowerProfileLdSlot(IR::Opnd *const valueOpnd, Func *const ldSlotFunc, const Js::ProfileId profileId, IR::Instr *const insertBeforeInstr)
{
    // Emit a call to the ProfileLdSlot helper recording the loaded value for
    // the given profile id, inserted before insertBeforeInstr.
    Assert(valueOpnd);
    Assert(profileId != Js::Constants::NoProfileId);
    Assert(insertBeforeInstr);

    Func *const irFunc = insertBeforeInstr->m_func;

    // Helper args are pushed in reverse order: profileId, function body, value.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::Opnd::CreateProfileIdOpnd(profileId, irFunc));
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, CreateFunctionBodyOpnd(ldSlotFunc));
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, valueOpnd);

    // The helper is called purely for its side effect; no dst.
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, irFunc);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfileLdSlot, irFunc));
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
  7860. IR::Instr *
  7861. Lowerer::LowerLdSlot(IR::Instr *instr)
  7862. {
  7863. IR::Opnd * srcOpnd = instr->UnlinkSrc1();
  7864. AssertMsg(srcOpnd, "Expected src opnd on LdSlot");
  7865. IR::Opnd * srcNew = this->CreateOpndForSlotAccess(srcOpnd);
  7866. srcOpnd->Free(this->m_func);
  7867. instr->SetSrc1(srcNew);
  7868. m_lowererMD.ChangeToAssign(instr);
  7869. return instr;
  7870. }
  7871. IR::Instr *
  7872. Lowerer::LowerChkUndecl(IR::Instr *instr)
  7873. {
  7874. IR::Instr *instrPrev = instr->m_prev;
  7875. this->GenUndeclChk(instr, instr->GetSrc1());
  7876. instr->Remove();
  7877. return instrPrev;
  7878. }
void
Lowerer::GenUndeclChk(IR::Instr *instrInsert, IR::Opnd *opnd)
{
    // Emit (before instrInsert):
    //     CMP opnd, UndeclBlockVar
    //     JNE $continue
    //   $throw: (helper block)
    //     RuntimeReferenceError(JSERR_UseBeforeDeclaration)
    //   $continue:
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    InsertCompareBranch(
        opnd,
        LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndeclBlockVar),
        Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // Cold path: raise "use before declaration".
    IR::LabelInstr *labelThrow = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    instrInsert->InsertBefore(labelThrow);
    IR::Instr *instr = IR::Instr::New(
        Js::OpCode::RuntimeReferenceError,
        IR::RegOpnd::New(TyMachReg, m_func),
        IR::IntConstOpnd::New(SCODE_CODE(JSERR_UseBeforeDeclaration), TyInt32, m_func),
        m_func);
    instrInsert->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
    instrInsert->InsertBefore(labelContinue);
}
  7898. ///----------------------------------------------------------------------------
  7899. ///
  7900. /// Lowerer::LowerStElemC
  7901. ///
  7902. ///----------------------------------------------------------------------------
  7903. IR::Instr *
  7904. Lowerer::LowerStElemC(IR::Instr * stElem)
  7905. {
  7906. IR::Instr *instrPrev = stElem->m_prev;
  7907. IR::IndirOpnd * indirOpnd = stElem->GetDst()->AsIndirOpnd();
  7908. IR::RegOpnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
  7909. Assert(!indexOpnd || indexOpnd->m_sym->IsIntConst());
  7910. IntConstType value;
  7911. if (indexOpnd)
  7912. {
  7913. value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
  7914. indexOpnd->Free(this->m_func);
  7915. }
  7916. else
  7917. {
  7918. value = (IntConstType)indirOpnd->GetOffset();
  7919. }
  7920. if (stElem->IsJitProfilingInstr())
  7921. {
  7922. Assert(stElem->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
  7923. m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
  7924. const auto meth = stElem->m_opcode == Js::OpCode::StElemC ? IR::HelperSimpleStoreArrayHelper : IR::HelperSimpleStoreArraySegHelper;
  7925. stElem->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
  7926. m_lowererMD.LoadHelperArgument(stElem, IR::IntConstOpnd::New(value, TyUint32, m_func));
  7927. m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
  7928. stElem->UnlinkDst()->Free(m_func);
  7929. m_lowererMD.LowerCall(stElem, 0);
  7930. return instrPrev;
  7931. }
  7932. IntConstType base;
  7933. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  7934. const ValueType baseValueType(baseOpnd->GetValueType());
  7935. if(baseValueType.IsLikelyNativeArray())
  7936. {
  7937. Assert(stElem->m_opcode == Js::OpCode::StElemC);
  7938. IR::LabelInstr *labelBailOut = nullptr;
  7939. IR::Instr *instrBailOut = nullptr;
  7940. if (stElem->HasBailOutInfo())
  7941. {
  7942. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  7943. instrBailOut = stElem;
  7944. stElem = IR::Instr::New(instrBailOut->m_opcode, m_func);
  7945. instrBailOut->TransferTo(stElem);
  7946. instrBailOut->InsertBefore(stElem);
  7947. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  7948. InsertBranch(Js::OpCode::Br, labelDone, instrBailOut);
  7949. instrBailOut->InsertBefore(labelBailOut);
  7950. instrBailOut->InsertAfter(labelDone);
  7951. instrBailOut->m_opcode = Js::OpCode::BailOut;
  7952. GenerateBailOut(instrBailOut);
  7953. }
  7954. if (!baseValueType.IsObject())
  7955. {
  7956. // Likely native array: do a vtable check and bail if it fails.
  7957. Assert(labelBailOut);
  7958. GenerateArrayTest(baseOpnd, labelBailOut, labelBailOut, stElem, true);
  7959. }
  7960. if (stElem->GetSrc1()->GetType() == TyVar)
  7961. {
  7962. // Storing a non-specialized value. This may cause array conversion, which invalidates all the code
  7963. // that depends on the array check we've already done.
  7964. // Call a helper that returns the type ID of the resulting array, check it here against the one we
  7965. // expect, and bail if it fails.
  7966. Assert(labelBailOut);
  7967. // Call a helper to (try and) unbox the var and store it.
  7968. // If we had to convert the array to do the store, we'll bail.
  7969. LoadScriptContext(stElem);
  7970. m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
  7971. IR::Opnd *indexOpnd = IR::IntConstOpnd::New(value, TyUint32, m_func);
  7972. m_lowererMD.LoadHelperArgument(stElem, indexOpnd);
  7973. m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
  7974. IR::JnHelperMethod helperMethod;
  7975. if (baseValueType.HasIntElements())
  7976. {
  7977. helperMethod = IR::HelperScrArr_SetNativeIntElementC;
  7978. }
  7979. else
  7980. {
  7981. helperMethod = IR::HelperScrArr_SetNativeFloatElementC;
  7982. }
  7983. IR::Instr *instrInsertBranch = stElem->m_next;
  7984. IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyUint32, m_func);
  7985. stElem->ReplaceDst(typeIdOpnd);
  7986. m_lowererMD.ChangeToHelperCall(stElem, helperMethod);
  7987. InsertCompareBranch(
  7988. typeIdOpnd,
  7989. IR::IntConstOpnd::New(
  7990. baseValueType.HasIntElements() ?
  7991. Js::TypeIds_NativeIntArray : Js::TypeIds_NativeFloatArray, TyUint32, m_func),
  7992. Js::OpCode::BrNeq_A,
  7993. labelBailOut,
  7994. instrInsertBranch);
  7995. return instrPrev;
  7996. }
  7997. else if (baseValueType.HasIntElements() && labelBailOut)
  7998. {
  7999. Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
  8000. IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func);
  8001. if (!stElem->GetSrc1()->IsEqual(missingElementOpnd))
  8002. {
  8003. InsertCompareBranch(stElem->GetSrc1(), missingElementOpnd , Js::OpCode::BrEq_A, labelBailOut, stElem, true);
  8004. }
  8005. else
  8006. {
  8007. //Its a missing value store and data flow proves that src1 is always missing value. Array cannot be an int array at the first place
  8008. //if this code was ever hit. Just bailout, this code path would be updated with the profile information next time around.
  8009. InsertBranch(Js::OpCode::Br, labelBailOut, stElem);
  8010. #if DBG
  8011. labelBailOut->m_noHelperAssert = true;
  8012. #endif
  8013. stElem->Remove();
  8014. return instrPrev;
  8015. }
  8016. }
  8017. else
  8018. {
  8019. Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
  8020. }
  8021. stElem->GetDst()->SetType(stElem->GetSrc1()->GetType());
  8022. Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
  8023. if(baseValueType.HasIntElements())
  8024. {
  8025. base = sizeof(Js::JavascriptNativeIntArray) + offsetof(Js::SparseArraySegment<int32>, elements);
  8026. }
  8027. else
  8028. {
  8029. base = sizeof(Js::JavascriptNativeFloatArray) + offsetof(Js::SparseArraySegment<double>, elements);
  8030. }
  8031. }
  8032. else if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::Array)
  8033. {
  8034. Assert(stElem->m_opcode == Js::OpCode::StElemC);
  8035. Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
  8036. base = sizeof(Js::JavascriptArray) + offsetof(Js::SparseArraySegment<Js::Var>, elements);
  8037. }
  8038. else
  8039. {
  8040. Assert(stElem->m_opcode == Js::OpCode::StElemC || stElem->m_opcode == Js::OpCode::StArrSegElemC);
  8041. Assert(indirOpnd->GetBaseOpnd()->GetType() == TyVar);
  8042. base = offsetof(Js::SparseArraySegment<Js::Var>, elements);
  8043. }
  8044. Assert(value >= 0);
  8045. // MOV [r3 + offset(element) + index], src
  8046. const BYTE indirScale =
  8047. baseValueType.IsLikelyAnyOptimizedArray() ? GetArrayIndirScale(baseValueType) : m_lowererMD.GetDefaultIndirScale();
  8048. IntConstType offset = base + (value << indirScale);
  8049. Assert(Math::FitsInDWord(offset));
  8050. indirOpnd->SetOffset((int32)offset);
  8051. m_lowererMD.ChangeToWriteBarrierAssign(stElem);
  8052. return instrPrev;
  8053. }
  8054. void Lowerer::LowerLdArrHead(IR::Instr *const instr)
  8055. {
  8056. IR::RegOpnd *array = instr->UnlinkSrc1()->AsRegOpnd();
  8057. const ValueType arrayValueType(array->GetValueType());
  8058. Assert(arrayValueType.IsAnyOptimizedArray());
  8059. if(arrayValueType.GetObjectType() == ObjectType::ObjectWithArray)
  8060. {
  8061. array = LoadObjectArray(array, instr);
  8062. }
  8063. // mov arrayHeadSegment, [array + offset(headSegment)]
  8064. instr->GetDst()->SetType(TyMachPtr);
  8065. instr->SetSrc1(
  8066. IR::IndirOpnd::New(
  8067. array,
  8068. GetArrayOffsetOfHeadSegment(arrayValueType),
  8069. TyMachPtr,
  8070. instr->m_func));
  8071. LowererMD::ChangeToAssign(instr);
  8072. }
  8073. // Creates the rest parameter array.
  8074. // Var JavascriptArray::OP_NewScArrayWithElements(
  8075. // uint32 elementCount,
  8076. // Var *elements,
  8077. // ScriptContext* scriptContext)
  8078. IR::Instr *Lowerer::LowerRestParameter(IR::Opnd *formalsOpnd, IR::Opnd *dstOpnd, IR::Opnd *excessOpnd, IR::Instr *instr, IR::RegOpnd *generatorArgsPtrOpnd)
  8079. {
  8080. IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, dstOpnd, instr->m_func);
  8081. instr->InsertAfter(helperCallInstr);
  8082. // Var JavascriptArray::OP_NewScArrayWithElements(
  8083. // int32 elementCount,
  8084. // Var *elements,
  8085. // ScriptContext* scriptContext)
  8086. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArrayWithElements;
  8087. LoadScriptContext(helperCallInstr);
  8088. BOOL isGenerator = this->m_func->GetJnFunction()->IsGenerator();
  8089. // Elements pointer = ebp + (formals count + formals offset + 1)*sizeof(Var)
  8090. IR::RegOpnd *srcOpnd = isGenerator ? generatorArgsPtrOpnd : IR::Opnd::CreateFramePointerOpnd(this->m_func);
  8091. uint16 actualOffset = isGenerator ? 0 : GetFormalParamOffset(); //4
  8092. IR::RegOpnd *argPtrOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  8093. InsertAdd(false, argPtrOpnd, srcOpnd, IR::IntConstOpnd::New((formalsOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr, TyUint32, this->m_func), helperCallInstr);
  8094. m_lowererMD.LoadHelperArgument(helperCallInstr, argPtrOpnd);
  8095. m_lowererMD.LoadHelperArgument(helperCallInstr, excessOpnd);
  8096. m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
  8097. return helperCallInstr;
  8098. }
  8099. ///----------------------------------------------------------------------------
  8100. ///
  8101. /// Lowerer::LowerArgIn
  8102. ///
  8103. /// This function checks the passed-in argument count against the index of this
  8104. /// argument and uses null for a param value if the caller didn't explicitly
  8105. /// pass anything.
  8106. ///
  8107. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerArgIn(IR::Instr *instrArgIn)
{
    IR::LabelInstr * labelDone;
    IR::LabelInstr * labelUndef;
    IR::LabelInstr * labelNormal;
    IR::LabelInstr * labelInit;
    IR::LabelInstr * labelInitNext;
    IR::BranchInstr * instrBranch;
    IR::Instr * instrArgInNext;
    IR::Instr * instrInsert;
    IR::Instr * instrPrev;
    IR::Instr * instrResume = nullptr;
    IR::Opnd * dstOpnd;
    IR::Opnd * srcOpnd;
    IR::Opnd * opndUndef;
    Js::ArgSlot argIndex;
    StackSym * symParam;
    BOOLEAN isDuplicate;
    IR::RegOpnd * generatorArgsPtrOpnd = nullptr;

    // We start with:
    // s1 = ArgIn_A param1
    // s2 = ArgIn_A param2
    // ...
    // sn = ArgIn_A paramn
    //
    // We want to end up with:
    //
    // s1 = ArgIn_A param1 -- Note that this is unconditional
    // count = (load from param area)
    // BrLt_A $start, count, n -- Forward cbranch to the uncommon case
    // Br $Ln
    // $start:
    // sn = assign undef
    // BrGe_A $Ln-1, count, n-1
    // sn-1 = assign undef
    // ...
    // s2 = assign undef
    // Br $done
    // $Ln:
    // sn = assign paramn
    // $Ln-1:
    // sn-1 = assign paramn-1
    // ...
    // s2 = assign param2
    // $done:

    IR::Opnd *restDst = nullptr;
    bool hasRest = instrArgIn->m_opcode == Js::OpCode::ArgIn_Rest;
    if (hasRest)
    {
        IR::Instr *restInstr = instrArgIn;
        restDst = restInstr->UnlinkDst();

        if (m_func->GetJnFunction()->GetHasImplicitArgIns() && m_func->GetInParamsCount() > 1)
        {
            // There are regular formals before the rest param: back up to the
            // last ArgIn_A and fold rest-array creation into the normal
            // formals-lowering flow below (see the hasRest block at the end).
            while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
            {
                instrArgIn = instrArgIn->m_prev;
                if (instrResume == nullptr)
                {
                    // Remember the first skipped instruction so lowering can
                    // resume there (it still needs to be lowered).
                    instrResume = instrArgIn;
                }
            }
            restInstr->Remove();
        }
        else
        {
            // Rest is effectively the only source-dependent parameter.
            // excess = actualCount - formalsCount; clamp negative excess to 0.
            IR::Instr * instrCount = m_lowererMD.LoadInputParamCount(instrArgIn, -this->m_func->GetInParamsCount());
            IR::Opnd * excessOpnd = instrCount->GetDst();

            IR::LabelInstr *createRestArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            // BrGe $createRestArray, excess, 0
            InsertCompareBranch(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), Js::OpCode::BrGe_A, createRestArrayLabel, instrArgIn);

            // MOV excess, 0
            InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), instrArgIn);

            // $createRestArray
            instrArgIn->InsertBefore(createRestArrayLabel);

            if (m_func->GetJnFunction()->IsGenerator())
            {
                // Generator args live in the generator object, not the frame.
                generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            }

            IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
            IR::Instr *prev = LowerRestParameter(formalsOpnd, restDst, excessOpnd, instrArgIn, generatorArgsPtrOpnd);
            instrArgIn->Remove();
            return prev;
        }
    }

    srcOpnd = instrArgIn->GetSrc1();
    symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();

    argIndex = symParam->GetParamSlotNum();
    if (argIndex == 1)
    {
        // The "this" argument is not source-dependent and doesn't need to be checked.
        if (m_func->GetJnFunction()->IsGenerator())
        {
            generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        }

        m_lowererMD.ChangeToAssign(instrArgIn);
        return instrResume == nullptr ? instrArgIn->m_prev : instrResume;
    }

    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();

    AssertMsg(argIndex == formalsCount, "Expect to see the ArgIn's in numerical order");

    // Because there may be instructions between the ArgIn's, such as saves to the frame object,
    // we find the top of the sequence of ArgIn's and insert everything there. This assumes that
    // ArgIn's use param symbols as src's and not the results of previous instructions.

    instrPrev = instrArgIn;
    instrInsert = instrArgIn->m_next;

    // Walk backwards from the last ArgIn to the ArgIn for param slot 2,
    // validating ordering and noting any interleaved non-ArgIn instructions.
    while (argIndex > 2)
    {
        instrPrev = instrPrev->m_prev;
        if (instrPrev->m_opcode == Js::OpCode::ArgIn_A)
        {
            srcOpnd = instrPrev->GetSrc1();
            symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
            AssertMsg(symParam->GetParamSlotNum() == argIndex - 1, "ArgIn's not in numerical order");
            argIndex = symParam->GetParamSlotNum();
        }
        else
        {
            // Make sure that this instruction gets lowered.
            if (instrResume == nullptr)
            {
                instrResume = instrPrev;
            }
        }
    }

    // The loading of parameters will be inserted above this instruction.
    // (This supersedes the instrArgIn->m_next assignment above.)
    instrInsert = instrPrev;
    if (instrResume == nullptr)
    {
        // We found no intervening non-ArgIn's, so lowering can resume at the previous instruction.
        instrResume = instrInsert->m_prev;
    }

    // Now insert all the checks and undef-assigns.

    if (m_func->GetJnFunction()->IsGenerator())
    {
        generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrInsert);
    }

    // excessOpnd = (load from param area) - formalCounts
    IR::Instr * instrCount = this->m_lowererMD.LoadInputParamCount(instrInsert, -formalsCount, true);
    IR::Opnd * excessOpnd = instrCount->GetDst();

    labelUndef = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/ true);
    // BrLt $undef (taken when fewer actuals than formals were passed)
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, labelUndef, instrInsert);

    // Br $Ln
    labelNormal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelInit = labelNormal;
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelNormal, this->m_func);
    instrInsert->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    // Insert the labels
    instrInsert->InsertBefore(labelUndef);
    instrInsert->InsertBefore(labelNormal);

    // MOV undefReg, undefAddress
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(labelNormal, LibraryValue::ValueUndefined);
    opndUndef = IR::RegOpnd::New(TyMachPtr, this->m_func);
    LowererMD::CreateAssign(opndUndef, opndUndefAddress, labelNormal);

    // Tracks dst syms already initialized, to handle duplicate formal names.
    BVSparse<JitArenaAllocator> *formalsBv = JitAnew(this->m_func->m_alloc, BVSparse<JitArenaAllocator>, this->m_func->m_alloc);

    // Emit, for each formal from the last down to slot 3, the undef-init
    // (before $Ln) and the normal param load (before the matching $Lk label).
    while (formalsCount > 2)
    {
        dstOpnd = instrArgIn->GetDst();
        Assert(dstOpnd->IsRegOpnd());
        isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

        // Now insert the undef initialization before the "normal" label
        // sn = assign undef
        LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

        // INC excessOpnd
        // BrEq_A $Ln-1
        formalsCount--;
        InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), labelNormal);
        labelInitNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        InsertBranch(Js::OpCode::BrEq_A, labelInitNext, labelNormal);

        // And insert the "normal" initialization before the "done" label
        // sn = assign paramn
        // $Ln-1:
        labelInit->InsertAfter(labelInitNext);
        labelInit = labelInitNext;

        instrArgInNext = instrArgIn->m_prev;
        instrArgIn->Unlink();

        // function foo(x, x) { use(x); }
        // This should refer to the second 'x'. Since we reverse the order here however, we need to skip
        // the initialization of the first 'x' to not override the one for the second. WOOB:1105504
        if (isDuplicate)
        {
            instrArgIn->Free();
        }
        else
        {
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
            labelInit->InsertBefore(instrArgIn);
            this->m_lowererMD.ChangeToAssign(instrArgIn);
        }
        instrArgIn = instrArgInNext;

        // Skip over any non-ArgIn instructions interleaved in the sequence.
        while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
        {
            instrArgIn = instrArgIn->m_prev;
            AssertMsg(instrArgIn, "???");
        }

        AssertMsg(instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() == formalsCount,
                  "Expect all ArgIn's to be in numerical order by param slot");
    }

    // Insert final undef and normal initializations, jumping unconditionally to the end
    // rather than checking against the decremented formals count as we did inside the loop above.

    // s2 = assign undef
    dstOpnd = instrArgIn->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);
    LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

    if (hasRest)
    {
        // On the undef path there were no excess actuals; rest gets length 0.
        InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), labelNormal);
    }

    // Br $done
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelDone, this->m_func);
    labelNormal->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    // s2 = assign param2
    // $done:
    labelInit->InsertAfter(labelDone);

    if (hasRest)
    {
        // The formals count has been tainted, so restore it before lowering rest
        IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
        LowerRestParameter(formalsOpnd, restDst, excessOpnd, labelDone, generatorArgsPtrOpnd);
    }

    instrArgIn->Unlink();
    if (isDuplicate)
    {
        instrArgIn->Free();
    }
    else
    {
        ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        labelDone->InsertBefore(instrArgIn);
        this->m_lowererMD.ChangeToAssign(instrArgIn);
    }

    return instrResume;
}
  8345. void
  8346. Lowerer::ConvertArgOpndIfGeneratorFunction(IR::Instr *instrArgIn, IR::RegOpnd *generatorArgsPtrOpnd)
  8347. {
  8348. if (this->m_func->GetJnFunction()->IsGenerator())
  8349. {
  8350. // Replace stack param operand with offset into arguments array held by
  8351. // the generator object.
  8352. IR::Opnd * srcOpnd = instrArgIn->UnlinkSrc1();
  8353. StackSym * symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
  8354. Js::ArgSlot argIndex = symParam->GetParamSlotNum();
  8355. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorArgsPtrOpnd, (argIndex - 1) * MachPtr, TyMachPtr, this->m_func);
  8356. srcOpnd->Free(this->m_func);
  8357. instrArgIn->SetSrc1(indirOpnd);
  8358. }
  8359. }
  8360. IR::RegOpnd *
  8361. Lowerer::LoadGeneratorArgsPtr(IR::Instr *instrInsert)
  8362. {
  8363. IR::Instr * instr = LoadGeneratorObject(instrInsert);
  8364. IR::RegOpnd * generatorRegOpnd = instr->GetDst()->AsRegOpnd();
  8365. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetArgsPtrOffset(), TyMachPtr, instrInsert->m_func);
  8366. IR::RegOpnd * argsPtrOpnd = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  8367. LowererMD::CreateAssign(argsPtrOpnd, indirOpnd, instrInsert);
  8368. return argsPtrOpnd;
  8369. }
  8370. IR::Instr *
  8371. Lowerer::LoadGeneratorObject(IR::Instr * instrInsert)
  8372. {
  8373. StackSym * generatorSym = StackSym::NewParamSlotSym(1, instrInsert->m_func);
  8374. instrInsert->m_func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
  8375. IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, instrInsert->m_func);
  8376. IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
  8377. return LowererMD::CreateAssign(generatorRegOpnd, generatorSymOpnd, instrInsert);
  8378. }
  8379. IR::Instr *
  8380. Lowerer::LowerArgInAsmJs(IR::Instr * instrArgIn)
  8381. {
  8382. Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
  8383. Js::ArgSlot argCount = m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetArgCount();
  8384. IR::Instr * instr = instrArgIn;
  8385. for (int argNum = argCount - 1; argNum >= 0; --argNum)
  8386. {
  8387. IR::Instr * instrPrev = instr->m_prev;
  8388. m_lowererMD.ChangeToAssign(instr);
  8389. instr = instrPrev;
  8390. }
  8391. return instr;
  8392. }
// Attempts to emit an inline fast path for a call to a known built-in library
// function (prejit-only path; see the warning below). Emits an identity check
// of the call target against the library's function object, the fast path,
// and keeps the original call as the helper/slow path. Returns false if the
// call shape doesn't qualify (leaving the call untouched), true otherwise.
bool
Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
{
    IR::Opnd *src1 = callInstr->GetSrc1();
    IR::Opnd *src2 = callInstr->GetSrc2();

    // Get the arg count by looking at the slot number of the last arg symbol.
    if (!src2->IsSymOpnd())
    {
        // No args? Not sure this is possible, but handle it.
        return false;
    }

    StackSym *argLinkSym = src2->AsSymOpnd()->m_sym->AsStackSym();

    // Subtract "this" from the arg count.
    IntConstType argCount = argLinkSym->GetArgSlotNum() - 1;

    // Find the callee's built-in index (if any).
    Js::BuiltinFunction index = Func::GetBuiltInIndex(src1);

    // Warning!
    // Don't add new built-in to following switch. Built-ins needs to be inlined in call direct way.
    // Following is only for prejit scenarios where we don't get inlining always and generate fast path in lowerer.
    // Generating fastpath here misses fixed functions and globopt optimizations.
    switch(index)
    {
    case Js::BuiltinFunction::String_CharAt:
    case Js::BuiltinFunction::String_CharCodeAt:
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Char[Code]At assumes result is used.
            return false;
        }
        break;
    case Js::BuiltinFunction::Math_Abs:
#ifdef _M_IX86
        // The x86 fast path for Abs requires SSE2.
        if (!AutoSystemInfo::Data.SSE2Available())
        {
            return false;
        }
#endif
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Abs assumes result is used.
            return false;
        }
        break;
    case Js::BuiltinFunction::Array_Push:
    {
        if (argCount != 1)
        {
            return false;
        }
        if (callInstr->GetDst())
        {
            // Optimization of push assumes result is unused.
            return false;
        }

        // Walk two links up the ArgOut chain to find the array (the "this"
        // argument of the push call).
        StackSym *linkSym = callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        linkSym = linkSym->m_instrDef->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());

        IR::Opnd *const arrayOpnd = linkSym->m_instrDef->GetSrc1();
        if(!arrayOpnd->IsRegOpnd())
        {
            // This should be rare, but needs to be handled.
            // By now, we've already started some of the inlining. Simply jmp to the helper.
            // The branch will get peeped later.
            return false;
        }
        if(!ShouldGenerateArrayFastPath(arrayOpnd, false, false, false) ||
            arrayOpnd->GetValueType().IsLikelyNativeArray())
        {
            // Rejecting native array for now, since we have to do a FromVar at the call site and bail out.
            return false;
        }
        break;
    }
    case Js::BuiltinFunction::String_Replace:
    {
        if(argCount != 2)
        {
            return false;
        }
        if(!ShouldGenerateStringReplaceFastPath(callInstr, argCount))
        {
            return false;
        }
        break;
    }
    default:
        return false;
    }

    Assert(Func::IsBuiltInInlinedInLowerer(callInstr->GetSrc1()));

    // Check that the call target really is the expected library function
    // object; otherwise fall to the helper (the original call).
    IR::Opnd *callTargetOpnd = callInstr->GetSrc1();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Opnd *objRefOpnd = IR::MemRefOpnd::New((void*)this->GetObjRefForBuiltInTarget(callTargetOpnd->AsRegOpnd()), TyMachReg, this->m_func);

    InsertCompareBranch(callTargetOpnd, objRefOpnd, Js::OpCode::BrNeq_A, labelHelper, callInstr);
    callInstr->InsertBefore(labelHelper);

    // Gather the ArgOut sources into argsOpnd. The chain is walked from last
    // to first, so argsOpnd[argCount] down to argsOpnd[0] ("this") are filled
    // in reverse; the ArgOut instrs themselves are hoisted above $helper.
    Assert(argCount <= 2);

    IR::Opnd *argsOpnd[3];
    IR::Opnd *linkOpnd = callInstr->GetSrc2();
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
        StackSym *sym = src2->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;

        Assert(argCount >= 0);
        argsOpnd[argCount] = argInstr->GetSrc1();
        argCount--;

        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);

        linkOpnd = argInstr->GetSrc2();
    }
    // All slots (including "this") must have been consumed.
    AnalysisAssert(argCount == -1);

    // Move startcall above $helper as well.
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);

    // $doneLabel:
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    callInstr->InsertAfter(doneLabel);

    bool success = true;
    switch(index)
    {
    case Js::BuiltinFunction::Math_Abs:
        this->m_lowererMD.GenerateFastAbs(callInstr->GetDst(), argsOpnd[1], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::String_CharCodeAt:
    case Js::BuiltinFunction::String_CharAt:
        success = this->m_lowererMD.GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
            callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::Array_Push:
        success = GenerateFastPush(argsOpnd[0], argsOpnd[1], callInstr, labelHelper, labelHelper, nullptr, doneLabel);
        break;

    case Js::BuiltinFunction::String_Replace:
        success = GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    default:
        Assert(UNREACHED);
    }

    // Skip the helper call when the fast path succeeded at runtime.
    IR::Instr *instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, this->m_func);
    labelHelper->InsertBefore(instr);

    return success;
}
  8548. // Perform lowerer part of inlining built-in function.
  8549. // For details, see inline.cpp.
  8550. //
  8551. // Description of changes here (note that taking care of Argouts are similar to InlineeStart):
  8552. // - Move ArgOut_A_InlineBuiltIn next to the call instr -- used by bailout processing in register allocator.
  8553. // - Remove StartCall and InlineBuiltInStart for this call.
  8554. // Before:
  8555. // StartCall fn
  8556. // d1 = BIA s1, link1
  8557. // ...
  8558. // InlineBuiltInStart fn, link0
  8559. // After:
  8560. // ...
  8561. // d1 = BIA s1, NULL
  8562. void Lowerer::LowerInlineBuiltIn(IR::Instr* builtInEndInstr)
  8563. {
  8564. Assert(builtInEndInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || builtInEndInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd);
  8565. IR::Instr* startCallInstr;
  8566. builtInEndInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
  8567. startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
  8568. return false;
  8569. });
  8570. // Keep the startCall around as bailout refers to it. Just unlink it for now - do not delete it.
  8571. startCallInstr->Unlink();
  8572. builtInEndInstr->Remove();
  8573. }
  8574. Js::JavascriptFunction **
  8575. Lowerer::GetObjRefForBuiltInTarget(IR::RegOpnd * regOpnd)
  8576. {
  8577. Js::JavascriptFunction ** mathFns =
  8578. this->m_func->GetScriptContext()->GetLibrary()->GetBuiltinFunctions();
  8579. Js::BuiltinFunction index = regOpnd->m_sym->m_builtInIndex;
  8580. AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
  8581. return mathFns + index;
  8582. }
  8583. IR::Instr *
  8584. Lowerer::LowerNewRegEx(IR::Instr * instr)
  8585. {
  8586. IR::Opnd *src1 = instr->UnlinkSrc1();
  8587. Assert(src1->IsAddrOpnd());
  8588. #if ENABLE_REGEX_CONFIG_OPTIONS
  8589. if (REGEX_CONFIG_FLAG(RegexTracing))
  8590. {
  8591. Assert(!instr->GetDst()->CanStoreTemp());
  8592. IR::Instr * instrPrev = LoadScriptContext(instr);
  8593. instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
  8594. m_lowererMD.ChangeToHelperCall(instr, IR::HelperScrRegEx_OP_NewRegEx);
  8595. return instrPrev;
  8596. }
  8597. #endif
  8598. IR::Instr * instrPrev = instr->m_prev;
  8599. IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
  8600. IR::SymOpnd * tempObjectSymOpnd;
  8601. bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd, IR::HelperAllocMemForJavascriptRegExp, sizeof(Js::JavascriptRegExp), &tempObjectSymOpnd);
  8602. if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
  8603. {
  8604. // Hoist the vtable and pattern init to the outer most loop top as it never changes
  8605. InsertMove(tempObjectSymOpnd,
  8606. LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableJavascriptRegExp),
  8607. this->outerMostLoopLabel, false);
  8608. }
  8609. else
  8610. {
  8611. GenerateMemInit(dstOpnd, 0, LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp), instr, isZeroed);
  8612. }
  8613. GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfType(),
  8614. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueRegexType), instr, isZeroed);
  8615. GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfAuxSlots(), instr, isZeroed);
  8616. GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfObjectArray(), instr, isZeroed);
  8617. if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
  8618. {
  8619. InsertMove(IR::SymOpnd::New(tempObjectSymOpnd->m_sym,
  8620. tempObjectSymOpnd->m_offset + Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, this->m_func),
  8621. src1, this->outerMostLoopLabel, false);
  8622. }
  8623. else
  8624. {
  8625. GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfPattern(), src1, instr, isZeroed);
  8626. }
  8627. GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfSplitPattern(), instr, isZeroed);
  8628. GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), instr, isZeroed);
  8629. GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), 0, instr, isZeroed);
  8630. instr->Remove();
  8631. return instrPrev;
  8632. }
  8633. IR::Instr *
  8634. Lowerer::GenerateRuntimeError(IR::Instr * insertBeforeInstr, Js::MessageId errorCode, IR::JnHelperMethod helper /*= IR::JnHelperMethod::HelperOp_RuntimeTypeError*/)
  8635. {
  8636. IR::Instr * runtimeErrorInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, this->m_func);
  8637. runtimeErrorInstr->SetSrc1(IR::IntConstOpnd::New(errorCode, TyInt32, this->m_func, true));
  8638. insertBeforeInstr->InsertBefore(runtimeErrorInstr);
  8639. return this->LowerUnaryHelperMem(runtimeErrorInstr, helper);
  8640. }
  8641. bool Lowerer::IsNullOrUndefRegOpnd(IR::RegOpnd *opnd) const
  8642. {
  8643. StackSym *sym = opnd->m_sym;
  8644. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  8645. {
  8646. return false;
  8647. }
  8648. Js::Var var = sym->GetConstAddress();
  8649. Js::TypeId typeId = Js::RecyclableObject::FromVar(var)->GetTypeId();
  8650. return typeId == Js::TypeIds_Null || typeId == Js::TypeIds_Undefined;
  8651. }
  8652. bool Lowerer::IsConstRegOpnd(IR::RegOpnd *opnd) const
  8653. {
  8654. StackSym *sym = opnd->m_sym;
  8655. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  8656. {
  8657. return false;
  8658. }
  8659. Js::Var var = sym->GetConstAddress();
  8660. Js::TypeId typeId = Js::RecyclableObject::FromVar(var)->GetTypeId();
  8661. return typeId == Js::TypeIds_Null || typeId == Js::TypeIds_Undefined || typeId == Js::TypeIds_Boolean;
  8662. }
  8663. bool
  8664. Lowerer::HasSideEffects(IR::Instr *instr)
  8665. {
  8666. if (LowererMD::IsCall(instr))
  8667. {
  8668. #ifdef _M_IX86
  8669. IR::Opnd *src1 = instr->GetSrc1();
  8670. if (src1->IsHelperCallOpnd())
  8671. {
  8672. IR::HelperCallOpnd * helper = src1->AsHelperCallOpnd();
  8673. switch(helper->m_fnHelper)
  8674. {
  8675. case IR::HelperOp_Int32ToAtomInPlace:
  8676. case IR::HelperOp_Int32ToAtom:
  8677. case IR::HelperOp_UInt32ToAtom:
  8678. return false;
  8679. }
  8680. }
  8681. #endif
  8682. return true;
  8683. }
  8684. return instr->HasAnySideEffects();
  8685. }
// Inlines Math.random().
// On x64 with a seeded PRNG, emits the seed update inline (xorshift-style mix
// with shift constants 23/17/26 over two 64-bit seed words), then converts the
// mixed bits to a double in [0, 1) by forcing the exponent to that of 1.0
// (giving a value in [1, 2)) and subtracting 1.0. On other targets, or when the
// PRNG is not yet seeded, falls back to the HelperDirectMath_Random call.
IR::Instr*
Lowerer::GenerateFastInlineBuiltInMathRandom(IR::Instr* instr)
{
    AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
    IR::Instr* retInstr = instr->m_prev;
    IR::Opnd* dst = instr->GetDst();
#if defined(_M_X64)
    if (m_func->GetScriptContext()->GetLibrary()->IsPRNGSeeded())
    {
        // mExp: bit pattern of 1.0 (sign 0, biased exponent 0x3FF, mantissa 0).
        // mMant: mask of the 52 mantissa bits.
        const uint64 mExp = 0x3FF0000000000000;
        const uint64 mMant = 0x000FFFFFFFFFFFFF;
        IR::RegOpnd* r0 = IR::RegOpnd::New(TyUint64, m_func); // s0
        IR::RegOpnd* r1 = IR::RegOpnd::New(TyUint64, m_func); // s1
        IR::RegOpnd* r3 = IR::RegOpnd::New(TyUint64, m_func); // helper uint64 reg
        IR::RegOpnd* r4 = IR::RegOpnd::New(TyFloat64, m_func); // helper float64 reg
        // ===========================================================
        // s0 = scriptContext->GetLibrary()->GetRandSeed1();
        // s1 = scriptContext->GetLibrary()->GetRandSeed0();
        // ===========================================================
        this->m_lowererMD.CreateAssign(r0,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, instr->m_func), instr);
        this->m_lowererMD.CreateAssign(r1,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, instr->m_func), instr);
        // ===========================================================
        // s1 ^= s1 << 23;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r1, instr);
        this->InsertShift(Js::OpCode::Shl_A, false, r3, r3, IR::IntConstOpnd::New(23, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);
        // ===========================================================
        // s1 ^= s1 >> 17;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r1, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(17, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);
        // ===========================================================
        // s1 ^= s0;
        // ===========================================================
        this->InsertXor(r1, r1, r0, instr);
        // ===========================================================
        // s1 ^= s0 >> 26;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r0, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(26, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);
        // ===========================================================
        // scriptContext->GetLibrary()->SetRandSeed0(s0);
        // scriptContext->GetLibrary()->SetRandSeed1(s1);
        // ===========================================================
        // Note the swap: seed0 receives the old seed1 (s0), seed1 the new mix (s1).
        this->m_lowererMD.CreateAssign(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, m_func), r0, instr);
        this->m_lowererMD.CreateAssign(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, m_func), r1, instr);
        // ===========================================================
        // dst = bit_cast<float64>(((s0 + s1) & mMant) | mExp);
        // ===========================================================
        // Random mantissa with exponent of 1.0 => a double in [1.0, 2.0).
        this->InsertAdd(false, r1, r1, r0, instr);
        this->m_lowererMD.CreateAssign(r3, IR::AddrOpnd::New((Js::Var)mMant, IR::AddrOpndKindConstantVar, m_func, true), instr);
        this->InsertAnd(r1, r1, r3, instr);
        this->m_lowererMD.CreateAssign(r3, IR::AddrOpnd::New((Js::Var)mExp, IR::AddrOpndKindConstantVar, m_func, true), instr);
        this->InsertOr(r1, r1, r3, instr);
        this->InsertMoveBitCast(dst, r1, instr);
        // ===================================================================
        // dst -= 1.0;
        // ===================================================================
        // Shift [1.0, 2.0) down to the required [0.0, 1.0) range.
        this->m_lowererMD.CreateAssign(r4, IR::MemRefOpnd::New((double*)&Js::JavascriptNumber::ONE_POINT_ZERO, TyFloat64, m_func, IR::AddrOpndKindDynamicDoubleRef), instr);
        this->InsertSub(false, dst, dst, r4, instr);
    }
    else
#endif
    {
        // Helper fallback; the helper writes to a register, so spill through a
        // temp when dst is not a register operand.
        IR::Opnd* tmpdst = dst;
        if (!dst->IsRegOpnd())
        {
            tmpdst = IR::RegOpnd::New(dst->GetType(), instr->m_func);
        }
        LoadScriptContext(instr);
        IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, tmpdst, instr->m_func);
        instr->InsertBefore(helperCallInstr);
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperDirectMath_Random);
        if (tmpdst != dst)
        {
            InsertMove(dst, tmpdst, instr);
        }
    }
    instr->Remove();
    return retInstr;
}
  8774. IR::Instr *
  8775. Lowerer::LowerCallDirect(IR::Instr * instr)
  8776. {
  8777. IR::Opnd* linkOpnd = instr->UnlinkSrc2();
  8778. StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
  8779. IR::Instr* argInstr = linkSym->m_instrDef;
  8780. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized);
  8781. IR::Opnd* funcObj = argInstr->UnlinkSrc1();
  8782. instr->SetSrc2(argInstr->UnlinkSrc2());
  8783. argInstr->Remove();
  8784. if(instr->HasBailOutInfo())
  8785. {
  8786. IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(instr, instr->m_next, instr->m_next);
  8787. this->LowerBailOnEqualOrNotEqual(bailOutInstr);
  8788. }
  8789. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  8790. return this->GenerateDirectCall(instr, funcObj, (ushort)flags);
  8791. }
  8792. IR::Instr *
  8793. Lowerer::GenerateDirectCall(IR::Instr* inlineInstr, IR::Opnd* funcObj, ushort callflags)
  8794. {
  8795. int32 argCount = m_lowererMD.LowerCallArgs(inlineInstr, callflags);
  8796. m_lowererMD.LoadHelperArgument(inlineInstr, funcObj);
  8797. m_lowererMD.LowerCall(inlineInstr, (Js::ArgSlot)argCount); //to account for function object and callinfo
  8798. return inlineInstr->m_prev;
  8799. }
  8800. /*
  8801. * GenerateHelperToArrayPushFastPath
  8802. * Generates Helper Call and pushes arguments to the Push HelperCall
  8803. */
  8804. IR::Instr *
  8805. Lowerer::GenerateHelperToArrayPushFastPath(IR::Instr * instr, IR::LabelInstr * bailOutLabelHelper)
  8806. {
  8807. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  8808. IR::Opnd * elementHelperOpnd = instr->UnlinkSrc2();
  8809. IR::JnHelperMethod helperMethod;
  8810. if(elementHelperOpnd->IsInt32())
  8811. {
  8812. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeIntArray());
  8813. helperMethod = IR::HelperArray_NativeIntPush;
  8814. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  8815. }
  8816. else if(elementHelperOpnd->IsFloat())
  8817. {
  8818. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeFloatArray());
  8819. helperMethod = IR::HelperArray_NativeFloatPush;
  8820. m_lowererMD.LoadDoubleHelperArgument(instr, elementHelperOpnd);
  8821. }
  8822. else
  8823. {
  8824. helperMethod = IR::HelperArray_VarPush;
  8825. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  8826. }
  8827. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  8828. LoadScriptContext(instr);
  8829. return m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  8830. }
/*
 * GenerateHelperToArrayPopFastPath
 * Generates Helper Call and pushes arguments to the Pop HelperCall.
 * The helper is chosen from the array's value type and whether the result is
 * used; for native arrays with a dst, a missing-item check is emitted after
 * the call that branches to bailOutLabelHelper when the popped value is the
 * missing-item sentinel.
 */
IR::Instr *
Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
    ValueType arrayValueType = arrayHelperOpnd->GetValueType();
    IR::JnHelperMethod helperMethod;
    //Decide the helperMethod based on dst availability and nativity of the array.
    if(arrayValueType.IsLikelyNativeArray() && !instr->GetDst())
    {
        // Result unused: the no-dst native pop avoids producing a value.
        helperMethod = IR::HelperArray_NativePopWithNoDst;
    }
    else if(arrayValueType.IsLikelyNativeIntArray())
    {
        helperMethod = IR::HelperArray_NativeIntPop;
    }
    else if(arrayValueType.IsLikelyNativeFloatArray())
    {
        helperMethod = IR::HelperArray_NativeFloatPop;
    }
    else
    {
        helperMethod = IR::HelperArray_VarPop;
    }
    m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
    //We do not need scriptContext for HelperArray_NativePopWithNoDst call.
    if(helperMethod != IR::HelperArray_NativePopWithNoDst)
    {
        LoadScriptContext(instr);
    }
    IR::Instr * retInstr = m_lowererMD.ChangeToHelperCall(instr, helperMethod, bailOutLabelHelper);
    //We don't need missing item check for var arrays, as there it is taken care by the helper.
    if(arrayValueType.IsLikelyNativeArray())
    {
        if(retInstr->GetDst())
        {
            //Do this check only for native arrays with Dst. For Var arrays, this is taken care in the Runtime helper itself.
            InsertCompareBranch(GetMissingItemOpnd(retInstr->GetDst()->GetType(), m_func), retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We need unconditional jump to doneLabel, if there is no dst in Pop instr.
            InsertBranch(Js::OpCode::Br, true, doneLabel, bailOutLabelHelper);
        }
    }
    return retInstr;
}
// Lowers a BrTrue_A/BrFalse_A that may carry bailout info.
// When bailout info is present, an implicit-call bailout check is split out
// between the helper call and the branch; if the branch also carries shared
// (aux) debugger bailout bits, those are lowered here as an additional
// bailout sharing the same BailOutInfo.
IR::Instr *
Lowerer::LowerCondBranchCheckBailOut(IR::BranchInstr * branchInstr, IR::Instr * helperCall, bool isHelper)
{
    Assert(branchInstr->m_opcode == Js::OpCode::BrTrue_A || branchInstr->m_opcode == Js::OpCode::BrFalse_A);
    if (branchInstr->HasBailOutInfo())
    {
        IR::BailOutKind debuggerBailOutKind = IR::BailOutInvalid;
        if (branchInstr->HasAuxBailOut())
        {
            // We have shared debugger bailout. For branches we lower it here, not in SplitBailForDebugger.
            // See SplitBailForDebugger for details.
            AssertMsg(!(branchInstr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
            debuggerBailOutKind = branchInstr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;
            AssertMsg((debuggerBailOutKind & ~(IR::BailOutIgnoreException | IR::BailOutForceByFlag)) == 0, "Only IR::BailOutIgnoreException|ForceByFlag supported here.");
        }
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(branchInstr, helperCall, branchInstr);
        IR::Instr* prevInstr = this->LowerBailOnEqualOrNotEqual(bailOutInstr, branchInstr, nullptr, nullptr, isHelper);
        if (debuggerBailOutKind != IR::BailOutInvalid)
        {
            // Note that by this time implicit calls bailout is already lowered.
            // What we do here is use same bailout info and lower debugger bailout which would be shared bailout.
            BailOutInfo* bailOutInfo = bailOutInstr->GetBailOutInfo();
            IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
                Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
            prevInstr->InsertAfter(debuggerBailoutInstr);
            // The result of that is:
            // original helper op_* instr, then debugger bailout, then implicit calls bailout/etc with the branch instr.
            // Example:
            // s35(eax).i32    =  CALL           Op_GreaterEqual.u32   # -- original op_* helper
            // s34.i32         =  MOV            s35(eax).i32          #
            //                    BailForDebugger                      # Bailout: #0042 (BailOutIgnoreException) -- the debugger bailout
            //                    CMP            [0x0003BDE0].i8, 1 (0x1).i8 # -- implicit calls check
            //                    JEQ            $L10                  #
            //$L11: [helper]                                           #
            //                    CALL           SaveAllRegistersAndBranchBailOut.u32 # Bailout: #0042 (BailOutOnImplicitCalls)
            //                    JMP            $L5                   #
            //$L10: [helper]                                           #
            //                    BrFalse_A      $L3, s34.i32          #0034 -- The BrTrue/BrFalse branch (branch instr)
            //$L6: [helper]                                            #0042
            this->LowerBailForDebugger(debuggerBailoutInstr, isHelper);
            // After lowering this we will have a check which on bailout condition will JMP to $L11.
        }
    }
    return m_lowererMD.LowerCondBranch(branchInstr);
}
  8926. IR::Instr *
  8927. Lowerer::LoadArgumentsFromStack(IR::Instr * instr)
  8928. {
  8929. IR::Instr * prevInstr = instr->m_prev;
  8930. Assert(instr->GetDst()->IsRegOpnd());
  8931. if (instr->m_func->IsInlinee())
  8932. {
  8933. instr->ReplaceSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
  8934. }
  8935. else
  8936. {
  8937. instr->ReplaceSrc1(this->m_lowererMD.CreateStackArgumentsSlotOpnd());
  8938. }
  8939. this->m_lowererMD.ChangeToAssign(instr);
  8940. return prevInstr;
  8941. }
// Builds a SymOpnd from which the caller's CallInfo can be loaded.
// For generator functions the CallInfo is not on the stack: it is read off the
// generator object (implicit param 1) and copied into a fresh stack sym, and
// an opnd for that sym is returned. Otherwise the CallInfo is simply the
// second implicit stack parameter.
IR::SymOpnd *
Lowerer::LoadCallInfo(IR::Instr * instrInsert)
{
    IR::SymOpnd * srcOpnd;
    Func * func = instrInsert->m_func;
    if (func->GetJnFunction()->IsGenerator())
    {
        // Generator function arguments and ArgumentsInfo are not on the stack. Instead they
        // are accessed off the generator object (which is prm1).
        StackSym * generatorSym = StackSym::NewParamSlotSym(1, func);
        func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
        IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, func);
        IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(generatorRegOpnd, generatorSymOpnd, instrInsert);
        // Load generator->callInfo into a register, then spill it to a new
        // stack sym so callers get a SymOpnd like the non-generator path.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetCallInfoOffset(), TyMachPtr, func);
        IR::Instr * instr = LowererMD::CreateAssign(IR::RegOpnd::New(TyMachPtr, func), indirOpnd, instrInsert);
        StackSym * callInfoSym = StackSym::New(TyMachReg, func);
        IR::SymOpnd * callInfoSymOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
        LowererMD::CreateAssign(callInfoSymOpnd, instr->GetDst(), instrInsert);
        srcOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
    }
    else
    {
        // Otherwise callInfo is always the "second" argument.
        // The stack looks like this:
        //
        //       script param N
        //       ...
        //       script param 1
        //       callinfo
        //       function object
        //       return addr
        // FP -> FP chain
        StackSym * srcSym = LowererMD::GetImplicitParamSlotSym(1, func);
        srcOpnd = IR::SymOpnd::New(srcSym, TyMachReg, func);
    }
    return srcOpnd;
}
// Lowers BailOnNotStackArgs: bail out unless src1 == src2 (the stack-args
// identity check) and, for non-inlined functions, unless the actual argument
// count is below Js::InlineeCallInfo::MaxInlineeArgoutCount. If the function
// has no stack args at all, throws a rejit exception (InlineApplyDisabled)
// instead of emitting a guaranteed bailout.
IR::Instr *
Lowerer::LowerBailOnNotStackArgs(IR::Instr * instr)
{
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }
    IR::Instr * prevInstr = instr->m_prev;
    // Bail out test
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse the label that already follows this instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
        instr->InsertAfter(continueLabelInstr);
    }
    IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    if (!instr->m_func->IsInlinee())
    {
        //BailOut if it is not stack args or the number of actuals (except "this" argument) is greater than or equal to 15.
        IR::Opnd* stackArgs = instr->UnlinkSrc1();
        InsertCompareBranch(stackArgs, instr->UnlinkSrc2(), Js::OpCode::BrNeq_A, helperLabelInstr, instr);
        IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, instr->m_func);
        IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, stackArgs, instr->m_func);
        ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); //LdLen_A works only on stack arguments
        instr->InsertBefore(ldLen);
        this->GenerateFastRealStackArgumentsLdLen(ldLen);
        this->InsertCompareBranch(ldLenDstOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount, TyUint32, m_func, true), Js::OpCode::BrLt_A, true, continueLabelInstr, instr);
    }
    else
    {
        //For Inlined functions, we are sure actuals can't exceed Js::InlineeCallInfo::MaxInlineeArgoutCount (15).
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrEq_A, continueLabelInstr, instr);
    }
    // Falling through (or branching) to the helper label reaches the bailout.
    instr->InsertBefore(helperLabelInstr);
    this->GenerateBailOut(instr, nullptr, nullptr);
    return prevInstr;
}
// Lowers BailOnNotSpreadable for inline spread of a call's array argument.
// If the value type alone proves the fast path impossible (not likely a plain
// array, optimized array, object-with-array, inlinee, or a type GenerateArrayTest
// can't handle), throws a rejit exception (InlineSpreadDisabled). Otherwise
// emits runtime checks — array test, no-missing-values flag, and a length
// bound of Js::InlineeCallInfo::MaxInlineeArgoutCount - 1 — each branching to
// a bailout.
IR::Instr *
Lowerer::LowerBailOnNotSpreadable(IR::Instr *instr)
{
    // We only avoid bailing out / throwing a rejit exception when the array operand is a simple, non-optimized, non-object array.
    IR::Instr * prevInstr = instr->m_prev;
    Func *func = instr->m_func;
    IR::RegOpnd *arrayOpnd = nullptr;
    IR::Opnd *arraySrcOpnd = instr->UnlinkSrc1();
    if (!arraySrcOpnd->IsRegOpnd())
    {
        // The checks below need a register; copy the source into one.
        arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, instr);
    }
    else
    {
        arrayOpnd = arraySrcOpnd->AsRegOpnd();
    }
    const ValueType baseValueType(arrayOpnd->GetValueType());
    // Check if we can just throw a rejit exception based on valuetype alone instead of bailing out.
    if (!baseValueType.IsLikelyArray()
        || baseValueType.IsLikelyAnyOptimizedArray()
        || (baseValueType.IsLikelyObject() && (baseValueType.GetObjectType() == ObjectType::ObjectWithArray))
        // Validate that GenerateArrayTest will not fail.
        || !(baseValueType.IsUninitialized() || baseValueType.HasBeenObject())
        || m_func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }
    // Past this point, we will need to use a bailout.
    IR::LabelInstr *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
    // See if we can skip various array checks on value type alone
    if (!baseValueType.IsArray())
    {
        GenerateArrayTest(arrayOpnd, bailOutLabel, bailOutLabel, instr, false);
    }
    if (!(baseValueType.IsArray() && baseValueType.HasNoMissingValues()))
    {
        // Bail out unless the HasNoMissingValues flag is set on the array.
        InsertTestBranch(
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
            IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
            Js::OpCode::BrEq_A,
            bailOutLabel,
            instr);
    }
    // Bail out when length exceeds what the inlinee call info can encode.
    IR::IndirOpnd *arrayLenPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    InsertCompareBranch(arrayLenPtrOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount - 1, TyUint8, func), Js::OpCode::BrGt_A, true, bailOutLabel, instr);
    // Fast path jumps over the bailout block.
    IR::LabelInstr *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, instr);
    instr->InsertBefore(bailOutLabel);
    instr->InsertAfter(skipBailOutLabel);
    GenerateBailOut(instr);
    return prevInstr;
}
  9076. IR::Instr *
  9077. Lowerer::LowerBailOnNotPolymorphicInlinee(IR::Instr * instr)
  9078. {
  9079. Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnFailedPolymorphicInlineTypeCheck || instr->GetBailOutKind() == IR::BailOutOnPolymorphicInlineFunction));
  9080. IR::Instr* instrPrev = instr->m_prev;
  9081. this->GenerateBailOut(instr, nullptr, nullptr);
  9082. return instrPrev;
  9083. }
// Shared helper for BailOnEqual/BailOnNotEqual lowering: emits the compare
// and the branch-around-bailout, plus the helper label when we are not
// already inside a helper block. The bailout itself is generated by the
// caller after this returns.
void
Lowerer::LowerBailoutCheckAndLabel(IR::Instr *instr, bool onEqual, bool isHelper)
{
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse the label already following this instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instr->InsertAfter(continueLabelInstr);
    }
    if(instr->GetBailOutKind() == IR::BailOutInjected)
    {
        // BailOnEqual 0, 0
        Assert(onEqual);
        Assert(instr->GetSrc1()->IsEqual(instr->GetSrc2()));
        Assert(instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 0);
        // The operands cannot be equal when generating a compare (assert) but since this is for testing purposes, hoist a src.
        // Ideally, we would just create a BailOut instruction that generates a guaranteed bailout, but there seem to be issues
        // with doing this in a non-helper path. So finally, it would generate:
        //     xor s0, s0
        //     test s0, s0
        //     jnz $continue
        //     $bailout:
        //     // bailout
        //     $continue:
        instr->HoistSrc1(LowererMD::GetLoadOp(instr->GetSrc1()->GetType()));
    }
    // Branch to continue when the bailout condition does NOT hold.
    InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(),
        onEqual ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, continueLabelInstr, instr);
    if (!isHelper)
    {
        // Mark the fall-through (bailout) region as a helper block.
        IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertBefore(helperLabelInstr);
    }
}
// Lowers BailOnEqual/BailOnNotEqual: emits the compare/branch around the
// bailout, and — for BailOutOnImplicitCalls with a known inline cache — also
// patches the profiled field info with FldInfo_FromAccessor on the bailout
// path when the implicit call was an accessor.
IR::Instr *
Lowerer::LowerBailOnEqualOrNotEqual(IR::Instr * instr,
    IR::BranchInstr *branchInstr, // = nullptr
    IR::LabelInstr *labelBailOut, // = nullptr
    IR::PropertySymOpnd * propSymOpnd, // = nullptr
    bool isHelper) // = false
{
    IR::Instr * prevInstr = instr->m_prev;
    // Bail out test
    bool onEqual = instr->m_opcode == Js::OpCode::BailOnEqual;
    LowerBailoutCheckAndLabel(instr, onEqual, isHelper);
    // BailOutOnImplicitCalls is a post-op bailout. Since we look at the profile info for LdFld/StFld to decide whether the instruction may or may not call an accessor,
    // we need to update this profile information on the bailout path for BailOutOnImplicitCalls if the implicit call was an accessor call.
    if(propSymOpnd && ((instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls) && (propSymOpnd->m_inlineCacheIndex != -1) &&
        instr->m_func->GetJnFunction()->HasDynamicProfileInfo())
    {
        //      result = AND implCallFlags, ~ImplicitCall_None
        //               TST result, ImplicitCall_Accessor
        //               JEQ $bail
        //               OR profiledFlags, FldInfoAccessor
        //      $bail
        IR::Opnd * implicitCallFlags = GetImplicitCallFlagsOpnd();
        IR::Opnd * accessorImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_Accessor & ~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * maskNoImplicitCall = IR::IntConstOpnd::New((Js::ImplicitCallFlags)~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * fldInfoAccessor = IR::IntConstOpnd::New(Js::FldInfo_FromAccessor, GetFldInfoFlagsType(), instr->m_func, true);
        IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        IR::Instr * andInstr = InsertAnd(IR::RegOpnd::New(GetImplicitCallFlagsType(), instr->m_func), implicitCallFlags, maskNoImplicitCall, instr);
        InsertTestBranch(andInstr->GetDst(), accessorImplicitCall, Js::OpCode::BrEq_A, label, instr);
        // Record that this field access went through an accessor.
        Js::FldInfo * info = instr->m_func->GetJnFunction()->GetAnyDynamicProfileInfo()->GetFldInfo(instr->m_func->GetJnFunction(), propSymOpnd->m_inlineCacheIndex);
        IR::Opnd * profiledFlags = IR::MemRefOpnd::New((char*)info + info->GetOffsetOfFlags(), TyInt8, instr->m_func);
        InsertOr(profiledFlags, profiledFlags, fldInfoAccessor, instr);
        instr->InsertBefore(label);
    }
    this->GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
  9160. void Lowerer::LowerBailOnNegative(IR::Instr *const instr)
  9161. {
  9162. Assert(instr);
  9163. Assert(instr->m_opcode == Js::OpCode::BailOnNegative);
  9164. Assert(instr->HasBailOutInfo());
  9165. Assert(!instr->GetDst());
  9166. Assert(instr->GetSrc1());
  9167. Assert(instr->GetSrc1()->GetType() == TyInt32 || instr->GetSrc1()->GetType() == TyUint32);
  9168. Assert(!instr->GetSrc2());
  9169. IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
  9170. LowerOneBailOutKind(instr, instr->GetBailOutKind(), false);
  9171. Assert(!instr->HasBailOutInfo());
  9172. IR::Instr *insertBeforeInstr = instr->m_next;
  9173. Func *const func = instr->m_func;
  9174. // test src, src
  9175. // jns $skipBailOut
  9176. InsertCompareBranch(
  9177. instr->UnlinkSrc1(),
  9178. IR::IntConstOpnd::New(0, TyInt32, func, true),
  9179. Js::OpCode::BrGe_A,
  9180. skipBailOutLabel,
  9181. insertBeforeInstr);
  9182. instr->Remove();
  9183. }
  9184. IR::Instr *
  9185. Lowerer::LowerBailOnNotObject(IR::Instr *instr,
  9186. IR::BranchInstr *branchInstr /* = nullptr */,
  9187. IR::LabelInstr *labelBailOut /* = nullptr */)
  9188. {
  9189. IR::Instr *prevInstr = instr->m_prev;
  9190. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label,
  9191. m_func);
  9192. instr->InsertAfter(continueLabelInstr);
  9193. this->m_lowererMD.GenerateObjectTest(instr->UnlinkSrc1(),
  9194. instr,
  9195. continueLabelInstr,
  9196. /* fContinueLabel = */ true);
  9197. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  9198. return prevInstr;
  9199. }
  9200. IR::Instr *
  9201. Lowerer::LowerBailOnTrue(IR::Instr* instr, IR::LabelInstr* labelBailOut /*nullptr*/)
  9202. {
  9203. IR::Instr* instrPrev = instr->m_prev;
  9204. IR::LabelInstr* continueLabel = instr->GetOrCreateContinueLabel();
  9205. IR::RegOpnd * regSrc1 = IR::RegOpnd::New(instr->GetSrc1()->GetType(), this->m_func);
  9206. InsertMove(regSrc1, instr->UnlinkSrc1(), instr);
  9207. InsertTestBranch(regSrc1, regSrc1, Js::OpCode::BrEq_A, continueLabel, instr);
  9208. GenerateBailOut(instr, nullptr, labelBailOut);
  9209. return instrPrev;
  9210. }
// Lowers BailOnNotBuiltIn: compares src1 against the library's builtin
// function table entry selected by the src2 index constant, and bails out
// when they differ.
IR::Instr *
Lowerer::LowerBailOnNotBuiltIn(IR::Instr       *instr,
                               IR::BranchInstr *branchInstr  /* = nullptr */,
                               IR::LabelInstr  *labelBailOut /* = nullptr */)
{
    Assert(instr->GetSrc2()->IsIntConstOpnd());
    IR::Instr * prevInstr = instr->m_prev;
    // Address of the expected builtin: a fixed slot in the library's table.
    Js::JavascriptFunction ** builtInFuncs = this->m_func->GetScriptContext()->GetLibrary()->GetBuiltinFunctions();
    Js::BuiltinFunction builtInIndex = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();
    IR::Opnd *builtIn = IR::MemRefOpnd::New((void*)(builtInFuncs + builtInIndex), TyMachReg, instr->m_func);
#if TESTBUILTINFORNULL
    // Debug aid: break when the table entry is unexpectedly null.
    IR::LabelInstr * continueAfterTestLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    InsertTestBranch(builtIn, builtIn, Js::OpCode::BrNeq_A, continueAfterTestLabel, instr);
    this->m_lowererMD.GenerateDebugBreak(instr);
    instr->InsertBefore(continueAfterTestLabel);
#endif
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(continueLabel);
    InsertCompareBranch(instr->UnlinkSrc1(), builtIn, Js::OpCode::BrEq_A, continueLabel, instr);
    GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
  9233. IR::Instr *
  9234. Lowerer::LowerBailForDebugger(IR::Instr* instr, bool isInsideHelper /* = false */)
  9235. {
  9236. IR::Instr * prevInstr = instr->m_prev;
  9237. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  9238. AssertMsg(bailOutKind, "bailOutKind should not be zero at this time.");
  9239. AssertMsg(!(bailOutKind & IR::BailOutExplicit) || bailOutKind == IR::BailOutExplicit,
  9240. "BailOutExplicit cannot be combined with any other bailout flags.");
  9241. IR::LabelInstr* bailOutLabel = nullptr;
  9242. if (!(bailOutKind & IR::BailOutExplicit))
  9243. {
  9244. Js::DebugManager* debugManager = this->GetScriptContext()->GetThreadContext()->GetDebugManager();
  9245. DebuggingFlags* flags = debugManager->GetDebuggingFlags();
  9246. // Check 1 (do we need to bail out?)
  9247. // JXX bailoutLabel
  9248. // Check 2 (do we need to bail out?)
  9249. // JXX bailoutLabel
  9250. // ...
  9251. // JMP continueLabel
  9252. // bailoutDocumentLabel:
  9253. // (determine if document boundary reached - if not, JMP to continueLabel)
  9254. // NOTE: THIS BLOCK IS CONDITIONALLY GENERATED BASED ON doGenerateBailOutDocumentBlock
  9255. // bailoutLabel:
  9256. // bail out
  9257. // continueLabel:
  9258. // ...
  9259. IR::LabelInstr* bailOutDocumentLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
  9260. instr->InsertBefore(bailOutDocumentLabel);
  9261. IR::LabelInstr* bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
  9262. instr->InsertBefore(bailOutLabel);
  9263. IR::LabelInstr* continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ isInsideHelper);
  9264. instr->InsertAfter(continueLabel);
  9265. IR::BranchInstr* continueBranchInstr = this->InsertBranch(Js::OpCode::Br, continueLabel, bailOutDocumentLabel); // JMP continueLabel.
  9266. bool doGenerateBailOutDocumentBlock = false;
  9267. const IR::BailOutKind c_forceAndIgnoreEx = IR::BailOutForceByFlag | IR::BailOutIgnoreException;
  9268. if ((bailOutKind & c_forceAndIgnoreEx) == c_forceAndIgnoreEx)
  9269. {
  9270. // It's faster to check these together in 1 check rather than 2 separate checks at run time.
  9271. // CMP [&(flags->m_forceInterpreter, flags->m_isIgnoreException)], 0
  9272. // BNE bailout
  9273. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt16, m_func);
  9274. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt16, m_func, /*dontEncode*/ true);
  9275. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9276. bailOutKind ^= c_forceAndIgnoreEx;
  9277. }
  9278. else
  9279. {
  9280. if (bailOutKind & IR::BailOutForceByFlag)
  9281. {
  9282. // CMP [&flags->m_forceInterpreter], 0
  9283. // BNE bailout
  9284. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt8, m_func);
  9285. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func, /*dontEncode*/ true);
  9286. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9287. bailOutKind ^= IR::BailOutForceByFlag;
  9288. }
  9289. if (bailOutKind & IR::BailOutIgnoreException)
  9290. {
  9291. // CMP [&flags->m_byteCodeOffsetAfterIgnoreException], DebuggingFlags::InvalidByteCodeOffset
  9292. // BNE bailout
  9293. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + flags->GetByteCodeOffsetAfterIgnoreExceptionOffset(), TyInt32, m_func);
  9294. IR::Opnd* opnd2 = IR::IntConstOpnd::New(DebuggingFlags::InvalidByteCodeOffset, TyInt32, m_func, /*dontEncode*/ true);
  9295. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9296. bailOutKind ^= IR::BailOutIgnoreException;
  9297. }
  9298. }
  9299. if (bailOutKind & IR::BailOutBreakPointInFunction)
  9300. {
  9301. // CMP [&functionBody->m_sourceInfo.m_probeCount], 0
  9302. // BNE bailout
  9303. Js::FunctionBody* body = m_func->GetJnFunction();
  9304. IR::Opnd* opnd1 = IR::MemRefOpnd::New(&body->GetSourceInfo()->m_probeCount, TyInt32, m_func);
  9305. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt32, m_func, /*dontEncode*/ true);
  9306. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9307. bailOutKind ^= IR::BailOutBreakPointInFunction;
  9308. }
  9309. // on method entry
  9310. if(bailOutKind & IR::BailOutStep)
  9311. {
  9312. // TEST STEP_BAILOUT, [&stepController->StepType]
  9313. // BNE BailoutLabel
  9314. IR::Opnd* opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
  9315. IR::Opnd* opnd2 = IR::IntConstOpnd::New(Js::STEP_BAILOUT, TyInt8, this->m_func, /*dontEncode*/ true);
  9316. InsertTestBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9317. // CMP STEP_DOCUMENT, [&stepController->StepType]
  9318. // BEQ BailoutDocumentLabel
  9319. opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
  9320. opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
  9321. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
  9322. doGenerateBailOutDocumentBlock = true;
  9323. bailOutKind ^= IR::BailOutStep;
  9324. }
  9325. // on method exit
  9326. if (bailOutKind & IR::BailOutStackFrameBase)
  9327. {
  9328. // CMP EffectiveFrameBase, [&stepController->frameAddrWhenSet]
  9329. // BA bailoutLabel
  9330. RegNum effectiveFrameBaseReg;
  9331. #ifdef _M_X64
  9332. effectiveFrameBaseReg = m_lowererMD.GetRegStackPointer();
  9333. #else
  9334. effectiveFrameBaseReg = m_lowererMD.GetRegFramePointer();
  9335. #endif
  9336. IR::Opnd* opnd1 = IR::RegOpnd::New(nullptr, effectiveFrameBaseReg, TyMachReg, m_func);
  9337. IR::Opnd* opnd2 = IR::MemRefOpnd::New(debugManager->stepController.GetAddressOfFrameAddress(), TyMachReg, m_func);
  9338. this->InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrGt_A, /*isUnsigned*/ true, bailOutLabel, continueBranchInstr);
  9339. // CMP STEP_DOCUMENT, [&stepController->StepType]
  9340. // BEQ BailoutDocumentLabel
  9341. opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
  9342. opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
  9343. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
  9344. doGenerateBailOutDocumentBlock = true;
  9345. bailOutKind ^= IR::BailOutStackFrameBase;
  9346. }
  9347. if (bailOutKind & IR::BailOutLocalValueChanged)
  9348. {
  9349. int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
  9350. if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
  9351. {
  9352. // CMP [EBP + hasLocalVarChangedStackOffset], 0
  9353. // BNE bailout
  9354. StackSym* sym = StackSym::New(TyInt8, m_func);
  9355. sym->m_offset = hasLocalVarChangedOffset;
  9356. sym->m_allocated = true;
  9357. IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
  9358. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
  9359. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9360. }
  9361. bailOutKind ^= IR::BailOutLocalValueChanged;
  9362. }
  9363. if (doGenerateBailOutDocumentBlock)
  9364. {
  9365. // GENERATE the BailoutDocumentLabel
  9366. // bailOutDocumentLabel:
  9367. // CMP CurrentScriptId, [&stepController->ScriptIdWhenSet]
  9368. // BEQ ContinueLabel
  9369. // bailOutLabel: // (fallthrough bailOutLabel)
  9370. Js::FunctionBody* body = m_func->GetJnFunction();
  9371. IR::Opnd* opnd1 = IR::MemRefOpnd::New(body->GetAddressOfScriptId(), TyInt32, m_func);
  9372. IR::Opnd* opnd2 = IR::MemRefOpnd::New(debugManager->stepController.GetAddressOfScriptIdWhenSet(), TyInt32, m_func);
  9373. IR::RegOpnd* reg1 = IR::RegOpnd::New(TyInt32, m_func);
  9374. InsertMove(reg1, opnd2, bailOutLabel);
  9375. InsertCompareBranch(opnd1, reg1, Js::OpCode::BrEq_A, /*isUnsigned*/ true, continueLabel, bailOutLabel);
  9376. }
  9377. AssertMsg(bailOutKind == (IR::BailOutKind)0, "Some of the bits in BailOutKind were not processed!");
  9378. // Note: at this time the 'instr' is in between bailoutLabel and continueLabel.
  9379. }
  9380. else
  9381. {
  9382. // For explicit/unconditional bailout use label which is not a helper, otherwise we would get a helper in main code path
  9383. // which breaks helper label consistency (you can only get to helper from a conditional branch in main code), see DbCheckPostLower.
  9384. bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  9385. }
  9386. this->GenerateBailOut(instr, nullptr, bailOutLabel);
  9387. return prevInstr;
  9388. }
  9389. IR::Instr*
  9390. Lowerer::LowerBailOnException(IR::Instr * instr)
  9391. {
  9392. Assert(instr->HasBailOutInfo());
  9393. IR::Instr * instrPrev = instr->m_prev;
  9394. Assert(instrPrev->m_opcode == Js::OpCode::Catch);
  9395. this->GenerateBailOut(instr, nullptr, nullptr);
  9396. return instrPrev;
  9397. }
  9398. // Generate BailOut Lowerer Instruction if the value is INT_MIN.
  9399. // It it's not INT_MIN, we continue without bailout.
  9400. IR::Instr *
  9401. Lowerer::LowerBailOnIntMin(IR::Instr *instr, IR::BranchInstr *branchInstr /* = nullptr */, IR::LabelInstr *labelBailOut /* = nullptr */)
  9402. {
  9403. Assert(instr);
  9404. Assert(instr->GetSrc1());
  9405. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  9406. instr->InsertAfter(continueLabelInstr);
  9407. if(!instr->HasBailOutInfo())
  9408. {
  9409. instr->Remove();
  9410. }
  9411. else
  9412. {
  9413. Assert(instr->GetBailOutKind() == IR::BailOnIntMin);
  9414. // Note: src1 must be int32 at this point.
  9415. if (instr->GetSrc1()->IsIntConstOpnd())
  9416. {
  9417. // For consts we can check the value at JIT time. Note: without this check we'll have to legalize the CMP instr.
  9418. IR::IntConstOpnd* intConst = instr->UnlinkSrc1()->AsIntConstOpnd();
  9419. if (intConst->GetValue() == INT_MIN)
  9420. {
  9421. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  9422. intConst->Free(instr->m_func);
  9423. }
  9424. else
  9425. {
  9426. instr->Remove();
  9427. }
  9428. }
  9429. else
  9430. {
  9431. InsertCompareBranch(instr->UnlinkSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, this->m_func), Js::OpCode::BrNeq_A, continueLabelInstr, instr);
  9432. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  9433. }
  9434. }
  9435. return continueLabelInstr;
  9436. }
  9437. ///----------------------------------------------------------------------------
  9438. ///
  9439. /// Lowerer::LowerBailOnNotString
  9440. /// Generate BailOut Lowerer Instruction if not a String
  9441. ///
  9442. ///----------------------------------------------------------------------------
  9443. void Lowerer::LowerBailOnNotString(IR::Instr *instr)
  9444. {
  9445. if (!instr->GetSrc1()->GetValueType().IsString())
  9446. {
  9447. /*Creating a MOV instruction*/
  9448. IR::Instr * movInstr = IR::Instr::New(instr->m_opcode, instr->UnlinkDst(), instr->UnlinkSrc1(), instr->m_func);
  9449. instr->InsertBefore(movInstr);
  9450. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  9451. IR::LabelInstr *helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  9452. instr->InsertAfter(continueLabelInstr);
  9453. IR::RegOpnd *srcReg = movInstr->GetSrc1()->IsRegOpnd() ? movInstr->GetSrc1()->AsRegOpnd() : nullptr;
  9454. this->GenerateStringTest(srcReg, instr, helperLabelInstr, continueLabelInstr);
  9455. this->GenerateBailOut(instr, nullptr, helperLabelInstr);
  9456. }
  9457. else
  9458. {
  9459. instr->ClearBailOutInfo();
  9460. }
  9461. }
// Splits a single bail-out kind off of 'instr' (which may carry several kinds)
// into its own BailOut instruction inserted immediately after 'instr', and
// generates the bail-out code for it. The caller is responsible for emitting
// the branch that decides whether the new bail-out instruction is reached.
//   instr                     - instruction carrying the bail-out info/kinds
//   bailOutKindToLower        - the one kind to split out and lower here
//   isInHelperBlock           - not read in this function's body; passed by
//                               callers for interface symmetry (see callers)
//   preserveBailOutKindInInstr- when true, 'instr' keeps the lowered kind too
void Lowerer::LowerOneBailOutKind(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKindToLower,
    const bool isInHelperBlock,
    const bool preserveBailOutKindInInstr)
{
    Assert(instr);
    Assert(bailOutKindToLower);
    // If the kind lives in the BailOutKindBits region it must be a single bit
    // (power of two); otherwise the power-of-two check is skipped.
    Assert(!(bailOutKindToLower & IR::BailOutKindBits) || !(bailOutKindToLower & bailOutKindToLower - 1u));

    Func *const func = instr->m_func;

    // Split bailouts other than the one being lowered here
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKindToLower & IR::BailOutKindBits
        ? bailOutKind & bailOutKindToLower
        : (bailOutKind & ~IR::BailOutKindBits) == bailOutKindToLower);
    if(!preserveBailOutKindInInstr)
    {
        bailOutKind -= bailOutKindToLower;
    }
    if(bailOutKind)
    {
        // 'instr' still carries other bail-out kinds after the split.
        if(bailOutInfo->bailOutInstr == instr)
        {
            // Create a shared bailout point for the split bailout checks
            IR::Instr *const sharedBail = instr->ShareBailOut();
            Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
            GenerateBailOut(sharedBail);
        }
        instr->SetBailOutKind(bailOutKind);
    }
    else
    {
        // No kinds remain on 'instr': detach the bail-out info from it. The
        // info itself stays alive for the new bail-out instruction below.
        instr->UnlinkBailOutInfo();
        if(bailOutInfo->bailOutInstr == instr)
        {
            bailOutInfo->bailOutInstr = nullptr;
        }
    }

    IR::Instr *const insertBeforeInstr = instr->m_next;

    // (Bail out with the requested bail out kind)
    IR::BailOutInstr *const bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOut, bailOutKindToLower, bailOutInfo, func);
    bailOutInstr->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOutInstr);
    GenerateBailOut(bailOutInstr);

    // The caller is expected to generate code to decide whether to bail out
}
// Splits a combined array-check bail-out into two instructions: the original
// 'instr' keeps the not-array / not-native-array check, and a new BailOnNotArray
// instruction carrying BailOutOnMissingValue is inserted after it. Both share
// the same BailOutInfo. Outputs:
//   *bailOnNotArrayRef   - always set to 'instr'
//   *bailOnMissingValueRef - the new missing-value bail-out instruction, or
//                            nullptr when no split was needed
void Lowerer::SplitBailOnNotArray(
    IR::Instr *const instr,
    IR::Instr * *const bailOnNotArrayRef,
    IR::Instr * *const bailOnMissingValueRef)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());
    Assert(bailOnNotArrayRef);
    Assert(bailOnMissingValueRef);

    IR::Instr *&bailOnNotArray = *bailOnNotArrayRef;
    IR::Instr *&bailOnMissingValue = *bailOnMissingValueRef;
    bailOnNotArray = instr;
    bailOnMissingValue = nullptr;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    if(bailOutKind == IR::BailOutOnNotArray ||
       bailOutKind == IR::BailOutOnNotNativeArray)
    {
        // Pure array check with no missing-value component: nothing to split.
        return;
    }

    // Split array checks
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    if(bailOutInfo->bailOutInstr == instr)
    {
        // Create a shared bailout point for the split bailout checks
        IR::Instr *const sharedBail = instr->ShareBailOut();
        Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
        LowerBailTarget(sharedBail);
    }
    // Remove the missing-value component from 'instr'; what remains must be
    // exactly one of the two array-check kinds.
    bailOutKind -= IR::BailOutOnMissingValue;
    Assert(bailOutKind == IR::BailOutOnNotArray ||
           bailOutKind == IR::BailOutOnNotNativeArray);
    instr->SetBailOutKind(bailOutKind);

    Func *const func = bailOutInfo->bailOutFunc;
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // Split missing value checks
    bailOnMissingValue = IR::BailOutInstr::New(Js::OpCode::BailOnNotArray, IR::BailOutOnMissingValue, bailOutInfo, func);
    bailOnMissingValue->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOnMissingValue);
}
  9552. IR::RegOpnd *Lowerer::LowerBailOnNotArray(IR::Instr *const instr)
  9553. {
  9554. Assert(instr);
  9555. Assert(!instr->GetDst());
  9556. Assert(instr->GetSrc1());
  9557. Assert(instr->GetSrc1()->IsRegOpnd());
  9558. Assert(!instr->GetSrc2());
  9559. Func *const func = instr->m_func;
  9560. // Label to jump to (or fall through to) when bailing out
  9561. const auto bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
  9562. instr->InsertBefore(bailOutLabel);
  9563. // Label to jump to when not bailing out
  9564. const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  9565. instr->InsertAfter(skipBailOutLabel);
  9566. // Do the array tests and jump to bailOutLabel if it's not an array. Fall through if it is an array.
  9567. IR::RegOpnd *const arrayOpnd =
  9568. GenerateArrayTest(instr->UnlinkSrc1()->AsRegOpnd(), bailOutLabel, bailOutLabel, bailOutLabel, true);
  9569. // Skip bail-out when it is an array
  9570. InsertBranch(Js::OpCode::Br, skipBailOutLabel, bailOutLabel);
  9571. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  9572. // ordering instructions anymore.
  9573. GenerateBailOut(instr);
  9574. return arrayOpnd;
  9575. }
  9576. void Lowerer::LowerBailOnMissingValue(IR::Instr *const instr, IR::RegOpnd *const arrayOpnd)
  9577. {
  9578. Assert(instr);
  9579. Assert(!instr->GetDst());
  9580. Assert(!instr->GetSrc1());
  9581. Assert(!instr->GetSrc2());
  9582. Assert(arrayOpnd);
  9583. Assert(arrayOpnd->GetValueType().IsArrayOrObjectWithArray());
  9584. Func *const func = instr->m_func;
  9585. // Label to jump to when not bailing out
  9586. const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  9587. instr->InsertAfter(skipBailOutLabel);
  9588. // Skip bail-out when the array has no missing values
  9589. //
  9590. // test [array + offsetOf(objectArrayOrFlags)], Js::DynamicObjectFlags::HasNoMissingValues
  9591. // jnz $skipBailOut
  9592. const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func);
  9593. CompileAssert(
  9594. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  9595. Js::DynamicObjectFlags::HasNoMissingValues);
  9596. InsertTestBranch(
  9597. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
  9598. IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
  9599. Js::OpCode::BrNeq_A,
  9600. skipBailOutLabel,
  9601. instr);
  9602. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  9603. // ordering instructions anymore.
  9604. GenerateBailOut(instr);
  9605. }
// Lowers a BailOutOnInvalidatedArrayHeadSegment check for a store that makes a
// helper call: records the array's head segment (and its length) before the
// helper call, then after the call asks a JIT helper whether either changed,
// bailing out if so. 'instr' is the store (StElemI/Memset/Memcopy) whose dst
// indir's base is the array.
void Lowerer::LowerBailOnInvalidatedArrayHeadSegment(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the head segment or the head segment length changed during the helper call

    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym()))
    {
        // Record the array head segment before the helper call
        headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
    }

    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array head segment length before the helper call
        if(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym())
        {
            mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
        }
        else
        {
            headSegmentLengthBeforeHelperCall =
                Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
        }
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail
    // out
    invalidatedHeadSegment =
        JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
            headSegmentBeforeHelperCall,
            headSegmentLengthBeforeHelperCall,
            base)
    test invalidatedHeadSegment, invalidatedHeadSegment
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());
    const bool isArrayOrObjectWithArray = baseValueType.IsArrayOrObjectWithArray();
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *headSegmentBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        // The head segment is already available in a sym; reuse it directly.
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentSym(), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array head segment before the helper call
        //     headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
        callInstr->SetDst(headSegmentBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentForArrayOrObjectWithArray);
    }

    IR::RegOpnd *headSegmentLengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        // The head segment length is already available in a sym; reuse it.
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentLengthSym(), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
    }
    else
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
        if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
        {
            // Record the array head segment length before the helper call
            //     mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            InsertMove(
                headSegmentLengthBeforeHelperCallOpnd,
                IR::IndirOpnd::New(
                    headSegmentBeforeHelperCallOpnd,
                    Js::SparseArraySegmentBase::GetOffsetOfLength(),
                    TyUint32,
                    func),
                instr);
        }
        else
        {
            // Record the array head segment length before the helper call
            //     headSegmentLengthBeforeHelperCall =
            //         Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            m_lowererMD.LoadHelperArgument(instr, headSegmentBeforeHelperCallOpnd);
            IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
            callInstr->SetDst(headSegmentLengthBeforeHelperCallOpnd);
            instr->InsertBefore(callInstr);
            m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentLength);
        }
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split this bail-out kind into its own bail-out instruction after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayHeadSegment, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail out
    //     invalidatedHeadSegment =
    //         JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
    //             headSegmentBeforeHelperCall,
    //             headSegmentLengthBeforeHelperCall,
    //             base)
    // Arguments are loaded in reverse order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentLengthBeforeHelperCallOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedHeadSegmentOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedHeadSegmentOpnd(invalidatedHeadSegmentOpnd, func);
    callInstr->SetDst(invalidatedHeadSegmentOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayHeadSegment);

    //     test invalidatedHeadSegment, invalidatedHeadSegment
    //     jz $skipBailOut
    InsertTestBranch(
        invalidatedHeadSegmentOpnd,
        invalidatedHeadSegmentOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    //     (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    //     $skipBailOut:
}
// Lowers a BailOutOnInvalidatedArrayLength check for a store that makes a
// helper call: records the array's length before the helper call, then after
// the call asks a JIT helper whether the length changed, bailing out if so.
// 'instr' is the store (StElemI/Memset/Memcopy) whose dst indir's base is the
// array.
void Lowerer::LowerBailOnInvalidatedArrayLength(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the length changed during the helper call

    if(!(arrayOpnd && arrayOpnd.LengthSym() && arrayOpnd.LengthSym() != arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array length before the helper call
        lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array has a different length after the helper call, then this store needs to bail out
    invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    test invalidatedLength, invalidatedLength
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayLength)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArray());
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *lengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseLengthBeforeHelperCallOpnd;
    if(arrayOpnd && arrayOpnd->LengthSym() && arrayOpnd->LengthSym() != arrayOpnd->HeadSegmentLengthSym())
    {
        // A distinct length sym is already available; reuse it directly.
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->LengthSym(), arrayOpnd->LengthSym()->GetType(), func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array length before the helper call
        //     lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(TyUint32, func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
        callInstr->SetDst(lengthBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayLength);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split this bail-out kind into its own bail-out instruction after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayLength, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different length after the helper call, then this store needs to bail out
    //     invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    // Arguments are loaded in reverse order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, lengthBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedLengthOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedLengthOpnd(invalidatedLengthOpnd, func);
    callInstr->SetDst(invalidatedLengthOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayLength);

    //     test invalidatedLength, invalidatedLength
    //     jz $skipBailOut
    InsertTestBranch(
        invalidatedLengthOpnd,
        invalidatedLengthOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    //     (Bail out with IR::BailOutOnInvalidatedArrayLength)
    //     $skipBailOut:
}
// Lowers a BailOutOnMissingValue check for a store that makes a helper call:
// records the array's flags (has-no-missing-values bit) before the helper
// call, then after the call asks a JIT helper whether the store created the
// array's first missing value, bailing out if so. 'instr' is the store
// (StElemI/Memset/Memcopy) whose dst indir's base is the array.
void Lowerer::LowerBailOnCreatedMissingValue(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the first missing value was created during the helper call

    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
        (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
    createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    test createdFirstMissingValue, createdFirstMissingValue
    jz $skipBailOut
    (Bail out with IR::BailOutOnMissingValue)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());

    IR::Opnd *arrayFlagsBeforeHelperCallOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayFlagsBeforeHelperCallOpnd;
    // The array flags are pointer-sized; pick the matching IR type.
    const IRType arrayFlagsType = sizeof(uintptr_t) == sizeof(uint32) ? TyUint32 : TyUint64;
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        //     arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        arrayFlagsBeforeHelperCallOpnd = IR::RegOpnd::New(arrayFlagsType, func);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
        callInstr->SetDst(arrayFlagsBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayFlagsForArrayOrObjectWithArray);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split this bail-out kind into its own bail-out instruction after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnMissingValue, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
    {
        // (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        // The flags were proven at JIT time, so pass them as a constant. A
        // 64-bit constant needs an AddrOpnd; a 32-bit one fits in IntConstOpnd.
        Assert(!arrayFlagsBeforeHelperCallOpnd);
        arrayFlagsBeforeHelperCallOpnd =
            arrayFlagsType == TyUint32
                ? static_cast<IR::Opnd *>(
                    IR::IntConstOpnd::New(
                        static_cast<uintptr_t>(Js::DynamicObjectFlags::HasNoMissingValues),
                        arrayFlagsType,
                        func,
                        true))
                : IR::AddrOpnd::New(
                    reinterpret_cast<void *>(Js::DynamicObjectFlags::HasNoMissingValues),
                    IR::AddrOpndKindConstantVar,
                    func,
                    true);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
    }
    else
    {
        Assert(arrayFlagsBeforeHelperCallOpnd);
    }

    //     createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    // Arguments are loaded in reverse order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayFlagsBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const createdFirstMissingValueOpnd = IR::RegOpnd::New(TyUint8, func);
    IR::AutoReuseOpnd autoReuseCreatedFirstMissingValueOpnd(createdFirstMissingValueOpnd, func);
    callInstr->SetDst(createdFirstMissingValueOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationCreatedFirstMissingValue);

    //     test createdFirstMissingValue, createdFirstMissingValue
    //     jz $skipBailOut
    // (Emitted as a compare against 0, which is equivalent.)
    InsertCompareBranch(
        createdFirstMissingValueOpnd,
        IR::IntConstOpnd::New(0, createdFirstMissingValueOpnd->GetType(), func, true),
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    //     (Bail out with IR::BailOutOnMissingValue)
    //     $skipBailOut:
}
  9898. void Lowerer::LowerBoundCheck(IR::Instr *const instr)
  9899. {
  9900. Assert(instr);
  9901. Assert(instr->m_opcode == Js::OpCode::BoundCheck || instr->m_opcode == Js::OpCode::UnsignedBoundCheck);
  9902. #if DBG
  9903. if(instr->m_opcode == Js::OpCode::UnsignedBoundCheck)
  9904. {
  9905. // UnsignedBoundCheck is currently only supported for the pattern:
  9906. // UnsignedBoundCheck s1 <= s2 + c, where c == 0 || c == -1
  9907. Assert(instr->GetSrc1()->IsRegOpnd());
  9908. Assert(instr->GetSrc1()->IsInt32());
  9909. Assert(instr->GetSrc2());
  9910. Assert(!instr->GetSrc2()->IsIntConstOpnd());
  9911. if(instr->GetDst())
  9912. {
  9913. const int32 c = instr->GetDst()->AsIntConstOpnd()->AsInt32();
  9914. Assert(c == 0 || c == -1);
  9915. }
  9916. }
  9917. #endif
  9918. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  9919. Assert(
  9920. bailOutKind == IR::BailOutOnArrayAccessHelperCall ||
  9921. bailOutKind == IR::BailOutOnInvalidatedArrayHeadSegment ||
  9922. bailOutKind == IR::BailOutOnFailedHoistedBoundCheck ||
  9923. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  9924. IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
  9925. LowerOneBailOutKind(instr, bailOutKind, false);
  9926. Assert(!instr->HasBailOutInfo());
  9927. IR::Instr *insertBeforeInstr = instr->m_next;
  9928. #if DBG
  9929. const auto VerifyLeftOrRightOpnd = [&](IR::Opnd *const opnd, const bool isRightOpnd)
  9930. {
  9931. if(!opnd)
  9932. {
  9933. Assert(isRightOpnd);
  9934. return;
  9935. }
  9936. if(opnd->IsIntConstOpnd())
  9937. {
  9938. Assert(!isRightOpnd || opnd->AsIntConstOpnd()->GetValue() != 0);
  9939. return;
  9940. }
  9941. Assert(opnd->GetType() == TyInt32 || opnd->GetType() == TyUint32);
  9942. };
  9943. #endif
  9944. // left <= right + offset (src1 <= src2 + dst)
  9945. IR::Opnd *leftOpnd = instr->UnlinkSrc1();
  9946. DebugOnly(VerifyLeftOrRightOpnd(leftOpnd, false));
  9947. IR::Opnd *rightOpnd = instr->UnlinkSrc2();
  9948. DebugOnly(VerifyLeftOrRightOpnd(rightOpnd, true));
  9949. Assert(!leftOpnd->IsIntConstOpnd() || rightOpnd && !rightOpnd->IsIntConstOpnd());
  9950. IR::IntConstOpnd *offsetOpnd = instr->GetDst() ? instr->UnlinkDst()->AsIntConstOpnd() : nullptr;
  9951. Assert(!offsetOpnd || offsetOpnd->GetValue() != 0);
  9952. const bool doUnsignedCompare = instr->m_opcode == Js::OpCode::UnsignedBoundCheck;
  9953. instr->Remove();
  9954. Func *const func = insertBeforeInstr->m_func;
  9955. IntConstType offset = offsetOpnd ? offsetOpnd->GetValue() : 0;
  9956. Js::OpCode compareOpCode = Js::OpCode::BrLe_A;
  9957. if(leftOpnd->IsIntConstOpnd() && rightOpnd->IsRegOpnd() && offset != IntConstMin)
  9958. {
  9959. // Put the constants together: swap the operands, negate the offset, and invert the branch
  9960. IR::Opnd *const tempOpnd = leftOpnd;
  9961. leftOpnd = rightOpnd;
  9962. rightOpnd = tempOpnd;
  9963. offset = -offset;
  9964. compareOpCode = Js::OpCode::BrGe_A;
  9965. }
  9966. if(rightOpnd->IsIntConstOpnd())
  9967. {
  9968. // Try to aggregate right + offset into a constant offset
  9969. IntConstType newOffset;
  9970. if(!IntConstMath::Add(offset, rightOpnd->AsIntConstOpnd()->GetValue(), &newOffset))
  9971. {
  9972. offset = newOffset;
  9973. rightOpnd = nullptr;
  9974. offsetOpnd = nullptr;
  9975. }
  9976. }
  9977. // Determine if the Add for (right + offset) is necessary, and the op code that will be used for the comparison
  9978. IR::AutoReuseOpnd autoReuseAddResultOpnd;
  9979. if(offset == -1 && compareOpCode == Js::OpCode::BrLe_A)
  9980. {
  9981. offset = 0;
  9982. compareOpCode = Js::OpCode::BrLt_A;
  9983. }
  9984. else if(offset == 1 && compareOpCode == Js::OpCode::BrGe_A)
  9985. {
  9986. offset = 0;
  9987. compareOpCode = Js::OpCode::BrGt_A;
  9988. }
  9989. else if(offset != 0 && rightOpnd)
  9990. {
  9991. // Need to Add (right + offset). If it overflows, bail out.
  9992. IR::LabelInstr *const bailOutLabel = insertBeforeInstr->m_prev->GetOrCreateContinueLabel(true);
  9993. insertBeforeInstr = bailOutLabel;
  9994. // mov temp, right
  9995. // add temp, offset
  9996. // jo $bailOut
  9997. // $bailOut: (insertBeforeInstr)
  9998. Assert(!offsetOpnd || offsetOpnd->GetValue() == offset);
  9999. IR::RegOpnd *const addResultOpnd = IR::RegOpnd::New(TyMachReg, func);
  10000. autoReuseAddResultOpnd.Initialize(addResultOpnd, func);
  10001. InsertAdd(
  10002. true,
  10003. addResultOpnd,
  10004. rightOpnd,
  10005. offsetOpnd ? offsetOpnd : IR::IntConstOpnd::New(offset, TyMachReg, func, true),
  10006. insertBeforeInstr);
  10007. InsertBranch(LowererMD::MDOverflowBranchOpcode, bailOutLabel, insertBeforeInstr);
  10008. rightOpnd = addResultOpnd;
  10009. }
  10010. // cmp left, right
  10011. // jl[e] $skipBailOut
  10012. // $bailOut:
  10013. if(!rightOpnd)
  10014. {
  10015. rightOpnd = IR::IntConstOpnd::New(offset, TyInt32, func, true);
  10016. }
  10017. InsertCompareBranch(leftOpnd, rightOpnd, compareOpCode, doUnsignedCompare, skipBailOutLabel, insertBeforeInstr);
  10018. }
  10019. IR::Instr *
  10020. Lowerer::LowerBailTarget(IR::Instr * instr)
  10021. {
  10022. // this is just a bailout target, just skip over it and generate a label before so other bailout can jump here.
  10023. IR::Instr * prevInstr = instr->m_prev;
  10024. IR::LabelInstr * continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  10025. instr->InsertAfter(continueLabelInstr);
  10026. IR::BranchInstr * skipInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueLabelInstr, this->m_func);
  10027. instr->InsertBefore(skipInstr);
  10028. this->GenerateBailOut(instr);
  10029. return prevInstr;
  10030. }
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr *& instr)
{
    // Splits the implicit-call bailout out of 'instr': a bailout-free copy of the
    // instruction is inserted before the original, the caller's 'instr' reference is
    // updated to point at that copy, and the original is converted into a
    // BailOnNotEqual that compares the thread's implicit call flags against
    // ImplicitCall_None. Returns the BailOnNotEqual instruction (still to be lowered).
    Assert(instr->IsPlainInstr() || instr->IsProfiledInstr());

    const auto bailOutKind = instr->GetBailOutKind();
    Assert(
        BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) ||
        bailOutKind == IR::BailOutExpectingObject);

    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, instr);

    IR::Instr *disableImplicitCallsInstr = nullptr, *enableImplicitCallsInstr = nullptr;
    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
    {
        const auto disableImplicitCallAddress =
            m_lowererMD.GenerateMemRef(
                instr->m_func->GetScriptContext()->GetThreadContext()->GetAddressOfDisableImplicitFlags(),
                TyInt8,
                instr);

        // Disable implicit calls since they will be called after bailing out
        disableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitCallFlag, TyInt8, instr->m_func, true),
                instr->m_func);
        instr->InsertBefore(disableImplicitCallsInstr);

        // Create instruction for re-enabling implicit calls; it is inserted after the
        // split (copied) instruction below, so implicit calls are disabled only across it.
        enableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, instr->m_func, true),
                instr->m_func);
    }

    // Make a bailout-free copy of the instruction; the copy takes over the original's
    // operands, and the caller's 'instr' now refers to it.
    IR::Instr * bailOutInstr = instr;
    instr = IR::Instr::New(instr->m_opcode, instr->m_func);
    bailOutInstr->TransferTo(instr);
    bailOutInstr->InsertBefore(instr);

    if(disableImplicitCallsInstr)
    {
        // Re-enable implicit calls
        Assert(enableImplicitCallsInstr);
        bailOutInstr->InsertBefore(enableImplicitCallsInstr);

        // Lower both instructions. Lowering an instruction may free the instruction's original operands, so do that last.
        LowererMD::ChangeToAssign(disableImplicitCallsInstr);
        LowererMD::ChangeToAssign(enableImplicitCallsInstr);
    }

    // Convert the original instruction into the flag check that triggers the bailout.
    bailOutInstr->m_opcode = Js::OpCode::BailOnNotEqual;
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    return bailOutInstr;
}
  10087. IR::Instr *
  10088. Lowerer::SplitBailOnImplicitCall(IR::Instr * instr, IR::Instr * helperCall, IR::Instr * insertBeforeInstr)
  10089. {
  10090. IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
  10091. const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
  10092. IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
  10093. const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);
  10094. // Reset the implicit call flag on every helper call
  10095. LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, helperCall->m_prev);
  10096. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  10097. if (bailOutInfo->bailOutInstr == instr)
  10098. {
  10099. bailOutInfo->bailOutInstr = nullptr;
  10100. }
  10101. IR::Instr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, IR::BailOutOnImplicitCalls, bailOutInfo, bailOutInfo->bailOutFunc);
  10102. bailOutInstr->SetSrc1(implicitCallFlags);
  10103. bailOutInstr->SetSrc2(noImplicitCall);
  10104. insertBeforeInstr->InsertBefore(bailOutInstr);
  10105. instr->ClearBailOutInfo();
  10106. return bailOutInstr;
  10107. }
// Splits the debugger bailout out of a real instr that carries bailout-for-debugger info, into a separate bailout instr.
// Returns the instr to lower next, which is normally the last of the split instrs.
  10110. // IR on input:
  10111. // - Real instr with BailOutInfo but it's opcode is not BailForDebugger.
  10112. // - debugger bailout is not shared. In this case we'll have debugger bailout in instr->GetBailOutKind().
  10113. // - debugger bailout is shared. In this case we'll have debugger bailout in instr->GetAuxBailOutKind().
  10114. // IR on output:
  10115. // - Either of:
  10116. // - real instr, then debuggerBailout -- in case we only had debugger bailout.
  10117. // - real instr with BailOutInfo w/o debugger bailout, then debuggerBailout, then sharedBailout -- in case bailout for debugger was shared w/some other b.o.
IR::Instr* Lowerer::SplitBailForDebugger(IR::Instr* instr)
{
    // Extracts the debugger bailout from 'instr' into a standalone BailForDebugger
    // instruction inserted after it (see the block comment above this function for the
    // IR shapes on input and output).
    Assert(m_func->IsJitInDebugMode() && instr->m_opcode != Js::OpCode::BailForDebugger);

    IR::BailOutKind debuggerBailOutKind; // Used for splitted instr.
    BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
    IR::Instr* sharedBailoutInstr = nullptr;

    if (instr->GetBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is not shared.
        Assert(!instr->HasAuxBailOut());
        AssertMsg(!(instr->GetBailOutKind() & ~IR::BailOutForDebuggerBits), "There should only be debugger bailout bits in the instr.");
        debuggerBailOutKind = instr->GetBailOutKind() & IR::BailOutForDebuggerBits;

        // There is no non-debugger bailout in the instr, still can't clear bailout info, as we use it for the splitted instr,
        // but we need to mark the bailout as hasn't been generated yet.
        if (bailOutInfo->bailOutInstr == instr)
        {
            // null will be picked up by following BailOutInstr::New which will change it to new bailout instr.
            bailOutInfo->bailOutInstr = nullptr;
        }

        // Remove bailout info from the original instr which from now on becomes just regular instr, w/o deallocating bailout info.
        instr->ClearBailOutInfo();
    }
    else if (instr->IsBranchInstr() && instr->HasBailOutInfo() && instr->HasAuxBailOut())
    {
        // Branches with shared bailout are lowered in LowerCondBranchCheckBailOut,
        // can't do here because we need to use BranchBailOutRecord but don't know which BrTrue/BrFalse to use for it.
        debuggerBailOutKind = IR::BailOutInvalid;
    }
    else if (instr->HasAuxBailOut() && instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is shared.
        AssertMsg(!(instr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
        debuggerBailOutKind = instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;

        // This will insert SharedBail instr after current instr and set bailOutInfo->bailOutInstr to the shared one.
        sharedBailoutInstr = instr->ShareBailOut();

        // As we extracted aux bail out, invalidate all tracks of it in the instr.
        instr->ResetAuxBailOut();
    }
    else
    {
        AssertMsg(FALSE, "shouldn't get here");
        debuggerBailOutKind = IR::BailOutInvalid;
    }

    if (debuggerBailOutKind != IR::BailOutInvalid)
    {
        // Insert the standalone debugger bailout right after the (now bailout-free) instr.
        IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
            Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
        instr->InsertAfter(debuggerBailoutInstr);

        // Since we go backwards, we need to process extracted out bailout for debugger first.
        instr = sharedBailoutInstr ? sharedBailoutInstr : debuggerBailoutInstr;
    }

    return instr;
}
  10171. IR::Instr *
  10172. Lowerer::SplitBailOnResultCondition(IR::Instr *const instr) const
  10173. {
  10174. Assert(instr);
  10175. Assert(!instr->IsLowered());
  10176. Assert(
  10177. instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
  10178. instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  10179. const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
  10180. instr->TransferTo(nonBailOutInstr);
  10181. instr->InsertBefore(nonBailOutInstr);
  10182. return nonBailOutInstr;
  10183. }
void
Lowerer::LowerBailOnResultCondition(
    IR::Instr *const instr,
    IR::LabelInstr * *const bailOutLabel,
    IR::LabelInstr * *const skipBailOutLabel)
{
    // Generates the bailout call for an instruction that bails out on a result
    // condition, and returns (through the out parameters) the label to jump to when
    // bailing out and the label to jump to when not bailing out. The caller lowers the
    // arithmetic around these labels.
    Assert(instr);
    Assert(
        instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
        instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(skipBailOutLabel);

    // Label to jump to (or fall through to) when bailing out. The actual bailout label
    // (bailOutInfo->bailOutInstr->AsLabelInstr()) may be shared, and code may be added to restore values before the jump to the
    // actual bailout label in the cloned bailout case, so always create a new bailout label for this particular path.
    *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /* isOpHelper */);
    instr->InsertBefore(*bailOutLabel);

    // Label to jump to when not bailing out
    *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(*skipBailOutLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);
}
  10208. void
  10209. Lowerer::PreserveSourcesForBailOnResultCondition(IR::Instr *const instr, IR::LabelInstr *const skipBailOutLabel) const
  10210. {
  10211. Assert(instr);
  10212. Assert(!instr->IsLowered());
  10213. Assert(!instr->HasBailOutInfo());
  10214. // Since this instruction may bail out, writing to the destination cannot overwrite one of the sources, or we may lose one
  10215. // of the sources needed to redo the equivalent byte code instruction. Determine if the sources need to be preserved.
  10216. const auto dst = instr->GetDst();
  10217. Assert(dst);
  10218. const auto dstStackSym = dst->GetStackSym();
  10219. if(!dstStackSym || !dstStackSym->HasByteCodeRegSlot())
  10220. {
  10221. // We only need to ensure that a byte-code source is not being overwritten
  10222. return;
  10223. }
  10224. switch(instr->m_opcode)
  10225. {
  10226. // The sources of these instructions don't need restoring, or will be restored in the bailout path
  10227. case Js::OpCode::Neg_I4:
  10228. // In case of overflow or zero, the result is the same as the operand
  10229. case Js::OpCode::Add_I4:
  10230. case Js::OpCode::Sub_I4:
  10231. // In case of overflow, there is always enough information to restore the operands
  10232. return;
  10233. }
  10234. Assert(instr->GetSrc1());
  10235. if(!dst->IsEqual(instr->GetSrc1()) && !(instr->GetSrc2() && dst->IsEqual(instr->GetSrc2())))
  10236. {
  10237. // The destination is different from the sources
  10238. return;
  10239. }
  10240. // The destination is the same as one of the sources and the original sources cannot be restored after the instruction, so
  10241. // use a temporary destination for the result and move it back to the original destination after deciding not to bail out
  10242. LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
  10243. }
void
Lowerer::LowerInstrWithBailOnResultCondition(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    // Dispatches an int32 arithmetic instruction with a result-condition bailout to the
    // machine-dependent lowering routine for its opcode. 'bailOutLabel' and
    // 'skipBailOutLabel' are the jump targets for the bail-out and non-bail-out paths;
    // bailOutLabel must immediately follow the instruction.
    Assert(instr);
    Assert(!instr->IsLowered());
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    // Preserve sources that are overwritten by the instruction if needed
    PreserveSourcesForBailOnResultCondition(instr, skipBailOutLabel);

    // Lower the instruction
    switch(instr->m_opcode)
    {
        case Js::OpCode::Neg_I4:
            LowererMD::LowerInt4NegWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Add_I4:
            LowererMD::LowerInt4AddWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Sub_I4:
            LowererMD::LowerInt4SubWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Mul_I4:
            LowererMD::LowerInt4MulWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Rem_I4:
            m_lowererMD.LowerInt4RemWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        default:
            Assert(false); // not implemented
            __assume(false);
    }
}
  10283. void
  10284. Lowerer::GenerateObjectTestAndTypeLoad(IR::Instr *instrLdSt, IR::RegOpnd *opndBase, IR::RegOpnd *opndType, IR::LabelInstr *labelHelper)
  10285. {
  10286. IR::IndirOpnd *opndIndir;
  10287. if (!opndBase->IsNotTaggedValue())
  10288. {
  10289. m_lowererMD.GenerateObjectTest(opndBase, instrLdSt, labelHelper);
  10290. }
  10291. opndIndir = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  10292. m_lowererMD.CreateAssign(opndType, opndIndir, instrLdSt);
  10293. }
IR::LabelInstr *
Lowerer::GenerateBailOut(IR::Instr * instr, IR::BranchInstr * branchInstr, IR::LabelInstr *bailOutLabel)
{
    // Lowers a bailout instruction into the actual bailout helper call, or into a jump
    // to an already-generated (cloned or shared) bailout sequence. 'branchInstr' is
    // non-null for conditional-branch bailouts, which use a BranchBailOutRecord that
    // captures the true/false byte code offsets. Returns the helper label inserted
    // before the generated code, or the passed-in/derived bailOutLabel.
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
    IR::LabelInstr *collectRuntimeStatsLabel = nullptr;
    if (instr->IsCloned())
    {
        Assert(bailOutInstr != instr);

        // jump to the cloned bail out label
        IR::LabelInstr * bailOutLabelInstr = bailOutInstr->AsLabelInstr();
        IR::BranchInstr * bailOutBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutLabelInstr, this->m_func);
        instr->InsertBefore(bailOutBranch);
        instr->Remove();
        return bailOutLabel;
    }

    if (bailOutInstr != instr)
    {
        // this bailOutInfo is shared, just jump to the bailout target

        // Add helper label to trigger layout.
        collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr->InsertBefore(collectRuntimeStatsLabel);

        // Record this path's bailout kind into the shared bailout record before jumping.
        IR::MemRefOpnd *pIndexOpndForBailOutKind =
            IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfBailOutKind(), TyUint32, this->m_func, IR::AddrOpndKindDynamicBailOutKindRef);
        m_lowererMD.CreateAssign(
            pIndexOpndForBailOutKind, IR::IntConstOpnd::New(instr->GetBailOutKind(), pIndexOpndForBailOutKind->GetType(), this->m_func), instr);

        // No point in doing this for BailOutFailedEquivalentTypeCheck or BailOutFailedEquivalentFixedFieldTypeCheck,
        // because the respective inline cache is already polymorphic, anyway.
        if (instr->GetBailOutKind() == IR::BailOutFailedTypeCheck || instr->GetBailOutKind() == IR::BailOutFailedFixedFieldTypeCheck)
        {
            // We have a type check bailout that shares a bailout record with other instructions.
            // Generate code to write the cache index into the bailout record before we jump to the call site.
            Assert(bailOutInfo->polymorphicCacheIndex != (uint)-1);
            Assert(bailOutInfo->bailOutRecord);
            IR::MemRefOpnd *pIndexOpnd =
                IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfPolymorphicCacheIndex(), TyUint32, this->m_func);
            m_lowererMD.CreateAssign(
                pIndexOpnd, IR::IntConstOpnd::New(bailOutInfo->polymorphicCacheIndex, TyUint32, this->m_func), instr);
        }

        // GenerateBailOut should have replaced this as a label as we should have already lowered
        // the main bailOutInstr.
        IR::LabelInstr * bailOutTargetLabel = bailOutInstr->AsLabelInstr();
#if DBG
        if (bailOutTargetLabel->m_noHelperAssert)
        {
            collectRuntimeStatsLabel->m_noHelperAssert = true;
        }
#endif
        Assert(bailOutLabel == nullptr || bailOutLabel == bailOutTargetLabel);

        IR::BranchInstr * branchInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutTargetLabel, this->m_func);
        instr->InsertAfter(branchInstr);
        instr->Remove();
        return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
    }

    // The bailout hasn't been generated yet.
    Assert(!bailOutInstr->IsLabelInstr());

    // Add helper label to trigger layout.
    collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr->InsertBefore(collectRuntimeStatsLabel);

    // capture the condition for this bailout
    if (bailOutLabel == nullptr)
    {
        // Create a label and place it in the bailout info so that shared bailout point can jump to this one
        if (instr->m_prev->IsLabelInstr())
        {
            bailOutLabel = instr->m_prev->AsLabelInstr();
            Assert(bailOutLabel->isOpHelper);
        }
        else
        {
            bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr->InsertBefore(bailOutLabel);
        }
    }
    else
    {
        instr->InsertBefore(bailOutLabel);
    }

#if DBG
    if (bailOutInstr->m_opcode == Js::OpCode::BailOnNoSimdTypeSpec || bailOutInstr->m_opcode == Js::OpCode::BailOnNoProfile || bailOutInstr->m_opcode == Js::OpCode::BailOnException || bailOutInstr->m_opcode == Js::OpCode::Yield)
    {
        bailOutLabel->m_noHelperAssert = true;
    }
#endif

    // Later bailouts that share this bailOutInfo will jump to this label.
    bailOutInfo->bailOutInstr = bailOutLabel;
    bailOutLabel->m_hasNonBranchRef = true;

    // Create the bail out record
    Assert(bailOutInfo->bailOutRecord == nullptr);
    BailOutRecord * bailOutRecord;
    IR::JnHelperMethod helperMethod;
    if (branchInstr != nullptr)
    {
        // Conditional-branch bailout: record both byte code targets so the interpreter
        // can resume on the correct path.
        Assert(branchInstr->GetSrc2() == nullptr);
        Assert(branchInstr->GetDst() == nullptr);

        IR::LabelInstr * targetLabel = branchInstr->GetTarget();
        Assert(targetLabel->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);

        uint32 trueOffset;
        uint32 falseOffset;
        IR::Opnd *condOpnd = branchInstr->GetSrc1();
        bool invertTarget = (branchInstr->m_opcode == Js::OpCode::BrFalse_A);

        if (bailOutInfo->isInvertedBranch)
        {
            // Flip the condition
            IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, condOpnd, condOpnd, IR::IntConstOpnd::New(1, TyInt32, instr->m_func), instr->m_func);
            instr->InsertBefore(subInstr);
            this->m_lowererMD.EmitInt4Instr(subInstr);
            // We should really do a DEC/NEG for a full 2's complement flip from 0/1 to 1/0,
            // but DEC is sufficient to flip from 0/1 to -1/0, which is false/true to true/false...
            //instr->InsertBefore(IR::Instr::New(Js::OpCode::Neg_I4, condOpnd, condOpnd, instr->m_func));

            invertTarget = invertTarget ? false : true;
        }

        if (!invertTarget)
        {
            trueOffset = targetLabel->GetByteCodeOffset();
            falseOffset = bailOutInfo->bailOutOffset;
        }
        else
        {
            falseOffset = targetLabel->GetByteCodeOffset();
            trueOffset = bailOutInfo->bailOutOffset;
        }

        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BranchBailOutRecord, trueOffset, falseOffset, branchInstr->GetByteCodeReg(), instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBranchBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBranchBailOut;
        }
#endif

        // Save the condition. The register allocator will generate arguments.
        bailOutInfo->branchConditionOpnd = branchInstr->GetSrc1()->Copy(branchInstr->m_func);
    }
    else
    {
        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBailOut;
        }
#endif
    }

    // Save the bailout record. The register allocator will generate arguments.
    bailOutInfo->bailOutRecord = bailOutRecord;
#if ENABLE_DEBUG_CONFIG_OPTIONS
    bailOutRecord->bailOutOpcode = bailOutInfo->bailOutOpcode;
#endif

    // Call the bail out wrapper
    instr->m_opcode = Js::OpCode::Call;
    if(instr->GetDst())
    {
        // To facilitate register allocation, don't assign a destination. The result will anyway go into the return register,
        // but the register allocator does not need to kill that register for the call.
        instr->FreeDst();
    }
    instr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
    m_lowererMD.LowerCall(instr, 0);

    if (bailOutInstr->GetBailOutKind() != IR::BailOutForGeneratorYield)
    {
        // Defer introducing the JMP to epilog until LowerPrologEpilog phase for Yield bailouts so
        // that Yield does not appear to have flow out of its containing block for the RegAlloc phase.
        // Yield is an unconditional bailout but we want to simulate the flow as if the Yield were
        // just like a call.
        GenerateJumpToEpilogForBailOut(bailOutInfo, instr);
    }

    return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
}
  10464. void
  10465. Lowerer::GenerateJumpToEpilogForBailOut(BailOutInfo * bailOutInfo, IR::Instr *instr)
  10466. {
  10467. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  10468. // JMP to the epilog
  10469. IR::LabelInstr * exitTargetInstr;
  10470. if (exitPrevInstr->IsLabelInstr())
  10471. {
  10472. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  10473. }
  10474. else
  10475. {
  10476. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  10477. exitPrevInstr->InsertAfter(exitTargetInstr);
  10478. }
  10479. exitTargetInstr = m_lowererMD.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  10480. IR::Instr * instrAfter = instr->m_next;
  10481. IR::BranchInstr * exitInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, this->m_func);
  10482. instrAfter->InsertBefore(exitInstr);
  10483. }
  10484. ///----------------------------------------------------------------------------
  10485. ///
  10486. /// Lowerer::GenerateFastCondBranch
  10487. ///
  10488. ///----------------------------------------------------------------------------
  10489. bool
  10490. Lowerer::GenerateFastCondBranch(IR::BranchInstr * instrBranch, bool *pIsHelper)
  10491. {
  10492. // The idea is to do an inline compare if we can prove that both sources
  10493. // are tagged ints
  10494. //
  10495. // Given:
  10496. //
  10497. // Brxx_A $L, src1, src2
  10498. //
  10499. // Generate:
  10500. //
  10501. // (If not Int31's, goto $helper)
  10502. // Jxx $L, src1, src2
  10503. // JMP $fallthru
  10504. // $helper:
  10505. // (caller will generate normal helper call sequence)
  10506. // $fallthru:
  10507. IR::LabelInstr * labelHelper = nullptr;
  10508. IR::LabelInstr * labelFallThru;
  10509. IR::BranchInstr * instr;
  10510. IR::Opnd * opndSrc1;
  10511. IR::Opnd * opndSrc2;
  10512. opndSrc1 = instrBranch->GetSrc1();
  10513. opndSrc2 = instrBranch->GetSrc2();
  10514. AssertMsg(opndSrc1 && opndSrc2, "BrC expects 2 src operands");
  10515. // Not tagged ints?
  10516. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  10517. {
  10518. return true;
  10519. }
  10520. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  10521. {
  10522. return true;
  10523. }
  10524. // Tagged ints?
  10525. bool isTaggedInts = false;
  10526. if (opndSrc1->IsTaggedInt())
  10527. {
  10528. if (opndSrc2->IsTaggedInt())
  10529. {
  10530. isTaggedInts = true;
  10531. }
  10532. }
  10533. if (!isTaggedInts)
  10534. {
  10535. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  10536. this->m_lowererMD.GenerateSmIntPairTest(instrBranch, opndSrc1, opndSrc2, labelHelper);
  10537. }
  10538. // Jxx $L, src1, src2
  10539. opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
  10540. opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
  10541. instr = IR::BranchInstr::New(instrBranch->m_opcode, instrBranch->GetTarget(), opndSrc1, opndSrc2, this->m_func);
  10542. instrBranch->InsertBefore(instr);
  10543. this->m_lowererMD.LowerCondBranch(instr);
  10544. if (isTaggedInts)
  10545. {
  10546. instrBranch->Remove();
  10547. // Skip lowering call to helper
  10548. return false;
  10549. }
  10550. // JMP $fallthru
  10551. IR::Instr *instrNext = instrBranch->GetNextRealInstrOrLabel();
  10552. if (instrNext->IsLabelInstr())
  10553. {
  10554. labelFallThru = instrNext->AsLabelInstr();
  10555. }
  10556. else
  10557. {
  10558. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /**pIsHelper*/FALSE);
  10559. instrBranch->InsertAfter(labelFallThru);
  10560. }
  10561. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallThru, this->m_func);
  10562. instrBranch->InsertBefore(instr);
  10563. // $helper:
  10564. // (caller will generate normal helper call sequence)
  10565. // $fallthru:
  10566. AssertMsg(labelHelper, "Should not be NULL");
  10567. instrBranch->InsertBefore(labelHelper);
  10568. *pIsHelper = true;
  10569. return true;
  10570. }
// Lowers an InlineeStart instruction: rewrites the inlinee's ArgOut chain to
// plain assigns into the inlinee frame, and materializes the inlinee meta
// arguments (function object, argc, etc.) ahead of the inlined body.
// If the argument stack has been optimized (m_hasInlineArgsOpt), the
// InlineeStart instruction itself is kept (operands freed) for later passes;
// otherwise it is removed.
void
Lowerer::LowerInlineeStart(IR::Instr * inlineeStartInstr)
{
    IR::Opnd *linkOpnd = inlineeStartInstr->GetSrc2();
    if (!linkOpnd)
    {
        // No arg chain to process; this only happens when the inline-args
        // optimization has already elided it.
        Assert(inlineeStartInstr->m_func->m_hasInlineArgsOpt);
        return;
    }

    AssertMsg(inlineeStartInstr->m_func->firstActualStackOffset != -1, "This should have been already done in backward pass");

    IR::Instr *startCall;

    // Free the argOut links and lower them to MOVs
    inlineeStartInstr->IterateArgInstrs([&](IR::Instr* argInstr){
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        // Walk the link to the defining StartCall (NOTE(review): startCall is
        // not read after this loop in the visible code — presumably kept for
        // the side effect of walking the chain; confirm before removing).
        startCall = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        argInstr->FreeSrc2();
#pragma prefast(suppress:6235, "Non-Zero Constant in Condition")
        if (!PHASE_ON(Js::EliminateArgoutForInlineePhase, this->m_func) || inlineeStartInstr->m_func->GetJnFunction()->GetHasOrParentHasArguments())
        {
            // Normal case: the ArgOut becomes a store into the inlinee frame.
            m_lowererMD.ChangeToAssign(argInstr);
        }
        else
        {
            // Elidable ArgOut: mark it so later phases can treat it like a
            // built-in inline ArgOut instead of emitting a store.
            argInstr->m_opcode = Js::OpCode::ArgOut_A_InlineBuiltIn;
        }
        return false;
    });

    // Emit the meta args (frame header slots) in order, just before the point
    // where the previous meta arg was inserted.
    IR::Instr *argInsertInstr = inlineeStartInstr;
    uint i = 0;
    inlineeStartInstr->IterateMetaArgs( [&] (IR::Instr* metaArg)
    {
        if(i == 0)
        {
            // Null out the *next* inlinee frame's argc slot first, so a stack
            // walker never sees a stale frame below this one.
            LowererMD::CreateAssign(metaArg->m_func->GetNextInlineeFrameArgCountSlotOpnd(),
                IR::AddrOpnd::NewNull(metaArg->m_func),
                argInsertInstr);
        }

        if (i == Js::Constants::InlineeMetaArgIndex_FunctionObject)
        {
            // The function-object slot is fed by InlineeStart's src1.
            metaArg->SetSrc1(inlineeStartInstr->GetSrc1());
        }
        metaArg->Unlink();
        argInsertInstr->InsertBefore(metaArg);
        IR::Instr* prev = metaArg->m_prev;
        m_lowererMD.ChangeToAssign(metaArg);
        if (i == Js::Constants::InlineeMetaArgIndex_Argc)
        {
#if defined(_M_IX86) || defined(_M_X64)
            // On x86/x64 ChangeToAssign keeps the same instruction in place.
            Assert(metaArg == prev->m_next);
#else //defined(_M_ARM)
            // On ARM the assign is legalized into an LDIMM; pick that up.
            Assert(prev->m_next->m_opcode == Js::OpCode::LDIMM);
#endif
            metaArg = prev->m_next;
            // The argc constant must not be encoded/obfuscated: it is patched
            // as the inlinee entry marker.
            Assert(metaArg->GetSrc1()->AsAddrOpnd()->m_dontEncode == true);
            metaArg->isInlineeEntryInstr = true;
            LowererMD::Legalize(metaArg);
        }
        argInsertInstr = metaArg;
        i++;
        return false;
    });

    if (inlineeStartInstr->m_func->m_hasInlineArgsOpt)
    {
        // Keep the (now operand-less) InlineeStart marker for later passes.
        inlineeStartInstr->FreeSrc1();
        inlineeStartInstr->FreeSrc2();
        inlineeStartInstr->FreeDst();
    }
    else
    {
        inlineeStartInstr->Remove();
    }
}
  10643. void
  10644. Lowerer::LowerInlineeEnd(IR::Instr *instr)
  10645. {
  10646. Assert(instr->m_func->IsInlinee());
  10647. Assert(m_func->IsTopFunc());
  10648. // No need to emit code if the function wasn't marked as having implicit calls or bailout. Dead-Store should have removed inline overhead.
  10649. if (instr->m_func->GetHasImplicitCalls() || PHASE_OFF(Js::DeadStorePhase, this->m_func))
  10650. {
  10651. LowererMD::CreateAssign(instr->m_func->GetInlineeArgCountSlotOpnd(),
  10652. IR::AddrOpnd::New(0, IR::AddrOpndKindConstantVar, instr->m_func),
  10653. instr);
  10654. }
  10655. // Keep InlineeEnd around as it is used by register allocator, if we have optimized the arguments stack
  10656. if (instr->m_func->m_hasInlineArgsOpt)
  10657. {
  10658. instr->FreeSrc1();
  10659. }
  10660. else
  10661. {
  10662. instr->Remove();
  10663. }
  10664. }
  10665. IR::Instr *
  10666. Lowerer::LoadFloatFromNonReg(IR::Opnd * opndSrc, IR::Opnd * opndDst, IR::Instr * instrInsert)
  10667. {
  10668. double value;
  10669. if (opndSrc->IsAddrOpnd())
  10670. {
  10671. Js::Var var = opndSrc->AsAddrOpnd()->m_address;
  10672. if (Js::TaggedInt::Is(var))
  10673. {
  10674. value = Js::TaggedInt::ToDouble(var);
  10675. }
  10676. else
  10677. {
  10678. value = Js::JavascriptNumber::GetValue(var);
  10679. }
  10680. }
  10681. else if (opndSrc->IsIntConstOpnd())
  10682. {
  10683. if (opndSrc->IsUInt32())
  10684. {
  10685. value = (double)(uint32)opndSrc->AsIntConstOpnd()->GetValue();
  10686. }
  10687. else
  10688. {
  10689. value = (double)opndSrc->AsIntConstOpnd()->GetValue();
  10690. }
  10691. }
  10692. else if (opndSrc->IsFloatConstOpnd())
  10693. {
  10694. value = (double)opndSrc->AsFloatConstOpnd()->m_value;
  10695. }
  10696. else
  10697. {
  10698. AssertMsg(0, "Unexpected opnd type");
  10699. value = 0;
  10700. }
  10701. return LowererMD::LoadFloatValue(opndDst, value, instrInsert);
  10702. }
// Lowers a load of an int32 value out of an untagged (boxed) var: calls the
// GetNonzeroInt32Value_NoTaggedIntCheck helper and branches past the caller's
// slow path when the helper produced a nonzero int32. The caller emits its own
// slow-path code right after this instruction (before $done).
void
Lowerer::LoadInt32FromUntaggedVar(IR::Instr *const instrLoad)
{
    Assert(instrLoad);
    Assert(instrLoad->GetDst());
    Assert(instrLoad->GetDst()->IsRegOpnd());
    Assert(instrLoad->GetDst()->IsInt32());
    Assert(instrLoad->GetSrc1());
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->IsVar());
    Assert(!instrLoad->GetSrc2());

    //     push src
    //     int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    //     test int32Value, int32Value
    //     jne $done
    //     (fall through to 'instrLoad'; caller will generate code here)
    // $done:
    //     (rest of program)

    Func *const func = instrLoad->m_func;
    IR::LabelInstr *const doneLabel = instrLoad->GetOrCreateContinueLabel();

    // push src
    // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    // The call writes straight into the destination sym (as TyInt32).
    StackSym *const int32ValueSym = instrLoad->GetDst()->AsRegOpnd()->m_sym;
    IR::Instr *const instr =
        IR::Instr::New(
            Js::OpCode::Call,
            IR::RegOpnd::New(int32ValueSym, TyInt32, func),
            instrLoad->GetSrc1()->AsRegOpnd(),
            func);
    instrLoad->InsertBefore(instr);
    LowerUnaryHelper(instr, IR::HelperGetNonzeroInt32Value_NoTaggedIntCheck);

    // test int32Value, int32Value
    // jne $done
    // Zero is the helper's "not an int32" sentinel, hence the nonzero check.
    InsertCompareBranch(
        IR::RegOpnd::New(int32ValueSym, TyInt32, func),
        IR::IntConstOpnd::New(0, TyInt32, func, true),
        Js::OpCode::BrNeq_A,
        doneLabel,
        instrLoad);
}
  10743. bool
  10744. Lowerer::GetValueFromIndirOpnd(IR::IndirOpnd *indirOpnd, IR::Opnd **pValueOpnd, IntConstType *pValue)
  10745. {
  10746. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  10747. IR::Opnd* valueOpnd = nullptr;
  10748. IntConstType value = 0;
  10749. if (!indexOpnd)
  10750. {
  10751. value = (IntConstType)indirOpnd->GetOffset();
  10752. if (value < 0)
  10753. {
  10754. // Can't do fast path for negative index
  10755. return false;
  10756. }
  10757. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  10758. }
  10759. else if (indexOpnd->m_sym->IsIntConst())
  10760. {
  10761. value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
  10762. if (value < 0)
  10763. {
  10764. // Can't do fast path for negative index
  10765. return false;
  10766. }
  10767. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  10768. }
  10769. *pValueOpnd = valueOpnd;
  10770. *pValue = value;
  10771. return true;
  10772. }
  10773. void
  10774. Lowerer::GenerateFastBrOnObject(IR::Instr *instr)
  10775. {
  10776. Assert(instr->m_opcode == Js::OpCode::BrOnObject_A);
  10777. IR::RegOpnd *object = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  10778. IR::LabelInstr *done = instr->GetOrCreateContinueLabel();
  10779. IR::LabelInstr *target = instr->AsBranchInstr()->GetTarget();
  10780. IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  10781. IR::IntConstOpnd *typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, instr->m_func);
  10782. if (!object)
  10783. {
  10784. object = IR::RegOpnd::New(TyVar, m_func);
  10785. LowererMD::CreateAssign(object, instr->GetSrc1(), instr);
  10786. }
  10787. // TEST object, 1
  10788. // JNE $done
  10789. // MOV typeRegOpnd, [object + offset(Type)]
  10790. // CMP [typeRegOpnd + offset(TypeId)], TypeIds_LastJavascriptPrimitiveType
  10791. // JGT $target
  10792. // $done:
  10793. m_lowererMD.GenerateObjectTest(object, instr, done);
  10794. InsertMove(typeRegOpnd,
  10795. IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
  10796. instr);
  10797. InsertCompareBranch(
  10798. IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
  10799. typeIdOpnd, Js::OpCode::BrGt_A, target, instr);
  10800. instr->Remove();
  10801. }
  10802. void Lowerer::GenerateObjectHeaderInliningTest(IR::RegOpnd *baseOpnd, IR::LabelInstr * target,IR::Instr *insertBeforeInstr)
  10803. {
  10804. Assert(baseOpnd);
  10805. Assert(target);
  10806. AssertMsg(
  10807. baseOpnd->GetValueType().IsLikelyObject() &&
  10808. baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray,
  10809. "Why are we here, when the object is already known not to have an ObjArray");
  10810. Assert(insertBeforeInstr);
  10811. Func *const func = insertBeforeInstr->m_func;
  10812. // mov type, [base + offsetOf(type)]
  10813. IR::RegOpnd *const opnd = IR::RegOpnd::New(TyMachPtr, func);
  10814. m_lowererMD.CreateAssign(
  10815. opnd,
  10816. IR::IndirOpnd::New(
  10817. baseOpnd,
  10818. Js::DynamicObject::GetOffsetOfType(),
  10819. opnd->GetType(),
  10820. func),
  10821. insertBeforeInstr);
  10822. // mov typeHandler, [type + offsetOf(typeHandler)]
  10823. m_lowererMD.CreateAssign(
  10824. opnd,
  10825. IR::IndirOpnd::New(
  10826. opnd,
  10827. Js::DynamicType::GetOffsetOfTypeHandler(),
  10828. opnd->GetType(),
  10829. func),
  10830. insertBeforeInstr);
  10831. IR::IndirOpnd * offsetOfInlineSlotOpnd = IR::IndirOpnd::New(opnd,Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyInt16, func);
  10832. IR::IntConstOpnd * objHeaderInlinedSlotOffset = IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyInt16, func);
  10833. // CMP [typeHandler + offsetOf(offsetOfInlineSlots)], objHeaderInlinedSlotOffset
  10834. InsertCompareBranch(
  10835. offsetOfInlineSlotOpnd,
  10836. objHeaderInlinedSlotOffset,
  10837. Js::OpCode::BrEq_A,
  10838. target,
  10839. insertBeforeInstr);
  10840. }
  10841. void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper)
  10842. {
  10843. Assert(srcReg);
  10844. if (!srcReg->IsNotTaggedValue())
  10845. {
  10846. m_lowererMD.GenerateObjectTest(srcReg, instrInsert, labelHelper);
  10847. }
  10848. // CMP [srcReg], Js::DynamicObject::`vtable'
  10849. // JNE $helper
  10850. InsertCompareBranch(
  10851. IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
  10852. LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
  10853. Js::OpCode::BrNeq_A,
  10854. labelHelper,
  10855. instrInsert);
  10856. }
// Per-ObjectType vtable identifier used by the array fast paths to type-check
// a base object with a single vtable compare. Indexed by ObjectType (must stay
// in sync with the ObjectType enum order); entries that never take a fast path
// are VtableInvalid. Note mixed typed-array types map to the non-virtual array
// vtables here (see GetArrayVtableAddress for the virtual-vtable variant).
const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ VTableValue::VtableInvalid,
    /* ObjectType::Object                    */ VTableValue::VtableInvalid,
    /* ObjectType::RegExp                    */ VTableValue::VtableInvalid,
    /* ObjectType::ObjectWithArray           */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Array                     */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Int8Array                 */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8Array                */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedArray         */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16Array                */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16Array               */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32Array                */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32Array               */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32Array              */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64Array              */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int8VirtualArray          */ VTableValue::VtableInt8VirtualArray,
    /* ObjectType::Uint8VirtualArray         */ VTableValue::VtableUint8VirtualArray,
    /* ObjectType::Uint8ClampedVirtualArray  */ VTableValue::VtableUint8ClampedVirtualArray,
    /* ObjectType::Int16VirtualArray         */ VTableValue::VtableInt16VirtualArray,
    /* ObjectType::Uint16VirtualArray        */ VTableValue::VtableUint16VirtualArray,
    /* ObjectType::Int32VirtualArray         */ VTableValue::VtableInt32VirtualArray,
    /* ObjectType::Uint32VirtualArray        */ VTableValue::VtableUint32VirtualArray,
    /* ObjectType::Float32VirtualArray       */ VTableValue::VtableFloat32VirtualArray,
    /* ObjectType::Float64VirtualArray       */ VTableValue::VtableFloat64VirtualArray,
    /* ObjectType::Int8MixedArray            */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8MixedArray           */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedMixedArray    */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16MixedArray           */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16MixedArray          */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32MixedArray           */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32MixedArray          */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32MixedArray         */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64MixedArray         */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int64Array                */ VTableValue::VtableInt64Array,
    /* ObjectType::Uint64Array               */ VTableValue::VtableUint64Array,
    /* ObjectType::BoolArray                 */ VTableValue::VtableBoolArray,
    /* ObjectType::CharArray                 */ VTableValue::VtableCharArray
};
// Per-ObjectType byte offset of the element-storage pointer: the head segment
// for JS arrays, the buffer for typed arrays. Indexed by ObjectType (must stay
// in sync with the ObjectType enum order); -1 marks types with no such field.
const uint32 Lowerer::OffsetsOfHeadSegment[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ static_cast<uint32>(-1),
    /* ObjectType::Object                    */ static_cast<uint32>(-1),
    /* ObjectType::RegExp                    */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray           */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Array                     */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Int8Array                 */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8Array                */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedArray         */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16Array                */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16Array               */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32Array                */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32Array               */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32Array              */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64Array              */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int8VirtualArray          */ Js::Int8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8VirtualArray         */ Js::Uint8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedVirtualArray  */ Js::Uint8ClampedVirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16VirtualArray         */ Js::Int16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint16VirtualArray        */ Js::Uint16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int32VirtualArray         */ Js::Int32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint32VirtualArray        */ Js::Uint32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float32VirtualArray       */ Js::Float32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float64VirtualArray       */ Js::Float64VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int8MixedArray            */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8MixedArray           */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedMixedArray    */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16MixedArray           */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16MixedArray          */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32MixedArray           */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32MixedArray          */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32MixedArray         */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64MixedArray         */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int64Array                */ Js::Int64Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint64Array               */ Js::Uint64Array::GetOffsetOfBuffer(),
    /* ObjectType::BoolArray                 */ Js::BoolArray::GetOffsetOfBuffer(),
    /* ObjectType::CharArray                 */ Js::CharArray::GetOffsetOfBuffer()
};
// Per-ObjectType byte offset of the array's length field. Indexed by
// ObjectType (must stay in sync with the ObjectType enum order); -1 marks
// types that have no length field usable by the fast paths.
const uint32 Lowerer::OffsetsOfLength[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ static_cast<uint32>(-1),
    /* ObjectType::Object                    */ static_cast<uint32>(-1),
    /* ObjectType::RegExp                    */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray           */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Array                     */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Int8Array                 */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8Array                */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedArray         */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16Array                */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16Array               */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32Array                */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32Array               */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32Array              */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64Array              */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int8VirtualArray          */ Js::Int8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8VirtualArray         */ Js::Uint8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedVirtualArray  */ Js::Uint8ClampedVirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int16VirtualArray         */ Js::Int16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint16VirtualArray        */ Js::Uint16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int32VirtualArray         */ Js::Int32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint32VirtualArray        */ Js::Uint32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float32VirtualArray       */ Js::Float32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float64VirtualArray       */ Js::Float64VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int8MixedArray            */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8MixedArray           */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedMixedArray    */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16MixedArray           */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16MixedArray          */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32MixedArray           */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32MixedArray          */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32MixedArray         */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64MixedArray         */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int64Array                */ Js::Int64Array::GetOffsetOfLength(),
    /* ObjectType::Uint64Array               */ Js::Uint64Array::GetOffsetOfLength(),
    /* ObjectType::BoolArray                 */ Js::BoolArray::GetOffsetOfLength(),
    /* ObjectType::CharArray                 */ Js::CharArray::GetOffsetOfLength()
};
// Per-ObjectType IR element type used when emitting indirect loads/stores into
// the array's element storage. Indexed by ObjectType (must stay in sync with
// the ObjectType enum order); TyIllegal marks non-array types.
const IRType Lowerer::IndirTypes[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ TyIllegal,
    /* ObjectType::Object                    */ TyIllegal,
    /* ObjectType::RegExp                    */ TyIllegal,
    /* ObjectType::ObjectWithArray           */ TyVar,
    /* ObjectType::Array                     */ TyVar,
    /* ObjectType::Int8Array                 */ TyInt8,
    /* ObjectType::Uint8Array                */ TyUint8,
    /* ObjectType::Uint8ClampedArray         */ TyUint8,
    /* ObjectType::Int16Array                */ TyInt16,
    /* ObjectType::Uint16Array               */ TyUint16,
    /* ObjectType::Int32Array                */ TyInt32,
    /* ObjectType::Uint32Array               */ TyUint32,
    /* ObjectType::Float32Array              */ TyFloat32,
    /* ObjectType::Float64Array              */ TyFloat64,
    /* ObjectType::Int8VirtualArray          */ TyInt8,
    /* ObjectType::Uint8VirtualArray         */ TyUint8,
    /* ObjectType::Uint8ClampedVirtualArray  */ TyUint8,
    /* ObjectType::Int16VirtualArray         */ TyInt16,
    /* ObjectType::Uint16VirtualArray        */ TyUint16,
    /* ObjectType::Int32VirtualArray         */ TyInt32,
    /* ObjectType::Uint32VirtualArray        */ TyUint32,
    /* ObjectType::Float32VirtualArray       */ TyFloat32,
    /* ObjectType::Float64VirtualArray       */ TyFloat64,
    /* ObjectType::Int8MixedArray            */ TyInt8,
    /* ObjectType::Uint8MixedArray           */ TyUint8,
    /* ObjectType::Uint8ClampedMixedArray    */ TyUint8,
    /* ObjectType::Int16MixedArray           */ TyInt16,
    /* ObjectType::Uint16MixedArray          */ TyUint16,
    /* ObjectType::Int32MixedArray           */ TyInt32,
    /* ObjectType::Uint32MixedArray          */ TyUint32,
    /* ObjectType::Float32MixedArray         */ TyFloat32,
    /* ObjectType::Float64MixedArray         */ TyFloat64,
    /* ObjectType::Int64Array                */ TyInt64,
    /* ObjectType::Uint64Array               */ TyUint64,
    /* ObjectType::BoolArray                 */ TyUint8,
    /* ObjectType::CharArray                 */ TyUint16
};
// Per-ObjectType indir scale (log2 of element size in bytes) used when
// computing element addresses. Indexed by ObjectType (must stay in sync with
// the ObjectType enum order); -1 marks non-array types.
const BYTE Lowerer::IndirScales[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject       */ static_cast<BYTE>(-1),
    /* ObjectType::Object                    */ static_cast<BYTE>(-1),
    /* ObjectType::RegExp                    */ static_cast<BYTE>(-1),
    /* ObjectType::ObjectWithArray           */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Array                     */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Int8Array                 */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8Array                */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedArray         */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16Array                */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16Array               */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32Array                */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32Array               */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32Array              */ 2, // log2(sizeof(float))
    /* ObjectType::Float64Array              */ 3, // log2(sizeof(double))
    /* ObjectType::Int8VirtualArray          */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8VirtualArray         */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedVirtualArray  */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16VirtualArray         */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16VirtualArray        */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32VirtualArray         */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32VirtualArray        */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32VirtualArray       */ 2, // log2(sizeof(float))
    /* ObjectType::Float64VirtualArray       */ 3, // log2(sizeof(double))
    /* ObjectType::Int8MixedArray            */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8MixedArray           */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedMixedArray    */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16MixedArray           */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16MixedArray          */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32MixedArray           */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32MixedArray          */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32MixedArray         */ 2, // log2(sizeof(float))
    /* ObjectType::Float64MixedArray         */ 3, // log2(sizeof(double))
    /* ObjectType::Int64Array                */ 3, // log2(sizeof(int64))
    /* ObjectType::Uint64Array               */ 3, // log2(sizeof(uint64))
    /* ObjectType::BoolArray                 */ 0, // log2(sizeof(bool))
    /* ObjectType::CharArray                 */ 1  // log2(sizeof(wchar_t))
};
  11052. VTableValue Lowerer::GetArrayVtableAddress(const ValueType valueType, bool getVirtual)
  11053. {
  11054. Assert(valueType.IsLikelyAnyOptimizedArray());
  11055. if(valueType.IsLikelyArrayOrObjectWithArray())
  11056. {
  11057. if(valueType.HasIntElements())
  11058. {
  11059. return VTableValue::VtableNativeIntArray;
  11060. }
  11061. else if(valueType.HasFloatElements())
  11062. {
  11063. return VTableValue::VtableNativeFloatArray;
  11064. }
  11065. }
  11066. if (getVirtual && valueType.IsLikelyMixedTypedArrayType())
  11067. {
  11068. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetMixedToVirtualTypedArrayObjectType())];
  11069. }
  11070. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11071. }
  11072. uint32 Lowerer::GetArrayOffsetOfHeadSegment(const ValueType valueType)
  11073. {
  11074. Assert(valueType.IsLikelyAnyOptimizedArray());
  11075. return OffsetsOfHeadSegment[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11076. }
  11077. uint32 Lowerer::GetArrayOffsetOfLength(const ValueType valueType)
  11078. {
  11079. Assert(valueType.IsLikelyAnyOptimizedArray());
  11080. return OffsetsOfLength[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11081. }
  11082. IRType Lowerer::GetArrayIndirType(const ValueType valueType)
  11083. {
  11084. Assert(valueType.IsLikelyAnyOptimizedArray());
  11085. if(valueType.IsLikelyArrayOrObjectWithArray())
  11086. {
  11087. if(valueType.HasIntElements())
  11088. {
  11089. return TyInt32;
  11090. }
  11091. else if(valueType.HasFloatElements())
  11092. {
  11093. return TyFloat64;
  11094. }
  11095. }
  11096. return IndirTypes[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11097. }
  11098. BYTE Lowerer::GetArrayIndirScale(const ValueType valueType)
  11099. {
  11100. Assert(valueType.IsLikelyAnyOptimizedArray());
  11101. if(valueType.IsLikelyArrayOrObjectWithArray())
  11102. {
  11103. if(valueType.HasIntElements())
  11104. {
  11105. return 2; // log2(sizeof(int32))
  11106. }
  11107. else if(valueType.HasFloatElements())
  11108. {
  11109. return 3; // log2(sizeof(double))
  11110. }
  11111. }
  11112. return IndirScales[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11113. }
  11114. int Lowerer::SimdGetElementCountFromBytes(ValueType arrValueType, uint8 dataWidth)
  11115. {
  11116. Assert(dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
  11117. Assert(arrValueType.IsTypedArray());
  11118. BYTE bpe = 1 << Lowerer::GetArrayIndirScale(arrValueType);
  11119. // round up
  11120. return (int)::ceil(((float)dataWidth) / bpe);
  11121. }
  11122. bool Lowerer::ShouldGenerateArrayFastPath(
  11123. const IR::Opnd *const arrayOpnd,
  11124. const bool supportsObjectsWithArrays,
  11125. const bool supportsTypedArrays,
  11126. const bool requiresSse2ForFloatArrays) const
  11127. {
  11128. Assert(arrayOpnd);
  11129. const ValueType arrayValueType(arrayOpnd->GetValueType());
  11130. if(arrayValueType.IsUninitialized())
  11131. {
  11132. // Don't have info about the value type, better to generate the fast path anyway
  11133. return true;
  11134. }
  11135. if (!arrayValueType.IsLikelyObject())
  11136. {
  11137. if (!arrayValueType.HasBeenObject() || arrayValueType.IsLikelyString())
  11138. {
  11139. return false;
  11140. }
  11141. //We have seen at least once there is an object in the code path. Generate fastpath hoping it to be array.
  11142. //Its nice if we can get all the attributes set but valueType is only 16 bits. Consider expanding the same.
  11143. return true;
  11144. }
  11145. if( !supportsObjectsWithArrays && arrayValueType.GetObjectType() == ObjectType::ObjectWithArray ||
  11146. !supportsTypedArrays && arrayValueType.IsLikelyTypedArray())
  11147. {
  11148. // The fast path likely would not hit
  11149. return false;
  11150. }
  11151. if(arrayValueType.GetObjectType() == ObjectType::UninitializedObject)
  11152. {
  11153. // Don't have info about the object type, better to generate the fast path anyway
  11154. return true;
  11155. }
  11156. #ifdef _M_IX86
  11157. if(requiresSse2ForFloatArrays &&
  11158. (
  11159. arrayValueType.GetObjectType() == ObjectType::Float32Array ||
  11160. arrayValueType.GetObjectType() == ObjectType::Float64Array
  11161. ) &&
  11162. !AutoSystemInfo::Data.SSE2Available())
  11163. {
  11164. // Fast paths for float arrays rely on SSE2
  11165. return false;
  11166. }
  11167. #endif
  11168. return !arrayValueType.IsLikelyAnyUnOptimizedArray();
  11169. }
// Loads the value of a DynamicObject's objectArray slot into a fresh sym and
// returns it as a reg opnd whose value type is the base's, promoted ToArray().
// The caller must still validate the loaded value (it may be null or carry
// flag bits — see the test emitted in GenerateArrayTest).
IR::RegOpnd *Lowerer::LoadObjectArray(IR::RegOpnd *const baseOpnd, IR::Instr *const insertBeforeInstr)
{
    Assert(baseOpnd);
    Assert(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // mov array, [base + offsetOf(objectArrayOrFlags)]
    // Copy the base opnd (preserving array-specific opnd info when present),
    // then retarget the copy at a brand-new sym for the loaded array pointer.
    IR::RegOpnd *const arrayOpnd =
        baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd()->CopyAsRegOpnd(func) : baseOpnd->Copy(func)->AsRegOpnd();
    arrayOpnd->m_sym = StackSym::New(TyVar, func);
    arrayOpnd->SetValueType(arrayOpnd->GetValueType().ToArray());
    // Keep the opnd alive past this function; the caller owns it now.
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func, false /* autoDelete */);
    m_lowererMD.CreateAssign(
        arrayOpnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfObjectArray(),
            arrayOpnd->GetType(),
            func),
        insertBeforeInstr);
    return arrayOpnd;
}
// Emits the runtime check for whether the array SetElement fast path is
// currently enabled: compares the optimization-overrides fast-path vtable
// value against VtableInvalid and branches to isDisabledLabel when they are
// equal (i.e. the fast path has been disabled at runtime).
void
Lowerer::GenerateIsEnabledArraySetElementFastPathCheck(
    IR::LabelInstr * isDisabledLabel,
    IR::Instr * const insertBeforeInstr)
{
    InsertCompareBranch(
        this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable),
        LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableInvalid),
        Js::OpCode::BrEq_A,
        isDisabledLabel,
        insertBeforeInstr);
}
// GenerateArrayTest
//
// Emits the run-time checks that prove baseOpnd really refers to the kind of
// array its profiled ValueType predicts. On failure, control transfers to
// isNotObjectLabel (value is not an object) or isNotArrayLabel (object of the
// wrong kind; typically a bailout label). Returns a RegOpnd holding the
// verified array, with its ValueType upgraded to a definite object type.
//
// forceFloat         - expect a native float array; a native int array is
//                      converted in place via a helper, then branches to
//                      isNotArrayLabel (bailout) since checks may be stale.
// isStore            - compare against the set-element fast-path override
//                      vtables so a disabled fast path fails the test.
// allowDefiniteArray - permit calling this even when the value type is
//                      already a definite array.
IR::RegOpnd *Lowerer::GenerateArrayTest(
    IR::RegOpnd *const baseOpnd,
    IR::LabelInstr *const isNotObjectLabel,
    IR::LabelInstr *const isNotArrayLabel,
    IR::Instr *const insertBeforeInstr,
    const bool forceFloat,
    const bool isStore,
    const bool allowDefiniteArray)
{
    Assert(baseOpnd);
    const ValueType baseValueType(baseOpnd->GetValueType());
    // Shouldn't request to do an array test when it's already known to be an array, or if it's unlikely to be an array
    Assert(!baseValueType.IsAnyOptimizedArray() || allowDefiniteArray || baseValueType.IsNativeArray());
    Assert(baseValueType.IsUninitialized() || baseValueType.HasBeenObject());
    Assert(isNotObjectLabel);
    Assert(isNotArrayLabel);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;
    IR::RegOpnd *arrayOpnd;
    IR::AutoReuseOpnd autoReuseArrayOpnd;
    if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
    {
        // The array to test lives in the object's objectArray slot, so first make
        // sure the base is a plain DynamicObject and is not header-inlined.
        // Only DynamicObject is allowed (DynamicObject vtable is ensured) because some object types have special handling for
        // index properties - arguments object, string object, external object, etc.
        GenerateObjectTypeTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        GenerateObjectHeaderInliningTest(baseOpnd, isNotArrayLabel, insertBeforeInstr);
        arrayOpnd = LoadObjectArray(baseOpnd, insertBeforeInstr);
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);

        // The objectArray slot may be null, or may hold a non-array sentinel
        // (tagged with the low bit set); reject both.
        // test array, array
        // je $isNotArrayLabel
        // test array, 1
        // jne $isNotArrayLabel
        InsertTestBranch(
            arrayOpnd,
            arrayOpnd,
            Js::OpCode::BrEq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        InsertTestBranch(
            arrayOpnd,
            IR::IntConstOpnd::New(1, TyUint8, func, true),
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
    }
    else
    {
        // The base itself is (likely) the array. Rule out tagged values first
        // unless that is already known.
        if(!baseOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        }
        arrayOpnd = baseOpnd->Copy(func)->AsRegOpnd();
        if(!baseValueType.IsLikelyAnyOptimizedArray())
        {
            // No profile info: assume a likely JavascriptArray that may have
            // missing values.
            arrayOpnd->SetValueType(
                ValueType::GetObject(ObjectType::Array)
                    .ToLikely()
                    .SetHasNoMissingValues(false)
                    .SetArrayTypeId(Js::TypeIds_Array));
        }
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
    }

    // Pick the expected vtable from the profiled array type; plain
    // JavascriptArray is the fallback when the type is unknown.
    VTableValue vtableAddress = baseValueType.IsLikelyAnyOptimizedArray()
        ? GetArrayVtableAddress(baseValueType)
        : VTableValue::VtableJavascriptArray;
    VTableValue virtualVtableAddress = VTableValue::VtableInvalid;
    if (baseValueType.IsLikelyMixedTypedArrayType())
    {
        // Mixed typed-array types have a second, "virtual" vtable to accept as well.
        virtualVtableAddress = GetArrayVtableAddress(baseValueType, true);
    }
    IR::Opnd * vtableOpnd;
    IR::Opnd * vtableVirtualOpnd = nullptr;
    if (isStore &&
        (vtableAddress == VTableValue::VtableJavascriptArray ||
            baseValueType.IsLikelyNativeArray()))
    {
        // For stores, load the vtable through the optimization-overrides slot:
        // if the set-element fast path has been disabled, the slot is
        // invalidated and the comparison below will fail.
        vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
        if (baseValueType.IsLikelyNativeArray())
        {
            if (baseValueType.HasIntElements())
            {
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable), insertBeforeInstr);
            }
            else
            {
                Assert(baseValueType.HasFloatElements());
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable), insertBeforeInstr);
            }
        }
        else
        {
            InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable), insertBeforeInstr);
        }
    }
    else
    {
        vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, vtableAddress);
    }

    // cmp [array], vtableAddress
    // jne $isNotArrayLabel
    if (forceFloat && baseValueType.IsLikelyNativeFloatArray())
    {
        // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
        const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrEq_A,
            goodArrayLabel,
            insertBeforeInstr);

        IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        insertBeforeInstr->InsertBefore(notFloatArrayLabel);
        // Check for a native int array instead; for stores, again through the
        // override slot so a disabled fast path is rejected.
        if (isStore)
        {
            vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
            InsertMove(vtableOpnd, IR::MemRefOpnd::New(
                func->GetScriptContext()->optimizationOverrides.GetAddressOfIntArraySetElementFastPathVtable(),
                TyMachPtr, func), insertBeforeInstr);
        }
        else
        {
            vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
        }
        InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);

        // It's a native int array: convert it to a native float array in place.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
        IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
        insertBeforeInstr->InsertBefore(helperInstr);
        m_lowererMD.ChangeToHelperCall(helperInstr, IR::HelperIntArr_ToNativeFloatArray);
        // Branch to the (bailout) label, because converting the array may have made our array checks unsafe.
        InsertBranch(Js::OpCode::Br, isNotArrayLabel, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(goodArrayLabel);
    }
    else
    {
        IR::LabelInstr* goodArrayLabel = nullptr;
        if (baseValueType.IsLikelyMixedTypedArrayType())
        {
            // Accept either the normal vtable or the virtual one.
            goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrEq_A,
                goodArrayLabel,
                insertBeforeInstr);
            Assert(virtualVtableAddress);
            vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
            Assert(vtableVirtualOpnd);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableVirtualOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
            insertBeforeInstr->InsertBefore(goodArrayLabel);
        }
        else
        {
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
        }
    }

    // The checks above prove the value is an object of the expected kind;
    // upgrade the operand's ValueType to "definite object". A likely
    // array/object-with-array that was not already definite may have missing
    // values.
    ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsLikelyArrayOrObjectWithArray() && !arrayValueType.IsObject())
    {
        arrayValueType = arrayValueType.SetHasNoMissingValues(false);
    }
    arrayValueType = arrayValueType.ToDefiniteObject();
    arrayOpnd->SetValueType(arrayValueType);
    return arrayOpnd;
}
  11385. IR::LabelInstr *Lowerer::InsertLabel(const bool isHelper, IR::Instr *const insertBeforeInstr)
  11386. {
  11387. Assert(insertBeforeInstr);
  11388. Func *const func = insertBeforeInstr->m_func;
  11389. IR::LabelInstr *const instr = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  11390. insertBeforeInstr->InsertBefore(instr);
  11391. return instr;
  11392. }
  11393. IR::Instr *Lowerer::InsertMoveWithBarrier(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  11394. {
  11395. return Lowerer::InsertMove(dst, src, insertBeforeInstr, true);
  11396. }
  11397. IR::Instr *Lowerer::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr, bool generateWriteBarrier)
  11398. {
  11399. Assert(dst);
  11400. Assert(src);
  11401. Assert(insertBeforeInstr);
  11402. Func *const func = insertBeforeInstr->m_func;
  11403. if(dst->IsFloat() && src->IsConstOpnd())
  11404. {
  11405. return LoadFloatFromNonReg(src, dst, insertBeforeInstr);
  11406. }
  11407. if(TySize[dst->GetType()] < TySize[src->GetType()])
  11408. {
  11409. src = src->UseWithNewType(dst->GetType(), func);
  11410. }
  11411. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Ld_A, dst, src, func);
  11412. insertBeforeInstr->InsertBefore(instr);
  11413. if (generateWriteBarrier)
  11414. {
  11415. LowererMD::ChangeToWriteBarrierAssign(instr);
  11416. }
  11417. else
  11418. {
  11419. LowererMD::ChangeToAssign(instr);
  11420. }
  11421. return instr;
  11422. }
  11423. IR::BranchInstr *Lowerer::InsertBranch(
  11424. const Js::OpCode opCode,
  11425. IR::LabelInstr *const target,
  11426. IR::Instr *const insertBeforeInstr)
  11427. {
  11428. return InsertBranch(opCode, false /* isUnsigned */, target, insertBeforeInstr);
  11429. }
  11430. IR::BranchInstr *Lowerer::InsertBranch(
  11431. const Js::OpCode opCode,
  11432. const bool isUnsigned,
  11433. IR::LabelInstr *const target,
  11434. IR::Instr *const insertBeforeInstr)
  11435. {
  11436. Assert(target);
  11437. Assert(insertBeforeInstr);
  11438. Func *const func = insertBeforeInstr->m_func;
  11439. IR::BranchInstr *const instr = IR::BranchInstr::New(opCode, target, func);
  11440. if(!instr->IsLowered())
  11441. {
  11442. if(opCode == Js::OpCode::Br)
  11443. {
  11444. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  11445. }
  11446. else if(isUnsigned)
  11447. {
  11448. instr->m_opcode = LowererMD::MDUnsignedBranchOpcode(opCode);
  11449. }
  11450. else
  11451. {
  11452. instr->m_opcode = LowererMD::MDBranchOpcode(opCode);
  11453. }
  11454. }
  11455. insertBeforeInstr->InsertBefore(instr);
  11456. return instr;
  11457. }
  11458. IR::Instr *Lowerer::InsertCompare(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  11459. {
  11460. Assert(src1);
  11461. Assert(!src1->IsFloat64()); // not implemented
  11462. Assert(src2);
  11463. Assert(!src2->IsFloat64()); // not implemented
  11464. Assert(!src1->IsEqual(src2));
  11465. Assert(insertBeforeInstr);
  11466. Func *const func = insertBeforeInstr->m_func;
  11467. IR::Instr *const instr = IR::Instr::New(Js::OpCode::CMP, func);
  11468. instr->SetSrc1(src1);
  11469. instr->SetSrc2(src2);
  11470. insertBeforeInstr->InsertBefore(instr);
  11471. LowererMD::Legalize(instr);
  11472. return instr;
  11473. }
  11474. IR::BranchInstr *Lowerer::InsertCompareBranch(
  11475. IR::Opnd *const compareSrc1,
  11476. IR::Opnd *const compareSrc2,
  11477. Js::OpCode branchOpCode,
  11478. IR::LabelInstr *const target,
  11479. IR::Instr *const insertBeforeInstr,
  11480. const bool ignoreNaN)
  11481. {
  11482. return InsertCompareBranch(compareSrc1, compareSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr, ignoreNaN);
  11483. }
// Insert a compare followed by a conditional branch to target.
//
// Float64 sources are lowered through the machine-dependent float branch path.
// For integer/pointer sources, two peephole transforms are applied before
// emitting CMP:
//   1. If src1 is a (DWord-sized) constant and src2 is not, the sources are
//      swapped (and the branch condition mirrored) so the constant can fold
//      into the CMP instruction.
//   2. A comparison of a register against zero is emitted as TEST reg, reg
//      instead of CMP, where the branch condition permits it.
IR::BranchInstr *Lowerer::InsertCompareBranch(
    IR::Opnd *compareSrc1,
    IR::Opnd *compareSrc2,
    Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr,
    const bool ignoreNaN)
{
    Assert(compareSrc1);
    Assert(compareSrc2);

    Func *const func = insertBeforeInstr->m_func;

    if(compareSrc1->IsFloat64())
    {
        // Floating-point compare+branch is handled entirely by the MD lowerer.
        Assert(compareSrc2->IsFloat64());
        Assert(!isUnsigned);
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        return LowererMD::LowerFloatCondBranch(instr, ignoreNaN);
    }

    // swapSrcsBranchOpCode is the condition to use if the sources are swapped
    // (equality is symmetric; the relational conditions mirror).
    Js::OpCode swapSrcsBranchOpCode;
    switch(branchOpCode)
    {
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
            swapSrcsBranchOpCode = branchOpCode;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrGe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLe_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrGt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLt_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrLe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGe_A;
            goto Common_BrEqNeqGeGtLeLt;

        case Js::OpCode::BrLt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGt_A;
            // fall through

        Common_BrEqNeqGeGtLeLt:
            // Check if src1 is a constant and src2 is not, and facilitate folding the constant into the Cmp instruction
            if( (
                    compareSrc1->IsIntConstOpnd() ||
                    (
                        compareSrc1->IsAddrOpnd() &&
                        Math::FitsInDWord(reinterpret_cast<size_t>(compareSrc1->AsAddrOpnd()->m_address))
                    )
                ) &&
                !compareSrc2->IsIntConstOpnd() &&
                !compareSrc2->IsAddrOpnd())
            {
                // Swap the sources and branch
                IR::Opnd *const tempSrc = compareSrc1;
                compareSrc1 = compareSrc2;
                compareSrc2 = tempSrc;
                branchOpCode = swapSrcsBranchOpCode;
            }

            // Check for compare with zero, to prefer using Test instead of Cmp
            // (BrGt/BrLe cannot use TEST since they need the carry flag).
            if( !compareSrc1->IsRegOpnd() ||
                !(
                    compareSrc2->IsIntConstOpnd() && compareSrc2->AsIntConstOpnd()->GetValue() == 0 ||
                    compareSrc2->IsAddrOpnd() && !compareSrc2->AsAddrOpnd()->m_address
                ) ||
                branchOpCode == Js::OpCode::BrGt_A || branchOpCode == Js::OpCode::BrLe_A)
            {
                goto Default;
            }
            if(branchOpCode == Js::OpCode::BrGe_A || branchOpCode == Js::OpCode::BrLt_A)
            {
                // Signed >= 0 / < 0 can branch on the sign flag after TEST;
                // there is no unsigned equivalent.
                if(isUnsigned)
                {
                    goto Default;
                }
                branchOpCode = LowererMD::MDCompareWithZeroBranchOpcode(branchOpCode);
            }
            // The zero constant is dropped from the emitted code; free it if
            // nothing else references it.
            if(!compareSrc2->IsInUse())
            {
                compareSrc2->Free(func);
            }
            InsertTest(compareSrc1, compareSrc1, insertBeforeInstr);
            break;

        default:
        Default:
            InsertCompare(compareSrc1, compareSrc2, insertBeforeInstr);
            break;
    }

    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
  11572. IR::Instr *Lowerer::InsertTest(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  11573. {
  11574. Assert(src1);
  11575. Assert(!src1->IsFloat64()); // not implemented
  11576. Assert(src2);
  11577. Assert(!src2->IsFloat64()); // not implemented
  11578. Assert(insertBeforeInstr);
  11579. Func *const func = insertBeforeInstr->m_func;
  11580. IR::Instr *const instr = IR::Instr::New(LowererMD::MDTestOpcode, func);
  11581. instr->SetSrc1(src1);
  11582. instr->SetSrc2(src2);
  11583. insertBeforeInstr->InsertBefore(instr);
  11584. LowererMD::Legalize(instr);
  11585. return instr;
  11586. }
  11587. IR::BranchInstr *Lowerer::InsertTestBranch(
  11588. IR::Opnd *const testSrc1,
  11589. IR::Opnd *const testSrc2,
  11590. const Js::OpCode branchOpCode,
  11591. IR::LabelInstr *const target,
  11592. IR::Instr *const insertBeforeInstr)
  11593. {
  11594. return InsertTestBranch(testSrc1, testSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  11595. }
  11596. IR::BranchInstr *Lowerer::InsertTestBranch(
  11597. IR::Opnd *const testSrc1,
  11598. IR::Opnd *const testSrc2,
  11599. const Js::OpCode branchOpCode,
  11600. const bool isUnsigned,
  11601. IR::LabelInstr *const target,
  11602. IR::Instr *const insertBeforeInstr)
  11603. {
  11604. InsertTest(testSrc1, testSrc2, insertBeforeInstr);
  11605. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  11606. }
  11607. IR::Instr *Lowerer::InsertAdd(
  11608. const bool needFlags,
  11609. IR::Opnd *const dst,
  11610. IR::Opnd *src1,
  11611. IR::Opnd *src2,
  11612. IR::Instr *const insertBeforeInstr)
  11613. {
  11614. Assert(dst);
  11615. Assert(src1);
  11616. Assert(src2);
  11617. Assert(insertBeforeInstr);
  11618. Func *const func = insertBeforeInstr->m_func;
  11619. if(src2->IsIntConstOpnd())
  11620. {
  11621. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  11622. const IntConstType value = intConstOpnd->GetValue();
  11623. if(value < 0 && value != IntConstMin)
  11624. {
  11625. // Change (s1 = s1 + -5) into (s1 = s1 - 5)
  11626. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  11627. newSrc2->SetValue(-value);
  11628. return InsertSub(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  11629. }
  11630. }
  11631. else if(src1->IsIntConstOpnd())
  11632. {
  11633. IR::IntConstOpnd *const intConstOpnd = src1->AsIntConstOpnd();
  11634. const IntConstType value = intConstOpnd->GetValue();
  11635. if(value < 0 && value != IntConstMin)
  11636. {
  11637. // Change (s1 = -5 + s1) into (s1 = s1 - 5)
  11638. IR::Opnd *const newSrc1 = src2;
  11639. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  11640. newSrc2->SetValue(-value);
  11641. return InsertSub(needFlags, dst, newSrc1, newSrc2, insertBeforeInstr);
  11642. }
  11643. }
  11644. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Add_A, dst, src1, src2, func);
  11645. insertBeforeInstr->InsertBefore(instr);
  11646. LowererMD::ChangeToAdd(instr, needFlags);
  11647. LowererMD::Legalize(instr);
  11648. return instr;
  11649. }
  11650. IR::Instr *Lowerer::InsertSub(
  11651. const bool needFlags,
  11652. IR::Opnd *const dst,
  11653. IR::Opnd *src1,
  11654. IR::Opnd *src2,
  11655. IR::Instr *const insertBeforeInstr)
  11656. {
  11657. Assert(dst);
  11658. Assert(src1);
  11659. Assert(src2);
  11660. Assert(insertBeforeInstr);
  11661. Func *const func = insertBeforeInstr->m_func;
  11662. if(src2->IsIntConstOpnd())
  11663. {
  11664. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  11665. const IntConstType value = intConstOpnd->GetValue();
  11666. if(value < 0 && value != IntConstMin)
  11667. {
  11668. // Change (s1 = s1 - -5) into (s1 = s1 + 5)
  11669. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  11670. newSrc2->SetValue(-value);
  11671. return InsertAdd(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  11672. }
  11673. }
  11674. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Sub_A, dst, src1, src2, func);
  11675. insertBeforeInstr->InsertBefore(instr);
  11676. LowererMD::ChangeToSub(instr, needFlags);
  11677. LowererMD::Legalize(instr);
  11678. return instr;
  11679. }
  11680. IR::Instr *Lowerer::InsertLea(IR::RegOpnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
  11681. {
  11682. Assert(dst);
  11683. Assert(src);
  11684. Assert(src->IsIndirOpnd() || src->IsSymOpnd());
  11685. Assert(insertBeforeInstr);
  11686. Func *const func = insertBeforeInstr->m_func;
  11687. IR::Instr *const instr = IR::Instr::New(Js::OpCode::LEA, dst, src, func);
  11688. insertBeforeInstr->InsertBefore(instr);
  11689. return LowererMD::ChangeToLea(instr);
  11690. }
  11691. #if _M_X64
  11692. IR::Instr *Lowerer::InsertMoveBitCast(
  11693. IR::Opnd *const dst,
  11694. IR::Opnd *const src1,
  11695. IR::Instr *const insertBeforeInstr)
  11696. {
  11697. Assert(dst);
  11698. Assert(dst->GetType() == TyFloat64);
  11699. Assert(src1);
  11700. Assert(src1->GetType() == TyUint64);
  11701. Assert(insertBeforeInstr);
  11702. Func *const func = insertBeforeInstr->m_func;
  11703. IR::Instr *const instr = IR::Instr::New(LowererMD::MDMovUint64ToFloat64Opcode, dst, src1, func);
  11704. insertBeforeInstr->InsertBefore(instr);
  11705. LowererMD::Legalize(instr);
  11706. return instr;
  11707. }
  11708. #endif
  11709. IR::Instr *Lowerer::InsertXor(
  11710. IR::Opnd *const dst,
  11711. IR::Opnd *const src1,
  11712. IR::Opnd *const src2,
  11713. IR::Instr *const insertBeforeInstr)
  11714. {
  11715. Assert(dst);
  11716. Assert(src1);
  11717. Assert(src2);
  11718. Assert(insertBeforeInstr);
  11719. Func *const func = insertBeforeInstr->m_func;
  11720. IR::Instr *const instr = IR::Instr::New(LowererMD::MDXorOpcode, dst, src1, src2, func);
  11721. insertBeforeInstr->InsertBefore(instr);
  11722. LowererMD::Legalize(instr);
  11723. return instr;
  11724. }
  11725. IR::Instr *Lowerer::InsertAnd(
  11726. IR::Opnd *const dst,
  11727. IR::Opnd *const src1,
  11728. IR::Opnd *const src2,
  11729. IR::Instr *const insertBeforeInstr)
  11730. {
  11731. Assert(dst);
  11732. Assert(src1);
  11733. Assert(src2);
  11734. Assert(insertBeforeInstr);
  11735. Func *const func = insertBeforeInstr->m_func;
  11736. IR::Instr *const instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, func);
  11737. insertBeforeInstr->InsertBefore(instr);
  11738. LowererMD::Legalize(instr);
  11739. return instr;
  11740. }
  11741. IR::Instr *Lowerer::InsertOr(
  11742. IR::Opnd *const dst,
  11743. IR::Opnd *const src1,
  11744. IR::Opnd *const src2,
  11745. IR::Instr *const insertBeforeInstr)
  11746. {
  11747. Assert(dst);
  11748. Assert(src1);
  11749. Assert(src2);
  11750. Assert(insertBeforeInstr);
  11751. Func *const func = insertBeforeInstr->m_func;
  11752. IR::Instr *const instr = IR::Instr::New(LowererMD::MDOrOpcode, dst, src1, src2, func);
  11753. insertBeforeInstr->InsertBefore(instr);
  11754. LowererMD::Legalize(instr);
  11755. return instr;
  11756. }
  11757. IR::Instr *Lowerer::InsertShift(
  11758. const Js::OpCode opCode,
  11759. const bool needFlags,
  11760. IR::Opnd *const dst,
  11761. IR::Opnd *const src1,
  11762. IR::Opnd *const src2,
  11763. IR::Instr *const insertBeforeInstr)
  11764. {
  11765. Assert(dst);
  11766. Assert(!dst->IsFloat64()); // not implemented
  11767. Assert(src1);
  11768. Assert(!src1->IsFloat64()); // not implemented
  11769. Assert(src2);
  11770. Assert(!src2->IsFloat64()); // not implemented
  11771. Assert(insertBeforeInstr);
  11772. Func *const func = insertBeforeInstr->m_func;
  11773. IR::Instr *const instr = IR::Instr::New(opCode, dst, src1, src2, func);
  11774. insertBeforeInstr->InsertBefore(instr);
  11775. LowererMD::ChangeToShift(instr, needFlags);
  11776. LowererMD::Legalize(instr);
  11777. return instr;
  11778. }
  11779. IR::Instr *Lowerer::InsertShiftBranch(
  11780. const Js::OpCode shiftOpCode,
  11781. IR::Opnd *const dst,
  11782. IR::Opnd *const src1,
  11783. IR::Opnd *const src2,
  11784. const Js::OpCode branchOpCode,
  11785. IR::LabelInstr *const target,
  11786. IR::Instr *const insertBeforeInstr)
  11787. {
  11788. return InsertShiftBranch(shiftOpCode, dst, src1, src2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  11789. }
  11790. IR::Instr *Lowerer::InsertShiftBranch(
  11791. const Js::OpCode shiftOpCode,
  11792. IR::Opnd *const dst,
  11793. IR::Opnd *const src1,
  11794. IR::Opnd *const src2,
  11795. const Js::OpCode branchOpCode,
  11796. const bool isUnsigned,
  11797. IR::LabelInstr *const target,
  11798. IR::Instr *const insertBeforeInstr)
  11799. {
  11800. InsertShift(shiftOpCode, true /* needFlags */, dst, src1, src2, insertBeforeInstr);
  11801. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  11802. }
  11803. IR::Instr *Lowerer::InsertConvertFloat32ToFloat64(
  11804. IR::Opnd *const dst,
  11805. IR::Opnd *const src,
  11806. IR::Instr *const insertBeforeInstr)
  11807. {
  11808. Assert(dst);
  11809. Assert(dst->IsFloat64());
  11810. Assert(src);
  11811. Assert(src->IsFloat32());
  11812. Assert(insertBeforeInstr);
  11813. Func *const func = insertBeforeInstr->m_func;
  11814. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, src, func);
  11815. insertBeforeInstr->InsertBefore(instr);
  11816. LowererMD::Legalize(instr);
  11817. return instr;
  11818. }
  11819. IR::Instr *Lowerer::InsertConvertFloat64ToFloat32(
  11820. IR::Opnd *const dst,
  11821. IR::Opnd *const src,
  11822. IR::Instr *const insertBeforeInstr)
  11823. {
  11824. Assert(dst);
  11825. Assert(dst->IsFloat32());
  11826. Assert(src);
  11827. Assert(src->IsFloat64());
  11828. Assert(insertBeforeInstr);
  11829. Func *const func = insertBeforeInstr->m_func;
  11830. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src, func);
  11831. insertBeforeInstr->InsertBefore(instr);
  11832. LowererMD::Legalize(instr);
  11833. return instr;
  11834. }
  11835. void Lowerer::InsertIncUInt8PreventOverflow(
  11836. IR::Opnd *const dst,
  11837. IR::Opnd *const src,
  11838. IR::Instr *const insertBeforeInstr,
  11839. IR::Instr * *const onOverflowInsertBeforeInstrRef)
  11840. {
  11841. LowererMD::InsertIncUInt8PreventOverflow(dst, src, insertBeforeInstr, onOverflowInsertBeforeInstrRef);
  11842. }
  11843. void Lowerer::InsertDecUInt8PreventOverflow(
  11844. IR::Opnd *const dst,
  11845. IR::Opnd *const src,
  11846. IR::Instr *const insertBeforeInstr,
  11847. IR::Instr * *const onOverflowInsertBeforeInstrRef)
  11848. {
  11849. LowererMD::InsertDecUInt8PreventOverflow(dst, src, insertBeforeInstr, onOverflowInsertBeforeInstrRef);
  11850. }
// Compare the float64 src against +0.0 and branch to target either when it is
// zero-or-NaN (branchOnZeroOrNan == true) or when it is nonzero-and-not-NaN
// (branchOnZeroOrNan == false). fallthroughLabel, when provided, is where
// control continues on the non-branching outcome (used only on ARM, below).
void Lowerer::InsertFloatCheckForZeroOrNanBranch(
    IR::Opnd *const src,
    const bool branchOnZeroOrNan,
    IR::LabelInstr *const target,
    IR::LabelInstr *const fallthroughLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(src);
    Assert(src->IsFloat64());
    Assert(target);
    Assert(!fallthroughLabel || fallthroughLabel != target);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;
    IR::BranchInstr *const branchOnEqualOrNotEqual =
        InsertCompareBranch(
            src,
            IR::MemRefOpnd::New((double*)&(Js::JavascriptNumber::k_Zero), TyFloat64, func),
            branchOnZeroOrNan ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
            target,
            insertBeforeInstr,
            true /* ignoreNaN */);

    // x86/x64
    // When NaN is ignored, on x86 and x64, JE branches when equal or unordered since an unordered result sets the zero
    // flag, and JNE branches when not equal and not unordered. By comparing with zero, JE will branch when src is zero or
    // NaN, and JNE will branch when src is not zero and not NaN.
    //
    // ARM
    // When NaN is ignored, BEQ branches when equal and not unordered, and BNE branches when not equal or unordered. So,
    // when comparing src with zero, an unordered check needs to be added before the BEQ/BNE.
    branchOnEqualOrNotEqual; // satisfy the compiler
#ifdef _M_ARM
    // The BVS (overflow-set, i.e. unordered) branch is inserted *before* the
    // equality branch so the NaN case is routed explicitly: to target when
    // branching on zero-or-NaN, otherwise to the fallthrough path.
    InsertBranch(
        Js::OpCode::BVS,
        branchOnZeroOrNan
            ? target
            : fallthroughLabel ? fallthroughLabel : insertBeforeInstr->m_prev->GetOrCreateContinueLabel(),
        branchOnEqualOrNotEqual);
#endif
}
  11890. IR::IndirOpnd *
  11891. Lowerer::GenerateFastElemICommon(
  11892. IR::Instr * ldElem,
  11893. bool isStore,
  11894. IR::IndirOpnd * indirOpnd,
  11895. IR::LabelInstr * labelHelper,
  11896. IR::LabelInstr * labelCantUseArray,
  11897. IR::LabelInstr *labelFallthrough,
  11898. bool * pIsTypedArrayElement,
  11899. bool * pIsStringIndex,
  11900. bool *emitBailoutRef,
  11901. IR::LabelInstr **pLabelSegmentLengthIncreased /*= nullptr*/,
  11902. bool checkArrayLengthOverflow /*= true*/,
  11903. bool forceGenerateFastPath /* = false */,
  11904. bool returnLength,
  11905. IR::LabelInstr *bailOutLabelInstr /* = nullptr*/)
  11906. {
  11907. *pIsTypedArrayElement = false;
  11908. *pIsStringIndex = false;
  11909. if(pLabelSegmentLengthIncreased)
  11910. {
  11911. *pLabelSegmentLengthIncreased = nullptr;
  11912. }
  11913. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  11914. AssertMsg(baseOpnd, "This shouldn't be NULL");
  11915. // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
  11916. // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
  11917. // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
  11918. // bailouts.
  11919. if (baseOpnd->IsTaggedInt())
  11920. {
  11921. return NULL;
  11922. }
  11923. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  11924. if (indexOpnd)
  11925. {
  11926. if (indexOpnd->GetValueType().IsString())
  11927. {
  11928. if (!baseOpnd->GetValueType().IsLikelyOptimizedTypedArray())
  11929. {
  11930. // If profile data says that it's a typed array - do not generate the property string fast path as the src. could be a temp and that would cause a bug.
  11931. *pIsTypedArrayElement = false;
  11932. *pIsStringIndex = true;
  11933. return m_lowererMD.GenerateFastElemIStringIndexCommon(ldElem, isStore, indirOpnd, labelHelper);
  11934. }
  11935. else
  11936. {
  11937. // There's no point in generating the int index fast path if we know the index has a string value.
  11938. return nullptr;
  11939. }
  11940. }
  11941. }
  11942. return
  11943. GenerateFastElemIIntIndexCommon(
  11944. ldElem,
  11945. isStore,
  11946. indirOpnd,
  11947. labelHelper,
  11948. labelCantUseArray,
  11949. labelFallthrough,
  11950. pIsTypedArrayElement,
  11951. emitBailoutRef,
  11952. pLabelSegmentLengthIncreased,
  11953. checkArrayLengthOverflow,
  11954. false,
  11955. returnLength,
  11956. bailOutLabelInstr);
  11957. }
  11958. IR::IndirOpnd *
  11959. Lowerer::GenerateFastElemIIntIndexCommon(
  11960. IR::Instr * ldElem,
  11961. bool isStore,
  11962. IR::IndirOpnd * indirOpnd,
  11963. IR::LabelInstr * labelHelper,
  11964. IR::LabelInstr * labelCantUseArray,
  11965. IR::LabelInstr *labelFallthrough,
  11966. bool * pIsTypedArrayElement,
  11967. bool *emitBailoutRef,
  11968. IR::LabelInstr **pLabelSegmentLengthIncreased,
  11969. bool checkArrayLengthOverflow /*= true*/,
  11970. bool forceGenerateFastPath /* = false */,
  11971. bool returnLength,
  11972. IR::LabelInstr *bailOutLabelInstr /* = nullptr*/)
  11973. {
  11974. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  11975. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  11976. Assert(!baseOpnd->IsTaggedInt() || (indexOpnd && indexOpnd->IsNotInt()));
  11977. BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  11978. IRType indirType = TyVar;
  11979. const ValueType baseValueType(baseOpnd->GetValueType());
  11980. // TEST base, AtomTag -- check base not tagged int
  11981. // JNE $helper
  11982. // if (base.GetValueType() != Array) {
  11983. // CMP [base], JavascriptArray::`vtable'
  11984. // JNE $helper
  11985. // }
  11986. // TEST index, 1 -- index tagged int
  11987. // JEQ $helper
  11988. // if (inputIndex is not int const) {
  11989. // MOV index, inputIndex
  11990. // SAR index, Js::VarTag_Shift -- remote atom tag
  11991. // JS $helper -- exclude negative index
  11992. // }
  11993. // MOV headSegment, [base + offset(head)]
  11994. // CMP [headSegment + offset(length)], index -- bounds check
  11995. // if (opcode == StElemI_A) {
  11996. // JA $done (for typedarray, JA $toNumberHelper)
  11997. // CMP [headSegment + offset(size)], index -- chunk has room?
  11998. // JBE $helper
  11999. // if (index is not int const) {
  12000. // LEA newLength, [index + 1]
  12001. // } else {
  12002. // newLength = index + 1
  12003. // }
  12004. // MOV [headSegment + offset(length)], newLength -- update length on chunk
  12005. // CMP [base + offset(length)], newLength
  12006. // JAE $done
  12007. // MOV [base + offset(length)], newLength -- update length on array
  12008. // if(length to be returned){
  12009. // SHL newLength, AtomTag
  12010. // INC newLength
  12011. // MOV dst, newLength
  12012. // }
  12013. // JMP $done
  12014. //
  12015. // $toNumberHelper: Call HelperOp_ConvNumber_Full
  12016. // JMP $done
  12017. // $done
  12018. // } else {la
  12019. // JBE $helper
  12020. // }
  12021. // return [headSegment + offset(elements) + index]
  12022. // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
  12023. // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
  12024. // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
  12025. // bailouts.
  12026. bool isIndexNotInt = false;
  12027. IntConstType value = 0;
  12028. IR::Opnd * indexValueOpnd = nullptr;
  12029. bool invertBoundCheckComparison = false;
  12030. if (indirOpnd->TryGetIntConstIndexValue(true, &value, &isIndexNotInt))
  12031. {
  12032. if (value >= 0)
  12033. {
  12034. indexValueOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  12035. invertBoundCheckComparison = true; // facilitate folding the constant index into the compare instruction
  12036. }
  12037. else
  12038. {
  12039. // If the index is a negative int constant we go directly to helper.
  12040. Assert(!forceGenerateFastPath);
  12041. return nullptr;
  12042. }
  12043. }
  12044. else if (isIndexNotInt)
  12045. {
  12046. // If we know the index is not an int we go directly to helper.
  12047. Assert(!forceGenerateFastPath);
  12048. return nullptr;
  12049. }
  12050. //At this point indexValueOpnd is either NULL or contains the valueOpnd
  12051. if(!forceGenerateFastPath && !ShouldGenerateArrayFastPath(baseOpnd, true, true, true))
  12052. {
  12053. return nullptr;
  12054. }
  12055. if(baseValueType.IsLikelyAnyOptimizedArray())
  12056. {
  12057. indirScale = GetArrayIndirScale(baseValueType);
  12058. indirType = GetArrayIndirType(baseValueType);
  12059. }
  12060. IRType elementType = TyIllegal;
  12061. IR::Opnd * element = nullptr;
  12062. if(ldElem->m_opcode == Js::OpCode::InlineArrayPush)
  12063. {
  12064. element = ldElem->GetSrc2();
  12065. elementType = element->GetType();
  12066. }
  12067. else if(isStore && ldElem->GetSrc1())
  12068. {
  12069. element = ldElem->GetSrc1();
  12070. elementType = element->GetType();
  12071. }
  12072. Assert(isStore || (element == nullptr && elementType == TyIllegal));
  12073. if (isStore && baseValueType.IsLikelyNativeArray() && indirType != elementType)
  12074. {
  12075. // We're trying to write a value of the wrong type, which should force a conversion of the array.
  12076. // Go to the helper for that.
  12077. return nullptr;
  12078. }
  12079. IR::RegOpnd *arrayOpnd = baseOpnd;
  12080. IR::RegOpnd *headSegmentOpnd = nullptr;
  12081. IR::Opnd *headSegmentLengthOpnd = nullptr;
  12082. IR::AutoReuseOpnd autoReuseHeadSegmentOpnd, autoReuseHeadSegmentLengthOpnd;
  12083. bool indexIsNonnegative = indexValueOpnd || indexOpnd->GetType() == TyUint32 || !checkArrayLengthOverflow;
  12084. bool indexIsLessThanHeadSegmentLength = false;
  12085. if(!baseValueType.IsAnyOptimizedArray())
  12086. {
  12087. arrayOpnd = GenerateArrayTest(baseOpnd, labelCantUseArray, labelCantUseArray, ldElem, true, isStore);
  12088. }
  12089. else
  12090. {
  12091. if(arrayOpnd->IsArrayRegOpnd())
  12092. {
  12093. IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
  12094. if(arrayRegOpnd->HeadSegmentSym())
  12095. {
  12096. headSegmentOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentSym(), TyMachPtr, m_func);
  12097. DebugOnly(headSegmentOpnd->FreezeSymValue());
  12098. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  12099. }
  12100. if(arrayRegOpnd->HeadSegmentLengthSym())
  12101. {
  12102. headSegmentLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentLengthSym(), TyUint32, m_func);
  12103. DebugOnly(headSegmentLengthOpnd->AsRegOpnd()->FreezeSymValue());
  12104. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  12105. }
  12106. if (arrayRegOpnd->EliminatedLowerBoundCheck())
  12107. {
  12108. indexIsNonnegative = true;
  12109. }
  12110. if(arrayRegOpnd->EliminatedUpperBoundCheck())
  12111. {
  12112. indexIsLessThanHeadSegmentLength = true;
  12113. }
  12114. }
  12115. }
  12116. IR::AutoReuseOpnd autoReuseArrayOpnd;
  12117. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  12118. {
  12119. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  12120. }
  12121. const auto EnsureObjectArrayLoaded = [&]()
  12122. {
  12123. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  12124. {
  12125. return;
  12126. }
  12127. arrayOpnd = LoadObjectArray(arrayOpnd, ldElem);
  12128. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  12129. };
  12130. const bool doUpperBoundCheck = checkArrayLengthOverflow && !indexIsLessThanHeadSegmentLength;
  12131. if(!indexValueOpnd)
  12132. {
  12133. indexValueOpnd =
  12134. m_lowererMD.LoadNonnegativeIndex(
  12135. indexOpnd,
  12136. (
  12137. indexIsNonnegative
  12138. #if !INT32VAR
  12139. ||
  12140. // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
  12141. doUpperBoundCheck
  12142. #endif
  12143. ),
  12144. labelCantUseArray,
  12145. labelHelper,
  12146. ldElem);
  12147. }
  12148. const IR::AutoReuseOpnd autoReuseIndexValueOpnd(indexValueOpnd, m_func);
  12149. if (baseValueType.IsLikelyTypedArray())
  12150. {
  12151. *pIsTypedArrayElement = true;
  12152. if(doUpperBoundCheck)
  12153. {
  12154. if(!headSegmentLengthOpnd)
  12155. {
  12156. // (headSegmentLength = [base + offset(length)])
  12157. int lengthOffset;
  12158. lengthOffset = Js::Float64Array::GetOffsetOfLength();
  12159. headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
  12160. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  12161. }
  12162. // CMP index, headSegmentLength -- upper bound check
  12163. if(!invertBoundCheckComparison)
  12164. {
  12165. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, ldElem);
  12166. }
  12167. else
  12168. {
  12169. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, ldElem);
  12170. }
  12171. }
  12172. }
  12173. else
  12174. {
  12175. *pIsTypedArrayElement = false;
  12176. if (isStore &&
  12177. baseValueType.IsLikelyNativeIntArray() &&
  12178. (!element->IsIntConstOpnd() || Js::SparseArraySegment<int32>::GetMissingItem() == element->AsIntConstOpnd()->AsInt32()))
  12179. {
  12180. Assert(ldElem->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr);
  12181. // Check for a write of the MissingItem value.
  12182. InsertCompareBranch(
  12183. element,
  12184. GetMissingItemOpnd(elementType, m_func),
  12185. Js::OpCode::BrEq_A,
  12186. ldElem->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray,
  12187. ldElem,
  12188. true);
  12189. }
  12190. if(!headSegmentOpnd)
  12191. {
  12192. EnsureObjectArrayLoaded();
  12193. // MOV headSegment, [base + offset(head)]
  12194. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
  12195. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  12196. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  12197. InsertMove(headSegmentOpnd, indirOpnd, ldElem);
  12198. }
  12199. if(doUpperBoundCheck)
  12200. {
  12201. if(!headSegmentLengthOpnd)
  12202. {
  12203. // (headSegmentLength = [headSegment + offset(length)])
  12204. headSegmentLengthOpnd =
  12205. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  12206. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  12207. }
  12208. // CMP index, headSegmentLength -- upper bound check
  12209. if(!invertBoundCheckComparison)
  12210. {
  12211. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, ldElem);
  12212. }
  12213. else
  12214. {
  12215. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, ldElem);
  12216. }
  12217. }
  12218. }
  12219. const IR::BailOutKind bailOutKind = ldElem->HasBailOutInfo() ? ldElem->GetBailOutKind() : IR::BailOutInvalid;
  12220. if(indexIsLessThanHeadSegmentLength ||
  12221. bailOutKind & (IR::BailOutOnArrayAccessHelperCall | IR::BailOutOnInvalidatedArrayHeadSegment))
  12222. {
  12223. if(bailOutKind & (IR::BailOutOnArrayAccessHelperCall | IR::BailOutOnInvalidatedArrayHeadSegment))
  12224. {
  12225. // The bailout must be pre-op because it will not have completed the operation
  12226. Assert(ldElem->GetBailOutInfo()->bailOutOffset == ldElem->GetByteCodeOffset());
  12227. // Verify other bailouts these can be combined with
  12228. Assert(
  12229. !(
  12230. bailOutKind &
  12231. IR::BailOutKindBits &
  12232. ~(
  12233. IR::BailOutOnArrayAccessHelperCall |
  12234. IR::BailOutOnInvalidatedArrayHeadSegment |
  12235. IR::BailOutOnInvalidatedArrayLength |
  12236. IR::BailOutConventionalNativeArrayAccessOnly |
  12237. (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
  12238. )
  12239. ));
  12240. if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  12241. {
  12242. // Omit the helper call and generate a bailout instead
  12243. Assert(emitBailoutRef);
  12244. *emitBailoutRef = true;
  12245. }
  12246. }
  12247. if(indexIsLessThanHeadSegmentLength)
  12248. {
  12249. Assert(!(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment));
  12250. }
  12251. else
  12252. {
  12253. IR::LabelInstr *bailOutLabel;
  12254. if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
  12255. {
  12256. Assert(isStore);
  12257. // Lower a separate (but shared) bailout for this case, and preserve the bailout kind in the instruction if the
  12258. // helper call is going to be generated, because the bailout kind needs to be lowered again and differently in the
  12259. // helper call path.
  12260. //
  12261. // Generate:
  12262. // (LdElem)
  12263. // jmp $continue
  12264. // $bailOut:
  12265. // Bail out with IR::BailOutOnInvalidatedArrayHeadSegment
  12266. // $continue:
  12267. LowerOneBailOutKind(
  12268. ldElem,
  12269. IR::BailOutOnInvalidatedArrayHeadSegment,
  12270. false,
  12271. !(bailOutKind & IR::BailOutOnArrayAccessHelperCall));
  12272. bailOutLabel = ldElem->GetOrCreateContinueLabel(true);
  12273. InsertBranch(Js::OpCode::Br, labelFallthrough, bailOutLabel);
  12274. }
  12275. else
  12276. {
  12277. Assert(bailOutKind & IR::BailOutOnArrayAccessHelperCall);
  12278. bailOutLabel = labelHelper;
  12279. }
  12280. // Bail out if the index is outside the head segment bounds
  12281. // jae $bailOut
  12282. Assert(checkArrayLengthOverflow);
  12283. InsertBranch(
  12284. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  12285. true /* isUnsigned */,
  12286. bailOutLabel,
  12287. ldElem);
  12288. }
  12289. }
  12290. else if (isStore && !baseValueType.IsLikelyTypedArray()) // #if (opcode == StElemI_A)
  12291. {
  12292. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  12293. IR::LabelInstr *labelSegmentLengthIncreased = nullptr;
  12294. const bool isPush = ldElem->m_opcode != Js::OpCode::StElemI_A && ldElem->m_opcode != Js::OpCode::StElemI_A_Strict;
  12295. // Put the head segment size check and length updates in a helper block since they're not the common path for StElem.
  12296. // For push, that is the common path so keep it in a non-helper block.
  12297. const bool isInHelperBlock = !isPush;
  12298. if(checkArrayLengthOverflow)
  12299. {
  12300. if(pLabelSegmentLengthIncreased &&
  12301. !(
  12302. baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues() ||
  12303. (ldElem->m_opcode == Js::OpCode::StElemI_A || ldElem->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  12304. ldElem->IsProfiledInstr() && !ldElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue()
  12305. ))
  12306. {
  12307. // For arrays that are not guaranteed to have no missing values, before storing to an element where
  12308. // (index < length), the element value needs to be checked to see if it's a missing value, and if so, fall back
  12309. // to the helper. This is done to keep the missing value tracking precise in arrays. So, create a separate label
  12310. // for the case where the length was increased (index >= length), and pass it back to GenerateFastStElemI, which
  12311. // will fill in the rest.
  12312. labelSegmentLengthIncreased = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelperBlock);
  12313. *pLabelSegmentLengthIncreased = labelSegmentLengthIncreased;
  12314. }
  12315. else
  12316. {
  12317. labelSegmentLengthIncreased = labelDone;
  12318. }
  12319. // JB $done
  12320. InsertBranch(
  12321. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  12322. true /* isUnsigned */,
  12323. labelDone,
  12324. ldElem);
  12325. }
  12326. if(isInHelperBlock)
  12327. {
  12328. InsertLabel(true /* isHelper */, ldElem);
  12329. }
  12330. EnsureObjectArrayLoaded();
  12331. do // while(false);
  12332. {
  12333. if(checkArrayLengthOverflow)
  12334. {
  12335. if(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnMissingValue)
  12336. {
  12337. // Need to bail out if this store would create a missing value. The store would cause a missing value to be
  12338. // created if (index > length && index < size). If (index >= size) we would go to helper anyway, and the bailout
  12339. // handling for this is done after the helper call, so just go to helper if (index > length).
  12340. //
  12341. // jne $helper // branch for (cmp index, headSegmentLength)
  12342. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldElem);
  12343. }
  12344. else
  12345. {
  12346. // If (index < size) we will not call the helper, so the array flags must be updated to reflect that it no
  12347. // longer has no missing values.
  12348. //
  12349. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  12350. // cmp index, [headSegment + offset(size)]
  12351. // jae $helper
  12352. // jmp indexLessThanSize
  12353. // indexGreaterThanLength:
  12354. // cmp index, [headSegment + offset(size)]
  12355. // jae $helper
  12356. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  12357. // indexLessThanSize:
  12358. IR::LabelInstr *const indexGreaterThanLengthLabel = InsertLabel(true /* isHelper */, ldElem);
  12359. IR::LabelInstr *const indexLessThanSizeLabel = InsertLabel(isInHelperBlock, ldElem);
  12360. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  12361. // cmp index, [headSegment + offset(size)]
  12362. // jae $helper
  12363. // jmp indexLessThanSize
  12364. // indexGreaterThanLength:
  12365. InsertBranch(Js::OpCode::BrNeq_A, indexGreaterThanLengthLabel, indexGreaterThanLengthLabel);
  12366. InsertCompareBranch(
  12367. indexValueOpnd,
  12368. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  12369. Js::OpCode::BrGe_A,
  12370. true /* isUnsigned */,
  12371. labelHelper,
  12372. indexGreaterThanLengthLabel);
  12373. InsertBranch(Js::OpCode::Br, indexLessThanSizeLabel, indexGreaterThanLengthLabel);
  12374. // indexGreaterThanLength:
  12375. // cmp index, [headSegment + offset(size)]
  12376. // jae $helper
  12377. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  12378. // indexLessThanSize:
  12379. InsertCompareBranch(
  12380. indexValueOpnd,
  12381. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  12382. Js::OpCode::BrGe_A,
  12383. true /* isUnsigned */,
  12384. labelHelper,
  12385. indexLessThanSizeLabel);
  12386. CompileAssert(
  12387. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  12388. Js::DynamicObjectFlags::HasNoMissingValues);
  12389. InsertAnd(
  12390. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  12391. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  12392. IR::IntConstOpnd::New(
  12393. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  12394. TyUint8,
  12395. m_func,
  12396. true),
  12397. indexLessThanSizeLabel);
  12398. // indexLessThanSize:
  12399. break;
  12400. }
  12401. }
  12402. // CMP index, [headSegment + offset(size)]
  12403. // JAE $helper
  12404. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, this->m_func);
  12405. InsertCompareBranch(indexValueOpnd, indirOpnd, Js::OpCode::BrGe_A, true /* isUnsigned */, labelHelper, ldElem);
  12406. } while(false);
  12407. if(isPush)
  12408. {
  12409. IR::LabelInstr *const updateLengthLabel = InsertLabel(isInHelperBlock, ldElem);
  12410. if(!doUpperBoundCheck && !headSegmentLengthOpnd)
  12411. {
  12412. // (headSegmentLength = [headSegment + offset(length)])
  12413. headSegmentLengthOpnd =
  12414. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  12415. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  12416. }
  12417. // For push, it is guaranteed that (index >= length). We already know that (index < size), but we need to check if
  12418. // (index > length) because in that case a missing value will be created and the missing value tracking in the array
  12419. // needs to be updated.
  12420. //
  12421. // cmp index, headSegmentLength
  12422. // je $updateLength
  12423. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  12424. // updateLength:
  12425. InsertCompareBranch(
  12426. indexValueOpnd,
  12427. headSegmentLengthOpnd,
  12428. Js::OpCode::BrEq_A,
  12429. updateLengthLabel,
  12430. updateLengthLabel);
  12431. CompileAssert(
  12432. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  12433. Js::DynamicObjectFlags::HasNoMissingValues);
  12434. InsertAnd(
  12435. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  12436. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  12437. IR::IntConstOpnd::New(
  12438. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  12439. TyUint8,
  12440. m_func,
  12441. true),
  12442. updateLengthLabel);
  12443. }
  12444. if (baseValueType.IsArrayOrObjectWithArray())
  12445. {
  12446. // We didn't emit an array check, but if we are going to grow the array
  12447. // We need to go to helper if there is an ES5 array/objectarray used as prototype
  12448. GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, ldElem);
  12449. }
  12450. IR::Opnd *newLengthOpnd;
  12451. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  12452. if (indexValueOpnd->IsRegOpnd())
  12453. {
  12454. // LEA newLength, [index + 1]
  12455. newLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
  12456. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  12457. InsertAdd(false /* needFlags */, newLengthOpnd, indexValueOpnd, IR::IntConstOpnd::New(1, TyUint32, m_func), ldElem);
  12458. }
  12459. else
  12460. {
  12461. newLengthOpnd = IR::IntConstOpnd::New(value + 1, TyUint32, this->m_func);
  12462. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  12463. }
  12464. // MOV [headSegment + offset(length)], newLength
  12465. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  12466. InsertMove(indirOpnd, newLengthOpnd, ldElem);
  12467. if (checkArrayLengthOverflow)
  12468. {
  12469. // CMP newLength, [base + offset(length)]
  12470. // JBE $segmentLengthIncreased
  12471. Assert(labelSegmentLengthIncreased);
  12472. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  12473. InsertCompareBranch(
  12474. newLengthOpnd,
  12475. indirOpnd,
  12476. Js::OpCode::BrLe_A,
  12477. true /* isUnsigned */,
  12478. labelSegmentLengthIncreased,
  12479. ldElem);
  12480. if(!isInHelperBlock)
  12481. {
  12482. InsertLabel(true /* isHelper */, ldElem);
  12483. }
  12484. }
  12485. // MOV [base + offset(length)], newLength
  12486. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  12487. InsertMove(indirOpnd, newLengthOpnd, ldElem);
  12488. if(returnLength)
  12489. {
  12490. if(newLengthOpnd->GetSize() != MachPtr)
  12491. {
  12492. newLengthOpnd = newLengthOpnd->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
  12493. }
  12494. // SHL newLength, AtomTag
  12495. // INC newLength
  12496. this->m_lowererMD.GenerateInt32ToVarConversion(newLengthOpnd, ldElem);
  12497. // MOV dst, newLength
  12498. InsertMove(ldElem->GetDst(), newLengthOpnd, ldElem);
  12499. }
  12500. if(labelSegmentLengthIncreased && labelSegmentLengthIncreased != labelDone)
  12501. {
  12502. // labelSegmentLengthIncreased:
  12503. ldElem->InsertBefore(labelSegmentLengthIncreased);
  12504. }
  12505. // $done
  12506. ldElem->InsertBefore(labelDone);
  12507. }
  12508. else // #else
  12509. {
  12510. if (checkArrayLengthOverflow)
  12511. {
  12512. if (*pIsTypedArrayElement && isStore)
  12513. {
  12514. IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  12515. //For positive index beyond length or negative index its essentially nop for typed array store
  12516. InsertBranch(
  12517. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  12518. true /* isUnsigned */,
  12519. labelInlineSet,
  12520. ldElem);
  12521. // For typed array, call ToNumber before we fallThrough.
  12522. if (ldElem->GetSrc1()->GetType() == TyVar && !ldElem->GetSrc1()->GetValueType().IsPrimitive())
  12523. {
  12524. IR::Instr *toNumberInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  12525. toNumberInstr->SetSrc1(ldElem->GetSrc1());
  12526. ldElem->InsertBefore(toNumberInstr);
  12527. LowerUnaryHelperMem(toNumberInstr, IR::HelperOp_ConvNumber_Full);
  12528. }
  12529. InsertBranch(Js::OpCode::Br, labelFallthrough, ldElem); //Jump to fallThrough
  12530. ldElem->InsertBefore(labelInlineSet);
  12531. }
  12532. else
  12533. {
  12534. // JAE $helper
  12535. InsertBranch(
  12536. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  12537. true /* isUnsigned */,
  12538. labelHelper,
  12539. ldElem);
  12540. }
  12541. }
  12542. EnsureObjectArrayLoaded();
  12543. if (ldElem->m_opcode == Js::OpCode::InlineArrayPop)
  12544. {
  12545. Assert(!baseValueType.IsLikelyTypedArray());
  12546. Assert(bailOutLabelInstr);
  12547. if (indexValueOpnd->IsIntConstOpnd())
  12548. {
  12549. // indirOpnd = [headSegment + index + offset(elements)]
  12550. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  12551. // TODO: Assert(Math::FitsInDWord(offset));
  12552. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  12553. }
  12554. else
  12555. {
  12556. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  12557. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  12558. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  12559. }
  12560. IR::Opnd * tmpDst = nullptr;
  12561. IR::Opnd * dst = ldElem->GetDst();
  12562. //Pop might not have a dst, if not don't worry about returning the last element. But we still have to
  12563. // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
  12564. if (dst || !baseValueType.HasNoMissingValues())
  12565. {
  12566. if (!dst)
  12567. {
  12568. dst = IR::RegOpnd::New(indirType, this->m_func);
  12569. }
  12570. else if (dst->AsRegOpnd()->m_sym == arrayOpnd->m_sym)
  12571. {
  12572. tmpDst = IR::RegOpnd::New(TyVar, this->m_func);
  12573. dst = tmpDst;
  12574. }
  12575. // MOV dst, [head + offset]
  12576. InsertMove(dst, indirOpnd, ldElem);
  12577. //If the array has missing values, check for one
  12578. if (!baseValueType.HasNoMissingValues())
  12579. {
  12580. InsertCompareBranch(
  12581. dst,
  12582. GetMissingItemOpnd(indirType, m_func),
  12583. Js::OpCode::BrEq_A,
  12584. bailOutLabelInstr,
  12585. ldElem,
  12586. true);
  12587. }
  12588. }
  12589. // MOV [head + offset], missing
  12590. InsertMove(indirOpnd, GetMissingItemOpnd(indirType, m_func), ldElem);
  12591. IR::Opnd *newLengthOpnd;
  12592. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  12593. if (indexValueOpnd->IsRegOpnd())
  12594. {
  12595. // LEA newLength, [index]
  12596. newLengthOpnd = indexValueOpnd;
  12597. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  12598. }
  12599. else
  12600. {
  12601. newLengthOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  12602. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  12603. }
  12604. //update segment length and array length
  12605. // MOV [headSegment + offset(length)], newLength
  12606. IR::IndirOpnd *lengthIndirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  12607. InsertMove(lengthIndirOpnd, newLengthOpnd, ldElem);
  12608. // MOV [base + offset(length)], newLength
  12609. lengthIndirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  12610. InsertMove(lengthIndirOpnd, newLengthOpnd, ldElem);
  12611. if (tmpDst)
  12612. {
  12613. // The array opnd and the destination is the same, need to move the value in the tmp dst
  12614. // to the actual dst
  12615. InsertMove(ldElem->GetDst(), tmpDst, ldElem);
  12616. }
  12617. return indirOpnd;
  12618. }
  12619. } // #endif
  12620. if (baseValueType.IsLikelyTypedArray())
  12621. {
  12622. if(!headSegmentOpnd)
  12623. {
  12624. // MOV headSegment, [base + offset(arrayBuffer)]
  12625. int bufferOffset;
  12626. bufferOffset = Js::Float64Array::GetOffsetOfBuffer();
  12627. indirOpnd = IR::IndirOpnd::New(arrayOpnd, bufferOffset, TyMachPtr, this->m_func);
  12628. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  12629. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  12630. InsertMove(headSegmentOpnd, indirOpnd, ldElem);
  12631. }
  12632. // indirOpnd = [headSegment + index]
  12633. if (indexValueOpnd->IsIntConstOpnd())
  12634. {
  12635. IntConstType offset = (value << indirScale);
  12636. // TODO: Assert(Math::FitsInDWord(offset));
  12637. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  12638. }
  12639. else
  12640. {
  12641. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  12642. }
  12643. }
  12644. else if (indexValueOpnd->IsIntConstOpnd())
  12645. {
  12646. // indirOpnd = [headSegment + index + offset(elements)]
  12647. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  12648. // TODO: Assert(Math::FitsInDWord(offset));
  12649. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  12650. }
  12651. else
  12652. {
  12653. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  12654. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  12655. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  12656. }
  12657. return indirOpnd;
  12658. }
  12659. void
  12660. Lowerer::GenerateTypeIdCheck(Js::TypeId typeId, IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateObjectCheck)
  12661. {
  12662. if (generateObjectCheck && !opnd->IsNotTaggedValue())
  12663. {
  12664. m_lowererMD.GenerateObjectTest(opnd, insertBeforeInstr, labelFail);
  12665. }
  12666. // MOV r1, [opnd + offset(type)]
  12667. IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  12668. const IR::AutoReuseOpnd autoReuseR1(r1, m_func);
  12669. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(opnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  12670. InsertMove(r1, indirOpnd, insertBeforeInstr);
  12671. // CMP [r1 + offset(typeId)], typeid -- check src isString
  12672. // JNE $fail
  12673. indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  12674. InsertCompareBranch(
  12675. indirOpnd,
  12676. IR::IntConstOpnd::New(typeId, TyInt32, this->m_func),
  12677. Js::OpCode::BrNeq_A,
  12678. labelFail,
  12679. insertBeforeInstr);
  12680. }
  12681. IR::RegOpnd *
  12682. Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
  12683. {
  12684. if (!opnd->IsVar())
  12685. {
  12686. AssertMsg(opnd->GetSize() == 4, "This should be 32-bit wide");
  12687. return opnd;
  12688. }
  12689. return m_lowererMD.GenerateUntagVar(opnd, labelFail, insertBeforeInstr, generateTagCheck && !opnd->IsTaggedInt());
  12690. }
  12691. void
  12692. Lowerer::GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * isZeroLabel, IR::Instr * insertBeforeInstr)
  12693. {
  12694. InsertTestBranch(opndSrc, opndSrc, Js::OpCode::BrEq_A, isZeroLabel, insertBeforeInstr);
  12695. }
bool
Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr * labelFallThru)
{
    // Fast path for LdElem where the base is likely a string and the index is likely
    // an int: fetch the character from the string buffer and return the cached
    // single-character string from the char-string cache. Emits IR before ldElem;
    // jumps to labelHelper on any failed check and to labelFallThru on success.
    // Returns false (emitting nothing) when profile data says the fast path does
    // not apply; returns true when the fast path was emitted.
    IR::IndirOpnd * indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();
    // don't generate the fast path if the instance is not likely string
    if (!baseOpnd->GetValueType().IsLikelyString())
    {
        return false;
    }
    Assert(!baseOpnd->IsTaggedInt());
    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // Don't generate the fast path if the index operand is not likely int
    if (indexOpnd && !indexOpnd->GetValueType().IsLikelyInt())
    {
        return false;
    }
    Assert(!indexOpnd || !indexOpnd->IsNotInt());
    // Make sure the instance is a string (runtime type check; bails to helper)
    GenerateStringTest(baseOpnd, ldElem, labelHelper);
    IR::Opnd * index32CmpOpnd;
    IR::RegOpnd * bufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseBufferOpnd(bufferOpnd, m_func);
    IR::IndirOpnd * charIndirOpnd;
    if (indexOpnd)
    {
        // Untag the var and generate the indir into the string buffer
        // (scale 1 because TyUint16 chars are 2 bytes; the scale is the shift count)
        IR::RegOpnd * index32Opnd = GenerateUntagVar(indexOpnd, labelHelper, ldElem);
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, index32Opnd, 1, TyUint16, this->m_func);
        index32CmpOpnd = index32Opnd;
    }
    else
    {
        // Constant index: just use the offset to indirect into the string buffer
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, indirOpnd->GetOffset() * sizeof(wchar_t), TyUint16, this->m_func);
        index32CmpOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyUint32, this->m_func);
    }
    // Check if the index is in range of the string length
    // CMP [baseOpnd + offset(length)], indexOpnd      -- string length
    // JBE $helper                                     -- unsigned compare, and string length are at most INT_MAX - 1
    //                                                 -- so that even if we have a negative index, this will fail
    InsertCompareBranch(IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyInt32, this->m_func)
        , index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);
    // Load the string buffer and make sure it is not null
    // (a null m_pszValue means the flat buffer hasn't been materialized yet)
    // MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
    // TEST bufferOpnd, bufferOpnd
    // JEQ $labelHelper
    indirOpnd = IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(bufferOpnd, indirOpnd, ldElem);
    GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);
    // Load the character and check if it is 7bit ASCI (which we have the cache for)
    // MOV charOpnd, [bufferOpnd + index32Opnd]
    // CMP charOpnd, 0x80
    // JAE $helper
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
    InsertMove(charOpnd, charIndirOpnd, ldElem);
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
        Js::OpCode::BrGe_A, true, labelHelper, ldElem);
    // Load the string from the cache
    // MOV charStringCache, <charStringCache, address>
    // MOV stringOpnd, [charStringCache + charOpnd * 4]
    IR::RegOpnd * cacheOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseCacheOpnd(cacheOpnd, m_func);
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheOpnd, this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueCharStringCache), ldElem);
    // Check if we have created the string or not
    // (cache slots are lazily populated; a null slot means fall back to helper)
    // TEST stringOpnd, stringOpnd
    // JE $helper
    IR::RegOpnd * stringOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseStringOpnd(stringOpnd, m_func);
    InsertMove(stringOpnd, IR::IndirOpnd::New(cacheOpnd, charOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), ldElem);
    GenerateNotZeroTest(stringOpnd, labelHelper, ldElem);
    InsertMove(ldElem->GetDst(), stringOpnd, ldElem);
    InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
    return true;
}
  12773. bool
  12774. Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
  12775. {
  12776. Assert(instrIsInHelperBlockRef);
  12777. bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
  12778. instrIsInHelperBlock = false;
  12779. IR::LabelInstr * labelHelper;
  12780. IR::LabelInstr * labelFallThru;
  12781. IR::LabelInstr * labelBailOut = nullptr;
  12782. IR::LabelInstr * labelMissingNative = nullptr;
  12783. IR::Opnd *src1 = ldElem->GetSrc1();
  12784. AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on LdElementI");
  12785. IR::IndirOpnd * indirOpnd = src1->AsIndirOpnd();
  12786. // From FastElemICommon:
  12787. // TEST base, AtomTag -- check base not tagged int
  12788. // JNE $helper
  12789. // MOV r1, [base + offset(type)] -- check base isArray
  12790. // CMP [r1 + offset(typeId)], TypeIds_Array
  12791. // JNE $helper
  12792. // TEST index, 1 -- index tagged int
  12793. // JEQ $helper
  12794. // MOV r2, index
  12795. // SAR r2, Js::VarTag_Shift -- remote atom tag
  12796. // JS $helper -- exclude negative index
  12797. // MOV r4, [base + offset(head)]
  12798. // CMP r2, [r4 + offset(length)] -- bounds check
  12799. // JAE $helper
  12800. // MOV r3, [r4 + offset(elements)]
  12801. // Generated here:
  12802. // MOV dst, [r3 + r2]
  12803. // TEST dst, dst
  12804. // JNE $fallthrough
  12805. if(ldElem->m_opcode == Js::OpCode::LdMethodElem && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray())
  12806. {
  12807. // Typed arrays don't return objects, so it's not worth generating a fast path for LdMethodElem. Calling the helper also
  12808. // generates a better error message. Skip the fast path and just generate a helper call.
  12809. return true;
  12810. }
  12811. labelFallThru = ldElem->GetOrCreateContinueLabel();
  12812. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  12813. // If we know for sure (based on flow graph) we're loading from the arguments object, then ignore the (path-based) profile info.
  12814. bool isNativeArrayLoad = !ldElem->DoStackArgsOpt(this->m_func) && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
  12815. bool needMissingValueCheck = true;
  12816. bool emittedFastPath = false;
  12817. bool emitBailout = false;
  12818. if (ldElem->DoStackArgsOpt(this->m_func))
  12819. {
  12820. emittedFastPath = GenerateFastArgumentsLdElemI(ldElem, labelHelper, labelFallThru);
  12821. }
  12822. else if (GenerateFastStringLdElem(ldElem, labelHelper, labelFallThru))
  12823. {
  12824. emittedFastPath = true;
  12825. }
  12826. else
  12827. {
  12828. IR::LabelInstr * labelCantUseArray = labelHelper;
  12829. if (isNativeArrayLoad)
  12830. {
  12831. if (ldElem->GetDst()->GetType() == TyVar)
  12832. {
  12833. // Skip the fast path and just generate a helper call
  12834. return true;
  12835. }
  12836. // Specialized native array lowering for LdElem requires that it is profiled. When not profiled, GlobOpt should not
  12837. // have specialized it.
  12838. Assert(ldElem->IsProfiledInstr());
  12839. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  12840. labelCantUseArray = labelBailOut;
  12841. }
  12842. bool isTypedArrayElement, isStringIndex;
  12843. indirOpnd =
  12844. GenerateFastElemICommon(
  12845. ldElem,
  12846. false,
  12847. src1->AsIndirOpnd(),
  12848. labelHelper,
  12849. labelCantUseArray,
  12850. labelFallThru,
  12851. &isTypedArrayElement,
  12852. &isStringIndex,
  12853. &emitBailout);
  12854. IR::Opnd *dst = ldElem->GetDst();
  12855. IRType dstType = dst->AsRegOpnd()->GetType();
  12856. // The index is negative or not int.
  12857. if (indirOpnd == nullptr)
  12858. {
  12859. Assert(!(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall));
  12860. // The global optimizer should never type specialize a LdElem for which the index is not int or an integer constant
  12861. // with a negative value. This would force an unconditional bail out on the main code path.
  12862. if (dst->IsVar())
  12863. {
  12864. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  12865. {
  12866. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  12867. Output::Print(L"Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n",
  12868. this->m_func->GetJnFunction()->GetDisplayName(),
  12869. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  12870. Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode));
  12871. Output::Flush();
  12872. }
  12873. // We must be dealing with some unconventional index value. Don't emit fast path, but go directly to helper.
  12874. emittedFastPath = false;
  12875. return true;
  12876. }
  12877. else
  12878. {
  12879. AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
  12880. Assert(dst->IsRegOpnd());
  12881. // If global optimizer failed to notice the unconventional index and type specialized the dst,
  12882. // there is nothing to do but bail out. This could happen if global optimizer's information based
  12883. // on value tracking fails to recognize a non-integer index or a constant int index that is negative.
  12884. // The bailout below ensures that we behave correctly in retail builds even under
  12885. // these (unlikely) conditions. To satisfy the downstream code we must populate the type specialized operand
  12886. // with some made up values, even though we will unconditionally bail out here and the values will never be
  12887. // used.
  12888. IR::IntConstOpnd *constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
  12889. InsertMove(dst, constOpnd, ldElem);
  12890. ldElem->UnlinkSrc1();
  12891. ldElem->UnlinkDst();
  12892. GenerateBailOut(ldElem, nullptr, nullptr);
  12893. emittedFastPath = true;
  12894. return false;
  12895. }
  12896. }
  12897. const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
  12898. const ValueType baseValueType(src1->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  12899. if (ldElem->HasBailOutInfo() &&
  12900. ldElem->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
  12901. ldElem->GetBailOutInfo()->bailOutOffset <= ldElem->GetByteCodeOffset() &&
  12902. dst->IsEqual(src1->AsIndirOpnd()->GetBaseOpnd()) ||
  12903. (src1->AsIndirOpnd()->GetIndexOpnd() && dst->IsEqual(src1->AsIndirOpnd()->GetIndexOpnd())))
  12904. {
  12905. // This is a pre-op bailout where the dst is the same as one of the srcs. The dst may be trashed before bailing out,
  12906. // but since the operation will be processed again in the interpreter, src values need to be kept intact. Use a
  12907. // temporary dst until after the operation is complete.
  12908. IR::Instr *instrSink = ldElem->SinkDst(Js::OpCode::Ld_A);
  12909. // The sink instruction needs to be on the fall-through path
  12910. instrSink->Unlink();
  12911. labelFallThru->InsertAfter(instrSink);
  12912. LowererMD::ChangeToAssign(instrSink);
  12913. dst = ldElem->GetDst();
  12914. }
  12915. if (isTypedArrayElement)
  12916. {
  12917. // For typedArrays, convert the loaded element to the appropriate type
  12918. IR::RegOpnd *reg;
  12919. IR::AutoReuseOpnd autoReuseReg;
  12920. Assert(dst->IsRegOpnd());
  12921. if(indirOpnd->IsFloat())
  12922. {
  12923. AssertMsg((dstType == TyFloat64) || (dstType == TyVar), "For Float32Array LdElemI's dst should be specialized to TyFloat64 or not at all.");
  12924. if(indirOpnd->IsFloat32())
  12925. {
  12926. // MOVSS reg32.f32, indirOpnd.f32
  12927. IR::RegOpnd *reg32 = IR::RegOpnd::New(TyFloat32, this->m_func);
  12928. const IR::AutoReuseOpnd autoReuseReg32(reg32, m_func);
  12929. InsertMove(reg32, indirOpnd, ldElem);
  12930. // CVTPS2PD dst/reg.f64, reg32.f64
  12931. reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
  12932. autoReuseReg.Initialize(reg, m_func);
  12933. InsertConvertFloat32ToFloat64(reg, reg32, ldElem);
  12934. }
  12935. else
  12936. {
  12937. Assert(indirOpnd->IsFloat64());
  12938. // MOVSD dst/reg.f64, indirOpnd.f64
  12939. reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
  12940. autoReuseReg.Initialize(reg, m_func);
  12941. InsertMove(reg, indirOpnd, ldElem);
  12942. }
  12943. if (dstType != TyFloat64)
  12944. {
  12945. // Convert reg.f64 to var
  12946. m_lowererMD.SaveDoubleToVar(dst->AsRegOpnd(), reg, ldElem, ldElem);
  12947. }
  12948. #if FLOATVAR
  12949. // For NaNs, go to the helper to guarantee we don't have an illegal NaN
  12950. // UCOMISD reg, reg
  12951. {
  12952. IR::Instr *const instr = IR::Instr::New(Js::OpCode::UCOMISD, this->m_func);
  12953. instr->SetSrc1(reg);
  12954. instr->SetSrc2(reg);
  12955. ldElem->InsertBefore(instr);
  12956. }
  12957. // JP $helper
  12958. {
  12959. IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::JP, labelHelper, this->m_func);
  12960. ldElem->InsertBefore(instr);
  12961. }
  12962. #endif
  12963. if(dstType == TyFloat64)
  12964. {
  12965. emitBailout = true;
  12966. }
  12967. }
  12968. else
  12969. {
  12970. AssertMsg((dstType == TyInt32) || (dstType == TyVar), "For Int/UintArray LdElemI's dst should be specialized to TyInt32 or not at all.");
  12971. reg = dstType == TyInt32 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyInt32, this->m_func);
  12972. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  12973. // Int32 and Uint32 arrays could overflow an int31, but the others can't
  12974. if (indirOpnd->GetType() != TyUint32
  12975. #if !INT32VAR
  12976. && indirOpnd->GetType() != TyInt32
  12977. #endif
  12978. )
  12979. {
  12980. reg->SetValueType(ValueType::GetTaggedInt()); // Fits as a tagged-int
  12981. }
  12982. // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
  12983. IR::Instr *const instr = InsertMove(reg, indirOpnd, ldElem);
  12984. if (dstType == TyInt32)
  12985. {
  12986. instr->dstIsTempNumber = ldElem->dstIsTempNumber;
  12987. instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
  12988. if (indirOpnd->GetType() == TyUint32)
  12989. {
  12990. // TEST dst, dst
  12991. // JSB $helper (bailout)
  12992. InsertCompareBranch(
  12993. reg,
  12994. IR::IntConstOpnd::New(0, TyUint32, this->m_func, /* dontEncode = */ true),
  12995. Js::OpCode::BrLt_A,
  12996. labelHelper,
  12997. ldElem);
  12998. }
  12999. emitBailout = true;
  13000. }
  13001. else
  13002. {
  13003. // MOV dst, reg
  13004. IR::Instr *const instr = IR::Instr::New(Js::OpCode::ToVar, dst, reg, this->m_func);
  13005. instr->dstIsTempNumber = ldElem->dstIsTempNumber;
  13006. instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
  13007. ldElem->InsertBefore(instr);
  13008. // Convert dst to var
  13009. m_lowererMD.EmitLoadVar(instr, /* isFromUint32 = */ (indirOpnd->GetType() == TyUint32));
  13010. }
  13011. }
  13012. // JMP $fallthrough
  13013. InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
  13014. emittedFastPath = true;
  13015. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  13016. {
  13017. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  13018. baseValueType.ToString(baseValueTypeStr);
  13019. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  13020. Output::Print(L"Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s.",
  13021. this->m_func->GetJnFunction()->GetDisplayName(),
  13022. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  13023. Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode),
  13024. baseValueTypeStr,
  13025. (!dst->IsVar() ? L"specialized" : L"not specialized"));
  13026. Output::Print(L"\n");
  13027. Output::Flush();
  13028. }
  13029. }
  13030. else
  13031. {
  13032. // MOV dst, indirOpnd
  13033. InsertMove(dst, indirOpnd, ldElem);
  13034. // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
  13035. // we don't need to do any further checks in that case
  13036. // For LdMethodElem, if the loaded value is a tagged number, the error message generated by the helper call is
  13037. // better than if we were to just try to call the number. Also, the call arguments need to be evaluated before
  13038. // throwing the error, so just test whether it's an object and jump to helper if it's not.
  13039. const bool needObjectTest = !isStringIndex && !isNativeArrayLoad && ldElem->m_opcode == Js::OpCode::LdMethodElem;
  13040. needMissingValueCheck =
  13041. !isStringIndex && !(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues());
  13042. if(needMissingValueCheck)
  13043. {
  13044. // TEST dst, dst
  13045. // JEQ $helper | JNE $fallthrough
  13046. InsertCompareBranch(
  13047. dst,
  13048. GetMissingItemOpnd(dst->GetType(), m_func),
  13049. needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
  13050. needObjectTest ? labelHelper : labelFallThru,
  13051. ldElem,
  13052. true);
  13053. if (isNativeArrayLoad)
  13054. {
  13055. Assert(!needObjectTest);
  13056. Assert(labelHelper != labelBailOut);
  13057. if(ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined())
  13058. {
  13059. // We're going to bail out trying to load "missing value" into a type-spec'd opnd.
  13060. // Branch to a point where we'll convert the array so that we don't keep bailing here.
  13061. // (Gappy arrays are not well-suited to nativeness.)
  13062. labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  13063. InsertBranch(Js::OpCode::Br, labelMissingNative, ldElem);
  13064. }
  13065. else
  13066. {
  13067. // If the value has not been profiled to be undefined at some point, jump directly to bail out
  13068. InsertBranch(Js::OpCode::Br, labelBailOut, ldElem);
  13069. }
  13070. }
  13071. }
  13072. if(needObjectTest)
  13073. {
  13074. // GenerateObjectTest(dst)
  13075. // JIsObject $fallthrough
  13076. m_lowererMD.GenerateObjectTest(dst, ldElem, labelFallThru, true);
  13077. }
  13078. else if(!needMissingValueCheck)
  13079. {
  13080. // JMP $fallthrough
  13081. InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
  13082. }
  13083. emittedFastPath = true;
  13084. }
  13085. }
  13086. // $helper:
  13087. // bailout or caller generated helper call
  13088. // $fallthru:
  13089. if (!emittedFastPath)
  13090. {
  13091. labelHelper->isOpHelper = false;
  13092. }
  13093. ldElem->InsertBefore(labelHelper);
  13094. instrIsInHelperBlock = true;
  13095. if (isNativeArrayLoad)
  13096. {
  13097. Assert(ldElem->HasBailOutInfo());
  13098. Assert(labelHelper != labelBailOut);
  13099. // Transform the original instr:
  13100. //
  13101. // $helper:
  13102. // dst = LdElemI_A src (BailOut)
  13103. // $fallthrough:
  13104. //
  13105. // to:
  13106. //
  13107. // b $fallthru <--- we get here if we loaded a valid element directly
  13108. // $helper:
  13109. // dst = LdElemI_A src
  13110. // cmp dst, MissingItem
  13111. // bne $fallthrough
  13112. // $bailout:
  13113. // BailOut
  13114. // $fallthrough:
  13115. LowerOneBailOutKind(ldElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
  13116. IR::Instr *const insertBeforeInstr = ldElem->m_next;
  13117. // Do missing value check on value returned from helper so that we don't have to check the index against
  13118. // array length. (We already checked it above against the segment length.)
  13119. bool hasBeenUndefined = ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined();
  13120. if (hasBeenUndefined)
  13121. {
  13122. if(!emitBailout)
  13123. {
  13124. if (labelMissingNative == nullptr)
  13125. {
  13126. labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  13127. }
  13128. InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr, true);
  13129. }
  13130. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13131. if(labelMissingNative)
  13132. {
  13133. // We're going to bail out on a load from a gap, but convert the array to Var first, so we don't just
  13134. // bail here over and over. Gappy arrays are not well suited to nativeness.
  13135. // NOTE: only emit this call if the profile tells us that this has happened before ("hasBeenUndefined").
  13136. // Emitting this in Navier-Stokes brutalizes the score.
  13137. insertBeforeInstr->InsertBefore(labelMissingNative);
  13138. IR::JnHelperMethod helperMethod;
  13139. indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
  13140. if (indirOpnd->GetBaseOpnd()->GetValueType().HasIntElements())
  13141. {
  13142. helperMethod = IR::HelperIntArr_ToVarArray;
  13143. }
  13144. else
  13145. {
  13146. Assert(indirOpnd->GetBaseOpnd()->GetValueType().HasFloatElements());
  13147. helperMethod = IR::HelperFloatArr_ToVarArray;
  13148. }
  13149. m_lowererMD.LoadHelperArgument(insertBeforeInstr, indirOpnd->GetBaseOpnd());
  13150. IR::Instr *instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
  13151. instrHelper->SetSrc1(IR::HelperCallOpnd::New(helperMethod, m_func));
  13152. insertBeforeInstr->InsertBefore(instrHelper);
  13153. m_lowererMD.LowerCall(instrHelper, 0);
  13154. }
  13155. }
  13156. else
  13157. {
  13158. if(!emitBailout)
  13159. {
  13160. InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr, true);
  13161. }
  13162. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13163. }
  13164. insertBeforeInstr->InsertBefore(labelBailOut);
  13165. }
  13166. if (emitBailout)
  13167. {
  13168. ldElem->UnlinkSrc1();
  13169. ldElem->UnlinkDst();
  13170. GenerateBailOut(ldElem, nullptr, nullptr);
  13171. }
  13172. return !emitBailout;
  13173. }
  13174. IR::Opnd *
  13175. Lowerer::GetMissingItemOpnd(IRType type, Func *func)
  13176. {
  13177. if (type == TyVar)
  13178. {
  13179. return IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, func, true);
  13180. }
  13181. if (type == TyInt32)
  13182. {
  13183. return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true);
  13184. }
  13185. Assert(type == TyFloat64);
  13186. return IR::MemRefOpnd::New((BYTE*)&Js::JavascriptNativeFloatArray::MissingItem, TyFloat64, func);
  13187. }
  13188. bool
  13189. Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
  13190. {
  13191. Assert(instrIsInHelperBlockRef);
  13192. bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
  13193. instrIsInHelperBlock = false;
  13194. IR::LabelInstr * labelHelper;
  13195. IR::LabelInstr * labelSegmentLengthIncreased;
  13196. IR::LabelInstr * labelFallThru;
  13197. IR::LabelInstr * labelBailOut = nullptr;
  13198. IR::Opnd *dst = stElem->GetDst();
  13199. IR::IndirOpnd * indirOpnd = dst->AsIndirOpnd();
  13200. AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
  13201. // From FastElemICommon:
  13202. // TEST base, AtomTag -- check base not tagged int
  13203. // JNE $helper
  13204. // MOV r1, [base + offset(type)] -- check base isArray
  13205. // CMP [r1 + offset(typeId)], TypeIds_Array
  13206. // JNE $helper
  13207. // TEST index, 1 -- index tagged int
  13208. // JEQ $helper
  13209. // MOV r2, index
  13210. // SAR r2, Js::VarTag_Shift -- remote atom tag
  13211. // JS $helper -- exclude negative index
  13212. // MOV r4, [base + offset(head)]
  13213. // CMP r2, [r4 + offset(length)] -- bounds check
  13214. // JB $done
  13215. // CMP r2, [r4 + offset(size)] -- chunk has room?
  13216. // JAE $helper
  13217. // LEA r5, [r2 + 1]
  13218. // MOV [r4 + offset(length)], r5 -- update length on chunk
  13219. // CMP r5, [base + offset(length)]
  13220. // JBE $done
  13221. // MOV [base + offset(length)], r5 -- update length on array
  13222. // $done
  13223. // LEA r3, [r4 + offset(elements)]
  13224. // Generated here.
  13225. // MOV [r3 + r2], src
  13226. labelFallThru = stElem->GetOrCreateContinueLabel();
  13227. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13228. bool emitBailout = false;
  13229. bool isNativeArrayStore = indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
  13230. IR::LabelInstr * labelCantUseArray = labelHelper;
  13231. if (isNativeArrayStore)
  13232. {
  13233. if (stElem->GetSrc1()->GetType() != GetArrayIndirType(indirOpnd->GetBaseOpnd()->GetValueType()))
  13234. {
  13235. // Skip the fast path and just generate a helper call
  13236. return true;
  13237. }
  13238. if(stElem->HasBailOutInfo())
  13239. {
  13240. const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
  13241. if (bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
  13242. {
  13243. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13244. labelCantUseArray = labelBailOut;
  13245. }
  13246. }
  13247. }
  13248. bool isTypedArrayElement, isStringIndex;
  13249. indirOpnd =
  13250. GenerateFastElemICommon(
  13251. stElem,
  13252. true,
  13253. indirOpnd,
  13254. labelHelper,
  13255. labelCantUseArray,
  13256. labelFallThru,
  13257. &isTypedArrayElement,
  13258. &isStringIndex,
  13259. &emitBailout,
  13260. &labelSegmentLengthIncreased);
  13261. IR::Opnd *src = stElem->GetSrc1();
  13262. const IR::AutoReuseOpnd autoReuseSrc(src, m_func);
  13263. // The index is negative or not int.
  13264. if (indirOpnd == nullptr)
  13265. {
  13266. Assert(!(stElem->HasBailOutInfo() && stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall));
  13267. // The global optimizer should never type specialize a StElem for which we know the index is not int or is a negative
  13268. // int constant. This would result in an unconditional bailout on the main code path.
  13269. if (src->IsVar())
  13270. {
  13271. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  13272. {
  13273. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  13274. Output::Print(L"Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n",
  13275. this->m_func->GetJnFunction()->GetDisplayName(),
  13276. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  13277. Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode));
  13278. Output::Flush();
  13279. }
  13280. // We must be dealing with some atypical index value. Don't emit fast path, but go directly to helper.
  13281. return true;
  13282. }
  13283. else
  13284. {
  13285. // If global optimizer failed to notice the unconventional index and type specialized the src,
  13286. // there is nothing to do but bail out. We should never hit this code path, unless the global optimizer's conditions
  13287. // for not specializing the instruction don't match the lowerer's conditions for not emitting the array checks (see above).
  13288. // This could happen if global optimizer's information based on value tracking fails to recognize a non-integer index or
  13289. // a constant int index that is negative. The bailout below ensures that we behave correctly in retail builds even under
  13290. // these (unlikely) conditions.
  13291. AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
  13292. stElem->UnlinkSrc1();
  13293. stElem->UnlinkDst();
  13294. GenerateBailOut(stElem, nullptr, nullptr);
  13295. return false;
  13296. }
  13297. }
  13298. const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
  13299. const ValueType baseValueType(dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  13300. if (isTypedArrayElement)
  13301. {
  13302. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  13303. {
  13304. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  13305. baseValueType.ToString(baseValueTypeStr);
  13306. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  13307. Output::Print(L"Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s.",
  13308. this->m_func->GetJnFunction()->GetDisplayName(),
  13309. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  13310. Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode),
  13311. baseValueTypeStr,
  13312. (!src->IsVar() ? L"specialized" : L"not specialized"));
  13313. Output::Print(L"\n");
  13314. Output::Flush();
  13315. }
  13316. ObjectType objectType = baseValueType.GetObjectType();
  13317. if(indirOpnd->IsFloat())
  13318. {
  13319. if (src->GetType() == TyFloat64)
  13320. {
  13321. IR::RegOpnd *const regSrc = src->AsRegOpnd();
  13322. if (indirOpnd->IsFloat32())
  13323. {
  13324. // CVTSD2SS reg.f32, regSrc.f64 -- Convert regSrc from f64 to f32
  13325. IR::RegOpnd *const reg = IR::RegOpnd::New(TyFloat32, this->m_func);
  13326. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13327. InsertConvertFloat64ToFloat32(reg, regSrc, stElem);
  13328. // MOVSS indirOpnd, reg
  13329. InsertMove(indirOpnd, reg, stElem);
  13330. }
  13331. else
  13332. {
  13333. // MOVSD indirOpnd, regSrc
  13334. InsertMove(indirOpnd, regSrc, stElem);
  13335. }
  13336. emitBailout = true;
  13337. }
  13338. else
  13339. {
  13340. Assert(src->GetType() == TyVar);
  13341. // MOV reg, src
  13342. IR::RegOpnd *const reg = IR::RegOpnd::New(TyVar, this->m_func);
  13343. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13344. InsertMove(reg, src, stElem);
  13345. // Convert to float, and assign to indirOpnd
  13346. if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
  13347. {
  13348. IR::RegOpnd* dstReg = IR::RegOpnd::New(indirOpnd->GetType(), this->m_func);
  13349. m_lowererMD.EmitLoadFloat(dstReg, reg, stElem);
  13350. InsertMove(indirOpnd, dstReg, stElem);
  13351. }
  13352. else
  13353. {
  13354. m_lowererMD.EmitLoadFloat(indirOpnd, reg, stElem);
  13355. }
  13356. }
  13357. }
  13358. else if (objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray)
  13359. {
  13360. Assert(indirOpnd->GetType() == TyUint8);
  13361. IR::RegOpnd *regSrc;
  13362. IR::AutoReuseOpnd autoReuseRegSrc;
  13363. if(src->IsRegOpnd())
  13364. {
  13365. regSrc = src->AsRegOpnd();
  13366. }
  13367. else
  13368. {
  13369. regSrc = IR::RegOpnd::New(StackSym::New(src->GetType(), m_func), src->GetType(), m_func);
  13370. autoReuseRegSrc.Initialize(regSrc, m_func);
  13371. InsertMove(regSrc, src, stElem);
  13372. }
  13373. IR::Opnd *bitMaskOpnd;
  13374. IRType srcType = regSrc->GetType();
  13375. if ((srcType == TyFloat64) || (srcType == TyInt32))
  13376. {
  13377. // if (srcType == TyInt32) {
  13378. // TEST regSrc, ~255
  13379. // JE $storeValue
  13380. // JSB $handleNegative
  13381. // MOV indirOpnd, 255
  13382. // JMP $fallThru
  13383. // $handleNegative [isHelper = false]
  13384. // MOV indirOpnd, 0
  13385. // JMP $fallThru
  13386. // $storeValue
  13387. // MOV indirOpnd, regSrc
  13388. // }
  13389. // else {
  13390. // MOVSD regTmp, regSrc
  13391. // ADDSD regTmp, 0.5
  13392. // CVTTSD2SI regOpnd, regTmp
  13393. // TEST regOpnd, ~255
  13394. // JE $storeValue
  13395. // $handleOutOfBounds [isHelper = true]
  13396. // COMISD regSrc, [&FloatZero]
  13397. // JB $handleNegative
  13398. // MOV regOpnd, 255
  13399. // JMP $storeValue
  13400. // $handleNegative [isHelper = true]
  13401. // MOV regOpnd, 0
  13402. // $storeValue
  13403. // MOV indirOpnd, regOpnd
  13404. // }
  13405. // $fallThru
  13406. IR::RegOpnd *regOpnd;
  13407. IR::AutoReuseOpnd autoReuseRegOpnd;
  13408. if (srcType == TyInt32)
  13409. {
  13410. // When srcType == TyInt32 we will never call the helper and we will never
  13411. // modify the regOpnd. Therefore, it's okay to use regSrc directly, and it
  13412. // reduces register pressure.
  13413. regOpnd = regSrc;
  13414. }
  13415. else
  13416. {
  13417. #ifdef _M_IX86
  13418. AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GlobOpt shouldn't have specialized Uint8ClampedArray StElem to float64 if SSE2 is unavailable.");
  13419. #endif
  13420. regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  13421. autoReuseRegOpnd.Initialize(regOpnd, m_func);
  13422. Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray);
  13423. // Uint8ClampedArray follows IEEE 754 rounding rules for ties which round up
  13424. // odd integers and round down even integers. Both ties result in the nearest
  13425. // even integer value.
  13426. //
  13427. // CVTSD2SI regOpnd, regSrc
  13428. LowererMD::InsertConvertFloat64ToInt32(RoundModeHalfToEven, regOpnd, regSrc, stElem);
  13429. }
  13430. IR::LabelInstr *labelStoreValue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  13431. #ifndef _M_ARM
  13432. // TEST regOpnd, ~255
  13433. // JE $storeValue
  13434. bitMaskOpnd = IR::IntConstOpnd::New(~255, TyInt32, this->m_func, true);
  13435. InsertTestBranch(regOpnd, bitMaskOpnd, Js::OpCode::BrEq_A, labelStoreValue, stElem);
  13436. #else // ARM
  13437. // Special case for ARM, a shift may be better
  13438. //
  13439. // ASRS tempReg, src, 8
  13440. // BEQ $inlineSet
  13441. InsertShiftBranch(
  13442. Js::OpCode::Shr_A,
  13443. IR::RegOpnd::New(TyInt32, this->m_func),
  13444. regOpnd,
  13445. IR::IntConstOpnd::New(8, TyInt8, this->m_func),
  13446. Js::OpCode::BrEq_A,
  13447. labelStoreValue,
  13448. stElem);
  13449. #endif
  13450. IR::LabelInstr *labelHandleNegative = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, srcType == TyFloat64);
  13451. if (srcType == TyInt32)
  13452. {
  13453. // JSB $handleNegativeOrOverflow
  13454. InsertBranch(
  13455. LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A),
  13456. labelHandleNegative,
  13457. stElem);
  13458. // MOV IndirOpnd.u8, 255
  13459. InsertMove(indirOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
  13460. // JMP $fallThru
  13461. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13462. // $handleNegative [isHelper = false]
  13463. stElem->InsertBefore(labelHandleNegative);
  13464. // MOV IndirOpnd.u8, 0
  13465. InsertMove(indirOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
  13466. // JMP $fallThru
  13467. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13468. }
  13469. else
  13470. {
  13471. Assert(regOpnd != regSrc);
  13472. // This label is just to ensure the following code is moved to the helper block.
  13473. // $handleOutOfBounds [isHelper = true]
  13474. IR::LabelInstr *labelHandleOutOfBounds = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13475. stElem->InsertBefore(labelHandleOutOfBounds);
  13476. // COMISD regSrc, FloatZero
  13477. // JB labelHandleNegative
  13478. IR::MemRefOpnd * zeroOpnd = IR::MemRefOpnd::New((double*)&(Js::JavascriptNumber::k_Zero), TyMachDouble, this->m_func);
  13479. InsertCompareBranch(regSrc, zeroOpnd, Js::OpCode::BrNotGe_A, labelHandleNegative, stElem);
  13480. // MOV regOpnd, 255
  13481. InsertMove(regOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
  13482. // JMP $storeValue
  13483. InsertBranch(Js::OpCode::Br, labelStoreValue, stElem);
  13484. // $handleNegative [isHelper = true]
  13485. stElem->InsertBefore(labelHandleNegative);
  13486. // MOV regOpnd, 0
  13487. InsertMove(regOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
  13488. }
  13489. // $storeValue
  13490. stElem->InsertBefore(labelStoreValue);
  13491. // MOV IndirOpnd.u8, regOpnd.u8
  13492. InsertMove(indirOpnd, regOpnd, stElem);
  13493. emitBailout = true;
  13494. }
  13495. else
  13496. {
  13497. Assert(srcType == TyVar);
  13498. #if INT32VAR
  13499. bitMaskOpnd = IR::AddrOpnd::New((Js::Var)~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), IR::AddrOpndKindConstantVar, this->m_func, true);
  13500. #else
  13501. bitMaskOpnd = IR::IntConstOpnd::New(~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), TyMachReg, this->m_func, true);
  13502. #endif
  13503. // Note: We are assuming that if no bits other than ~(TaggedInt(255)) are 1, that we have a tagged
  13504. // int value between 0 - 255.
  13505. // #if INT32VAR
  13506. // This works for pointers because tagged int bit can't be on, and first 64k are not valid addresses
  13507. // This works for floats because a valid float would have one of the upper 13 bits on.
  13508. // #else
  13509. // Any pointer is larger than 512 because first 64k memory is reserved by the OS
  13510. // #endif
  13511. IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13512. #ifndef _M_ARM
  13513. // TEST src, ~(TaggedInt(255)) -- Check for tagged int >= 255 and <= 0
  13514. // JEQ $inlineSet
  13515. InsertTestBranch(regSrc, bitMaskOpnd, Js::OpCode::BrEq_A, labelInlineSet, stElem);
  13516. #else // ARM
  13517. // Special case for ARM, a shift may be better
  13518. //
  13519. // ASRS tempReg, src, 8
  13520. // BEQ $inlineSet
  13521. InsertShiftBranch(
  13522. Js::OpCode::Shr_A,
  13523. IR::RegOpnd::New(TyInt32, this->m_func),
  13524. regSrc,
  13525. IR::IntConstOpnd::New(8, TyInt8, this->m_func),
  13526. Js::OpCode::BrEq_A,
  13527. labelInlineSet,
  13528. stElem);
  13529. #endif
  13530. // Uint8ClampedArray::DirectSetItem(array, index, value);
  13531. m_lowererMD.LoadHelperArgument(stElem, regSrc);
  13532. IR::Opnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13533. if (indexOpnd == nullptr)
  13534. {
  13535. indexOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyInt32, this->m_func);
  13536. }
  13537. else
  13538. {
  13539. Assert(indirOpnd->GetOffset() == 0);
  13540. }
  13541. m_lowererMD.LoadHelperArgument(stElem, indexOpnd);
  13542. m_lowererMD.LoadHelperArgument(stElem, stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd());
  13543. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  13544. Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedMixedArray || objectType == ObjectType::Uint8ClampedVirtualArray);
  13545. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperUint8ClampedArraySetItem, this->m_func));
  13546. stElem->InsertBefore(instr);
  13547. m_lowererMD.LowerCall(instr, 0);
  13548. // JMP $fallThrough
  13549. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13550. //$inlineSet
  13551. stElem->InsertBefore(labelInlineSet);
  13552. IR::RegOpnd *regOpnd;
  13553. IR::AutoReuseOpnd autoReuseRegOpnd;
  13554. #if INT32VAR
  13555. regOpnd = regSrc;
  13556. #else
  13557. // MOV r1, src
  13558. // SAR r1, 1
  13559. regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  13560. autoReuseRegOpnd.Initialize(regOpnd, m_func);
  13561. InsertShift(
  13562. Js::OpCode::Shr_A,
  13563. false /* needFlags */,
  13564. regOpnd,
  13565. regSrc,
  13566. IR::IntConstOpnd::New(1, TyInt8, this->m_func),
  13567. stElem);
  13568. #endif
  13569. // MOV IndirOpnd.u8, reg.u8
  13570. InsertMove(indirOpnd, regOpnd, stElem);
  13571. }
  13572. }
  13573. else
  13574. {
  13575. if (src->IsInt32())
  13576. {
  13577. // MOV indirOpnd, src
  13578. InsertMove(indirOpnd, src, stElem);
  13579. emitBailout = true;
  13580. }
  13581. else if (src->IsFloat64())
  13582. {
  13583. AssertMsg(indirOpnd->GetType() == TyUint32, "Only StElemI to Uint32Array could be specialized to float64.");
  13584. #ifdef _M_IX86
  13585. AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GloOpt shouldn't have specialized Uint32Array StElemI to float64 if SSE2 is unavailable.");
  13586. #endif
  13587. IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
  13588. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13589. m_lowererMD.EmitFloatToInt(reg, src, stElem);
  13590. // MOV indirOpnd, reg
  13591. InsertMove(indirOpnd, reg, stElem);
  13592. emitBailout = true;
  13593. }
  13594. else
  13595. {
  13596. Assert(src->IsVar());
  13597. if(src->IsAddrOpnd())
  13598. {
  13599. IR::AddrOpnd *const addrSrc = src->AsAddrOpnd();
  13600. Assert(addrSrc->IsVar());
  13601. Assert(Js::TaggedInt::Is(addrSrc->m_address));
  13602. // MOV indirOpnd, intValue
  13603. InsertMove(
  13604. indirOpnd,
  13605. IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(addrSrc->m_address), TyInt32, m_func),
  13606. stElem);
  13607. }
  13608. else
  13609. {
  13610. IR::RegOpnd *const regSrc = src->AsRegOpnd();
  13611. // FromVar reg, Src
  13612. IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
  13613. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13614. IR::Instr *const instr = IR::Instr::New(Js::OpCode::FromVar, reg, regSrc, stElem->m_func);
  13615. stElem->InsertBefore(instr);
  13616. // Convert reg to int32
  13617. // Note: ToUint32 is implemented as (uint32)ToInt32()
  13618. m_lowererMD.EmitLoadInt32(instr, true /*conversionFromObjectAllowed*/);
  13619. // MOV indirOpnd, reg
  13620. InsertMove(indirOpnd, reg, stElem);
  13621. }
  13622. }
  13623. }
  13624. }
  13625. else
  13626. {
  13627. if(labelSegmentLengthIncreased)
  13628. {
  13629. IR::Instr *const insertBeforeInstr = labelSegmentLengthIncreased->m_next;
  13630. // labelSegmentLengthIncreased:
  13631. // mov [segment + index], src
  13632. // jmp $fallThru
  13633. InsertMove(indirOpnd, src, insertBeforeInstr);
  13634. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13635. }
  13636. if (!(isStringIndex || baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
  13637. {
  13638. if(!stElem->IsProfiledInstr() || stElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
  13639. {
  13640. // Check whether the store is filling a missing value. If so, fall back to the helper so that it can check whether
  13641. // this store is filling the last missing value in the array. This is necessary to keep the missing value tracking
  13642. // in arrays precise. The check is omitted when profile data says that the store is likely to create missing values.
  13643. //
  13644. // cmp [segment + index], Js::SparseArraySegment::MissingValue
  13645. // je $helper
  13646. InsertCompareBranch(
  13647. indirOpnd,
  13648. GetMissingItemOpnd(src->GetType(), m_func),
  13649. Js::OpCode::BrEq_A,
  13650. labelHelper,
  13651. stElem,
  13652. true);
  13653. }
  13654. else
  13655. {
  13656. GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, stElem);
  13657. }
  13658. }
  13659. // MOV [r3 + r2], src
  13660. InsertMoveWithBarrier(indirOpnd, src, stElem);
  13661. }
  13662. // JMP $fallThru
  13663. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13664. // $helper:
  13665. // bailout or caller generated helper call
  13666. // $fallThru:
  13667. stElem->InsertBefore(labelHelper);
  13668. instrIsInHelperBlock = true;
  13669. if (isNativeArrayStore && !isStringIndex)
  13670. {
  13671. Assert(stElem->HasBailOutInfo());
  13672. Assert(labelHelper != labelBailOut);
  13673. // Transform the original instr:
  13674. //
  13675. // $helper:
  13676. // dst = LdElemI_A src (BailOut)
  13677. // $fallthrough:
  13678. //
  13679. // to:
  13680. //
  13681. // $helper:
  13682. // dst = LdElemI_A src
  13683. // b $fallthrough
  13684. // $bailout:
  13685. // BailOut
  13686. // $fallthrough:
  13687. LowerOneBailOutKind(stElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
  13688. IR::Instr *const insertBeforeInstr = stElem->m_next;
  13689. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13690. insertBeforeInstr->InsertBefore(labelBailOut);
  13691. }
  13692. if (emitBailout)
  13693. {
  13694. stElem->UnlinkSrc1();
  13695. stElem->UnlinkDst();
  13696. GenerateBailOut(stElem, nullptr, nullptr);
  13697. }
  13698. return !emitBailout;
  13699. }
// Generates the inline fast path for LdLen (loading the "length" property).
// Emits an array-length and/or string-length fast path depending on the
// profiled value type of the source, falling back to labelHelper otherwise.
// Params:
//   ldLen                 - the LdLen instruction being lowered; instructions
//                           are inserted before it.
//   instrIsInHelperBlockRef - out-param; set to true once the helper label has
//                           been inserted (the caller-generated helper call
//                           will then live in a helper block).
// Returns: true if the caller must still generate the helper call after the
// fast path; false if this function consumed the instruction itself (the
// BailOutOnIrregularLength case, which bails out instead of calling a helper).
bool
Lowerer::GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    //     TEST src, AtomTag                  -- check src not tagged int
    //     JNE $helper
    //     CMP [src], JavascriptArray::`vtable' -- check base isArray
    //     JNE $string
    //     MOV length, [src + offset(length)]  -- Load array length
    //     JMP $tovar
    // $string:
    //     CMP [src + offset(type)], static_string_type -- check src isString
    //     JNE $helper
    //     MOV length, [src + offset(length)] -- Load string length
    // $toVar:
    //     TEST length, 0xC0000000 -- test for overflow of SHL, or negative
    //     JNE $helper
    //     SHL length, Js::VarTag_Shift -- restore the var tag on the result
    //     INC length
    //     MOV dst, length
    //     JMP $fallthru
    // $helper:
    //     CALL GetProperty(src, length_property_id, scriptContext)
    // $fallthru:

    IR::RegOpnd * opnd = ldLen->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * dst = ldLen->GetDst()->AsRegOpnd();
    IR::RegOpnd * src = opnd->AsRegOpnd();
    const ValueType srcValueType(src->GetValueType());

    AssertMsg(src->IsRegOpnd(), "Expected regOpnd on LdLen");

    IR::LabelInstr *const labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (ldLen->DoStackArgsOpt(this->m_func))
    {
        // Loading the length of the arguments object with the stack-args
        // optimization enabled has its own dedicated fast path.
        GenerateFastArgumentsLdLen(ldLen, labelHelper, ldLen->GetOrCreateContinueLabel());
    }
    else
    {
        const bool arrayFastPath = ShouldGenerateArrayFastPath(src, false, true, false);

        // HasBeenString instead of IsLikelyString because it could be a merge between StringObject and String, and this
        // information about whether it's a StringObject or some other object is not available in the profile data
        const bool stringFastPath = srcValueType.IsUninitialized() || srcValueType.HasBeenString();

        if(!(arrayFastPath || stringFastPath))
        {
            // No fast path applies; caller generates the helper call inline.
            return true;
        }

        const int32 arrayOffsetOfLength =
            srcValueType.IsLikelyAnyOptimizedArray()
                ? GetArrayOffsetOfLength(srcValueType)
                : Js::JavascriptArray::GetOffsetOfLength();
        IR::LabelInstr *labelString = nullptr;
        IR::RegOpnd *arrayOpnd = src;
        IR::RegOpnd *arrayLengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
        if(arrayFastPath)
        {
            if(!srcValueType.IsAnyOptimizedArray())
            {
                if(stringFastPath)
                {
                    // If we don't have info about the src value type or its object type, the array and string fast paths are
                    // generated
                    labelString = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                }
                // A failed array test falls through to the string check (when
                // generated) rather than straight to the helper.
                arrayOpnd = GenerateArrayTest(src, labelHelper, stringFastPath ? labelString : labelHelper, ldLen, false);
            }
            else if(src->IsArrayRegOpnd())
            {
                IR::ArrayRegOpnd *const arrayRegOpnd = src->AsArrayRegOpnd();
                if(arrayRegOpnd->LengthSym())
                {
                    // The array length is already available in a sym; reuse it
                    // instead of reloading from the array object.
                    arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), TyUint32, m_func);
                    DebugOnly(arrayLengthOpnd->FreezeSymValue());
                    autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
                }
            }
        }
        const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

        IR::RegOpnd *lengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseLengthOpnd;
        // Lazily allocates the uint32 register that receives the length.
        const auto EnsureLengthOpnd = [&]()
        {
            if(lengthOpnd)
            {
                return;
            }
            lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
            autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
        };

        if(arrayFastPath)
        {
            if(arrayLengthOpnd)
            {
                lengthOpnd = arrayLengthOpnd;
                autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
                Assert(!stringFastPath);
            }
            else
            {
                // MOV length, [array + offset(length)] -- Load array length
                EnsureLengthOpnd();
                IR::IndirOpnd *const indirOpnd = IR::IndirOpnd::New(arrayOpnd, arrayOffsetOfLength, TyUint32, this->m_func);
                InsertMove(lengthOpnd, indirOpnd, ldLen);
            }
        }

        if(stringFastPath)
        {
            IR::LabelInstr *labelToVar = nullptr;
            if(arrayFastPath)
            {
                // JMP $tovar
                labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                InsertBranch(Js::OpCode::Br, labelToVar, ldLen);

                // $string:
                ldLen->InsertBefore(labelString);
            }

            // CMP [src + offset(type)], static_stringtype -- check src isString
            // JNE $helper
            GenerateStringTest(src, ldLen, labelHelper, nullptr, !arrayFastPath);

            // MOV length, [src + offset(length)] -- Load string length
            EnsureLengthOpnd();
            IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
            InsertMove(lengthOpnd, indirOpnd, ldLen);

            if(arrayFastPath)
            {
                // $toVar:
                ldLen->InsertBefore(labelToVar);
            }
        }
        Assert(lengthOpnd);

        if(ldLen->HasBailOutInfo() && (ldLen->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnIrregularLength)
        {
            // Int-specialized dst: deliver the raw int32 length, bailing out on
            // lengths that don't fit a non-negative int32.
            Assert(ldLen->GetBailOutKind() == IR::BailOutOnIrregularLength);
            Assert(dst->IsInt32());

            // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
            //     test length, length
            //     js $helper
            //     mov dst, length
            //     jmp $fallthrough
            InsertCompareBranch(
                lengthOpnd,
                IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
                Js::OpCode::BrLt_A,
                labelHelper,
                ldLen);
            InsertMove(dst, lengthOpnd, ldLen);
            InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);

            // $helper:
            // (Bail out with IR::BailOutOnIrregularLength)
            ldLen->InsertBefore(labelHelper);
            instrIsInHelperBlock = true;
            ldLen->FreeDst();
            ldLen->FreeSrc1();
            GenerateBailOut(ldLen);
            // The instruction has been consumed; no caller-generated helper call.
            return false;
        }

#if INT32VAR
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
        //     test length, length
        //     js $helper
        InsertCompareBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
            Js::OpCode::BrLt_A,
            labelHelper,
            ldLen);
#else
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative.
        // Additionally, verify that the signed value's width is not greater than 31 bits, since it needs to be tagged.
        //     test length, 0xC0000000
        //     jne $helper
        InsertTestBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0xC0000000, TyUint32, this->m_func, true),
            Js::OpCode::BrNeq_A,
            labelHelper,
            ldLen);
#endif

#if INT32VAR
        //
        // dst_32 = MOV length
        // dst_64 = OR dst_64, Js::AtomTag_IntPtr
        //
        Assert(dst->GetType() == TyVar);
        IR::Opnd *dst32 = dst->Copy(this->m_func);
        dst32->SetType(TyInt32);

        // This will clear the top bits.
        InsertMove(dst32, lengthOpnd, ldLen);

        m_lowererMD.GenerateInt32ToVarConversion(dst, ldLen);
#else
        // dst = SHL length, Js::VarTag_Shift -- restore the var tag on the result
        InsertShift(
            Js::OpCode::Shl_A,
            false /* needFlags */,
            dst,
            lengthOpnd,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            ldLen);

        // dst = ADD dst, AtomTag
        InsertAdd(
            false /* needFlags */,
            dst,
            dst,
            IR::IntConstOpnd::New(Js::AtomTag_Int32, TyUint32, m_func, true),
            ldLen);
#endif

        // JMP $fallthrough
        InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
    }

    // $helper:
    // (caller generates helper call)
    ldLen->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;

    return true; // fast path was generated, helper call will be in a helper block
}
// Generates the surrogate-pair portion of the inline String.prototype.codePointAt
// fast path. lowerChar is expected to already hold the char code at the index
// (the charCodeAt result); this code checks whether it is a high surrogate
// followed by a low surrogate and, if so, combines them into the full code
// point in lowerChar. On any failed check it branches to $charCodeAt, leaving
// lowerChar as the plain char code.
// Params:
//   lastInstr - insertion point; all emitted instructions go before it.
//   func      - function to allocate IR in.
//   strLength - string length (TyUint32/TyMachReg).
//   srcIndex  - the index: either a tagged-int AddrOpnd constant or a TyVar reg.
//   lowerChar - in/out; char code on entry, code point on the combined path.
//   strPtr    - pointer to the string's character buffer.
void
Lowerer::GenerateFastInlineStringCodePointAt(IR::Instr* lastInstr, Func* func, IR::Opnd *strLength, IR::Opnd *srcIndex, IR::RegOpnd *lowerChar, IR::RegOpnd *strPtr)
{
    //// Required State:
    // strLength - UInt32
    // srcIndex - TyVar if not Address
    // lowerChar - TyMachReg
    // strPtr - Addr

    //// Instructions
    // CMP [strLength], srcIndex + 1
    // JBE charCodeAt
    // CMP lowerChar 0xDC00
    // JGE charCodeAt
    // CMP lowerChar 0xD7FF
    // JLE charCodeAt
    // upperChar = MOVZX [strPtr + srcIndex + 1]
    // CMP upperChar 0xE000
    // JGE charCodeAt
    // CMP lowerChar 0xDBFF
    // JLE charCodeAt
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    // :charCodeAt
    // :done

    // Asserts
    // Arm should change to Uint32 for the strLength
    Assert(strLength->GetType() == TyUint32 || strLength->GetType() == TyMachReg);
    Assert(srcIndex->GetType() == TyVar || srcIndex->IsAddrOpnd());
    Assert(lowerChar->GetType() == TyMachReg || lowerChar->GetType() == TyUint32);
    Assert(strPtr->IsRegOpnd());

    IR::RegOpnd *tempReg = IR::RegOpnd::New(TyMachReg, func);
    IR::LabelInstr *labelCharCodeAt = IR::LabelInstr::New(Js::OpCode::Label, func);

    IR::IndirOpnd *tempIndirOpnd;
    if (srcIndex->IsAddrOpnd())
    {
        // Constant index: the index + 1 bound check and the buffer offset can
        // both be computed at JIT time.
        uint32 length = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) + 1U;
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(length, TyUint32, func), Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, (length) * sizeof(wchar_t), TyUint16, func);
    }
    else
    {
        // Dynamic index: untag the var into tempReg, then bound-check index + 1.
        InsertMove(tempReg, srcIndex, lastInstr);
#if INT32VAR
        // On 64-bit, the int32 payload is the low 32 bits of the var; the move
        // through the 32-bit view drops the tag bits.
        IR::Opnd * reg32Bit = tempReg->UseWithNewType(TyInt32, func);
        InsertMove(tempReg, reg32Bit, lastInstr);
        tempReg = reg32Bit->AsRegOpnd();
#else
        // On 32-bit, shift out the tag bit to recover the integer index.
        InsertShift(Js::OpCode::Shr_A, false, tempReg, tempReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func), lastInstr);
#endif
        InsertAdd(false, tempReg, tempReg, IR::IntConstOpnd::New(1, TyInt32, func), lastInstr);
        InsertCompareBranch(strLength, tempReg, Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        if(tempReg->GetSize() != MachPtr)
        {
            tempReg = tempReg->UseWithNewType(TyMachPtr, func)->AsRegOpnd();
        }
        // [strPtr + tempReg * 2]: scale 1 is the log2 scale for 2-byte chars.
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, tempReg, 1, TyUint16, func);
    }

    // By this point, we have added instructions before labelCharCodeAt to check for extra length required for the surrogate pair
    // The branching for that is already handled, all we have to do now is to check for correct values.

    // Validate char is in range [D800, DBFF]; otherwise just get a charCodeAt
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xDC00, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xD7FF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // upperChar = MOVZX r3, [r1 + r3 * 2] -- this is the value of the upper surrogate pair char
    IR::RegOpnd *upperChar = IR::RegOpnd::New(TyInt32, func);
    InsertMove(upperChar, tempIndirOpnd, lastInstr);

    // Validate upper is in range [DC00, DFFF]; otherwise just get a charCodeAt
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xE000, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xDBFF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // (lower - 0xD800) << 10 + second - 0xDC00 + 0x10000 -- 0x10000 - 0xDC00 = 0x2400
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    InsertSub(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0xD800, TyUint32, func), lastInstr);
    InsertShift(Js::OpCode::Shl_A, false, lowerChar, lowerChar, IR::IntConstOpnd::New(10, TyUint32, func), lastInstr);
    InsertAdd(false, lowerChar, lowerChar, upperChar, lastInstr);
    InsertAdd(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0x2400, TyUint32, func), lastInstr);

    lastInstr->InsertBefore(labelCharCodeAt);
}
  13996. bool
  13997. Lowerer::GenerateFastInlineStringFromCodePoint(IR::Instr* instr)
  13998. {
  13999. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  14000. // ArgOut sequence
  14001. // s8.var = StartCall 2 (0x2).i32 #000c
  14002. // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
  14003. // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
  14004. // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
  14005. // s0[LikelyTaggedInt].var = CallDirect String_FromCodePoint.u32, arg1(s11)<0>.u32 #001c
  14006. IR::Opnd * linkOpnd = instr->GetSrc2();
  14007. IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  14008. linkOpnd = tmpInstr->GetSrc2();
  14009. #if DBG
  14010. IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
  14011. Assert(argCount == 2);
  14012. #endif
  14013. IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
  14014. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  14015. IR::Opnd *src1 = argInstr->GetSrc1();
  14016. if (src1->GetValueType().IsLikelyNumber())
  14017. {
  14018. //Trying to generate this code
  14019. // MOV resultOpnd, dst
  14020. // MOV fromCharCodeIntArgOpnd, src1
  14021. // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
  14022. // JAE $Helper
  14023. // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
  14024. //
  14025. // JAE $labelWCharStringCheck <
  14026. // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
  14027. // TST resultOpnd, resultOpnd //Check for null
  14028. // JEQ $helper
  14029. // JMP $Done
  14030. //
  14031. //$labelWCharStringCheck:
  14032. // resultOpnd = Call HelperGetStringForCharW
  14033. // JMP $Done
  14034. //$helper:
  14035. IR::RegOpnd * resultOpnd = nullptr;
  14036. if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
  14037. {
  14038. resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  14039. }
  14040. else
  14041. {
  14042. resultOpnd = instr->GetDst()->AsRegOpnd();
  14043. }
  14044. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14045. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  14046. IR::RegOpnd * fromCodePointIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  14047. IR::AutoReuseOpnd autoReuseFromCodePointIntArgOpnd(fromCodePointIntArgOpnd, instr->m_func);
  14048. InsertMove(fromCodePointIntArgOpnd, src1, instr);
  14049. //Check for tagged int and get the untagged version.
  14050. fromCodePointIntArgOpnd = GenerateUntagVar(fromCodePointIntArgOpnd, labelHelper, instr);
  14051. GenerateGetSingleCharString(fromCodePointIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, true);
  14052. instr->InsertBefore(labelHelper);
  14053. instr->InsertAfter(doneLabel);
  14054. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  14055. }
  14056. return true;
  14057. }
  14058. bool
  14059. Lowerer::GenerateFastInlineStringFromCharCode(IR::Instr* instr)
  14060. {
  14061. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  14062. // ArgOut sequence
  14063. // s8.var = StartCall 2 (0x2).i32 #000c
  14064. // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
  14065. // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
  14066. // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
  14067. // s0[LikelyTaggedInt].var = CallDirect String_FromCharCode.u32, arg1(s11)<0>.u32 #001c
  14068. IR::Opnd * linkOpnd = instr->GetSrc2();
  14069. IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  14070. linkOpnd = tmpInstr->GetSrc2();
  14071. #if DBG
  14072. IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
  14073. Assert(argCount == 2);
  14074. #endif
  14075. IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
  14076. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  14077. IR::Opnd *src1 = argInstr->GetSrc1();
  14078. if (src1->GetValueType().IsLikelyNumber())
  14079. {
  14080. //Trying to generate this code
  14081. // MOV resultOpnd, dst
  14082. // MOV fromCharCodeIntArgOpnd, src1
  14083. // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
  14084. // JAE $Helper
  14085. // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
  14086. //
  14087. // JAE $labelWCharStringCheck <
  14088. // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
  14089. // TST resultOpnd, resultOpnd //Check for null
  14090. // JEQ $helper
  14091. // JMP $Done
  14092. //
  14093. //$labelWCharStringCheck:
  14094. // resultOpnd = Call HelperGetStringForCharW
  14095. // JMP $Done
  14096. //$helper:
  14097. IR::RegOpnd * resultOpnd = nullptr;
  14098. if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
  14099. {
  14100. resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  14101. }
  14102. else
  14103. {
  14104. resultOpnd = instr->GetDst()->AsRegOpnd();
  14105. }
  14106. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  14107. IR::RegOpnd * fromCharCodeIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  14108. IR::AutoReuseOpnd autoReuseFromCharCodeIntArgOpnd(fromCharCodeIntArgOpnd, instr->m_func);
  14109. InsertMove(fromCharCodeIntArgOpnd, src1, instr);
  14110. //Check for tagged int and get the untagged version.
  14111. fromCharCodeIntArgOpnd = GenerateUntagVar(fromCharCodeIntArgOpnd, labelHelper, instr);
  14112. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14113. GenerateGetSingleCharString(fromCharCodeIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, false);
  14114. instr->InsertBefore(labelHelper);
  14115. instr->InsertAfter(doneLabel);
  14116. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  14117. }
  14118. return true;
  14119. }
  14120. void
  14121. Lowerer::GenerateGetSingleCharString(IR::RegOpnd * charCodeOpnd, IR::Opnd * resultOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * doneLabel, IR::Instr * instr, bool isCodePoint)
  14122. {
  14123. // MOV cacheReg, CharStringCache
  14124. // CMP charCodeOpnd, Js::ScriptContext::CharStringCacheSize
  14125. // JAE $labelWCharStringCheck <
  14126. // MOV resultOpnd, cacheReg[charCodeOpnd]
  14127. // TST resultOpnd, resultOpnd //Check for null
  14128. // JEQ $helper
  14129. // JMP $Done
  14130. //
  14131. //$labelWCharStringCheck:
  14132. // Arg1 = charCodeOpnd
  14133. // Arg0 = cacheReg
  14134. // resultOpnd = Call HelperGetStringForCharW/CodePoint
  14135. // JMP $Done
  14136. //$helper:
  14137. IR::LabelInstr *labelWCharStringCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14138. //Try to load from in CharStringCacheA
  14139. IR::RegOpnd *cacheRegOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  14140. IR::AutoReuseOpnd autoReuseCacheRegOpnd(cacheRegOpnd, instr->m_func);
  14141. Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
  14142. InsertMove(cacheRegOpnd, this->LoadLibraryValueOpnd(instr, LibraryValue::ValueCharStringCache), instr);
  14143. InsertCompareBranch(charCodeOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint32, this->m_func), Js::OpCode::BrGe_A, true, labelWCharStringCheck, instr);
  14144. InsertMove(resultOpnd, IR::IndirOpnd::New(cacheRegOpnd, charCodeOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, instr->m_func), instr);
  14145. InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrEq_A, labelHelper, instr);
  14146. InsertMove(instr->GetDst(), resultOpnd, instr);
  14147. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  14148. instr->InsertBefore(labelWCharStringCheck);
  14149. IR::JnHelperMethod helperMethod;
  14150. if (isCodePoint)
  14151. {
  14152. helperMethod = IR::HelperGetStringForCharCodePoint;
  14153. }
  14154. else
  14155. {
  14156. InsertMove(charCodeOpnd, charCodeOpnd->UseWithNewType(TyUint16, instr->m_func), instr);
  14157. helperMethod = IR::HelperGetStringForChar;
  14158. }
  14159. //Try to load from in CharStringCacheW or CharStringCacheCodePoint, this is a helper call.
  14160. this->m_lowererMD.LoadHelperArgument(instr, charCodeOpnd);
  14161. this->m_lowererMD.LoadHelperArgument(instr, cacheRegOpnd);
  14162. IR::Instr* helperCallInstr = IR::Instr::New(Js::OpCode::Call, resultOpnd, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
  14163. instr->InsertBefore(helperCallInstr);
  14164. this->m_lowererMD.LowerCall(helperCallInstr, 0);
  14165. InsertMove(instr->GetDst(), resultOpnd, instr);
  14166. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  14167. }
// Lowers the fast path for a CallDirect to the global parseInt(x).
// When the profiled argument is likely a number, a tagged-int argument is its
// own parseInt result, so the value is copied straight to the destination and
// the helper call is skipped; the argout sequence is relocated under the
// helper label so it only executes on the slow path.
// Always returns true; the caller still lowers the CallDirect itself.
bool
Lowerer::GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    // ArgOut sequence
    // s8.var = StartCall 2 (0x2).i32 #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, global object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to parseInt
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect GlobalObject_ParseInt.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    // tmpInstr is the ArgOut_A_InlineSpecialized heading the argout chain.
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();
#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif
    // Walk back to the ArgOut_A carrying the actual parseInt argument.
    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *parseIntArgOpnd = argInstr->GetSrc1();
    if (parseIntArgOpnd->GetValueType().IsLikelyNumber())
    {
        //If likely int check for tagged int and set the dst
        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if (!parseIntArgOpnd->IsTaggedInt())
        {
            // Not provably tagged at JIT time: emit a runtime tagged-int test
            // that falls to the helper path when it fails.
            this->m_lowererMD.GenerateSmIntTest(parseIntArgOpnd, instr, labelHelper);
        }
        if (instr->GetDst())
        {
            // parseInt of a tagged int is the value itself.
            this->m_lowererMD.CreateAssign(instr->GetDst(), parseIntArgOpnd, instr);
        }
        InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);
        // Move StartCall/argouts under labelHelper so they only run on the slow path.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
// Lowers InlineArrayPop. When the array qualifies for a fast path, emits the
// inline pop sequence ahead of the helper call. Likely-native arrays route
// failure cases (length == 0, failed array test) to a bailout rather than the
// runtime helper, because the helper assumes a Var array.
void
Lowerer::GenerateFastInlineArrayPop(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPop);
    IR::Opnd *arrayOpnd = instr->GetSrc1();
    IR::LabelInstr *bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    bool isLikelyNativeArray = arrayOpnd->GetValueType().IsLikelyNativeArray();
    if (ShouldGenerateArrayFastPath(arrayOpnd, false, false, false))
    {
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(isLikelyNativeArray)
        {
            //We bailOut on cases like length == 0, Array Test failing cases (Runtime helper cannot handle these cases)
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We jump to helper on cases like length == 0, Array Test failing cases
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, labelHelper);
        }
        instr->InsertBefore(labelHelper);
        ///JMP to $doneLabel
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    else
    {
        //We assume here that the array will be a Var array. - Runtime Helper calls assume this.
        Assert(!isLikelyNativeArray);
    }
    instr->InsertAfter(doneLabel);
    if(isLikelyNativeArray)
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
    }
    // Emit the shared helper-call tail of the pop fast path.
    GenerateHelperToArrayPopFastPath(instr, doneLabel, bailOutLabelHelper);
}
  14247. bool
  14248. Lowerer::ShouldGenerateStringReplaceFastPath(IR::Instr * callInstr, IntConstType argCount)
  14249. {
  14250. // a.replace(b,c)
  14251. // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
  14252. //
  14253. // argout sequence:
  14254. // arg1(s12)<0>.var = ArgOut_A s2.var, s11.var #0014 <---- a
  14255. // arg2(s13)<4>.var = ArgOut_A s3.var, arg1(s12)<0>.var #0018 <---- b
  14256. // arg3(s14)<8>.var = ArgOut_A s4.var, arg2(s13)<4>.var #001c <---- c
  14257. // s0[LikelyString].var = CallI s5[ffunc].var, arg3(s14)<8>.var #0020
  14258. IR::Opnd *linkOpnd = callInstr->GetSrc2();
  14259. Assert(argCount == 2);
  14260. while(linkOpnd->IsSymOpnd())
  14261. {
  14262. IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
  14263. StackSym *sym = src2->m_sym->AsStackSym();
  14264. Assert(sym->m_isSingleDef);
  14265. IR::Instr *argInstr = sym->m_instrDef;
  14266. Assert(argCount >= 0);
  14267. // check to see if 'a' and 'c' are likely strings
  14268. if((argCount == 2 || argCount == 0) && (!argInstr->GetSrc1()->GetValueType().IsLikelyString()))
  14269. {
  14270. return false;
  14271. }
  14272. // we want 'b' to be regex. Don't generate fastpath if it is a tagged int
  14273. if((argCount == 1) && (argInstr->GetSrc1()->IsTaggedInt()))
  14274. {
  14275. return false;
  14276. }
  14277. argCount--;
  14278. linkOpnd = argInstr->GetSrc2();
  14279. }
  14280. return true;
  14281. }
// Emits the fast path for a.replace(b, c) where 'a' and 'c' are (likely)
// strings and 'b' is a JavascriptRegExp:
//   strOpnd --> a, src1 --> b, src2 --> c
// Runtime type checks that fail jump to labelHelper; otherwise the regex
// replace helper is called directly. Returns true (fast path emitted).
bool
Lowerer::GenerateFastReplace(IR::Opnd* strOpnd, IR::Opnd* src1, IR::Opnd* src2, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    IR::Opnd * callDst = callInstr->GetDst();
    Assert(strOpnd->GetValueType().IsLikelyString() && src2->GetValueType().IsLikelyString());
    if(!strOpnd->GetValueType().IsString())
    {
        // Only "likely" a string: emit a runtime string type check.
        if(!strOpnd->IsRegOpnd())
        {
            IR::RegOpnd *strOpndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(strOpndReg, strOpnd, insertInstr);
            strOpnd = strOpndReg;
        }
        this->GenerateStringTest(strOpnd->AsRegOpnd(), insertInstr, labelHelper);
    }
    if(!src1->IsNotTaggedValue())
    {
        // The regex must be a heap object; tagged values go to the helper.
        m_lowererMD.GenerateObjectTest(src1, insertInstr, labelHelper);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptRegExp);
    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!src1->IsRegOpnd())
    {
        IR::RegOpnd *src1Reg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(src1Reg, src1, insertInstr);
        src1 = src1Reg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(src1->AsRegOpnd(), 0, TyMachPtr, insertInstr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertInstr);
    if(!src2->GetValueType().IsString())
    {
        // Replacement only "likely" a string: runtime string type check.
        if(!src2->IsRegOpnd())
        {
            IR::RegOpnd *src2Reg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(src2Reg, src2, insertInstr);
            src2 = src2Reg;
        }
        this->GenerateStringTest(src2->AsRegOpnd(), insertInstr, labelHelper);
    }
    //scriptContext, pRegEx, pThis, pReplace (to be pushed in reverse order)
    // pReplace, pThis, pRegEx
    this->m_lowererMD.LoadHelperArgument(insertInstr, src2);
    this->m_lowererMD.LoadHelperArgument(insertInstr, strOpnd);
    this->m_lowererMD.LoadHelperArgument(insertInstr, src1);
    // script context
    LoadScriptContext(insertInstr);
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, insertInstr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    insertInstr->InsertBefore(helperCallInstr);
    // Pick the helper variant depending on whether the call result is used.
    if(callDst)
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultUsed);
    }
    else
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultNotUsed);
    }
    return true;
}
  14355. ///----
// Emits the fast path for a.split(b[, c]) and a.match(b) lowered as CallDirect:
// - split: only when the limit 'c' is absent (limit defaults to UINT_MAX), and
//   'a' is a string and 'b' is a regex.
// - match: when 'a' is a string and 'b' is a regex.
// Runtime checks that fail fall through to labelHelper, which re-executes the
// relocated argouts and the original helper call.
void
Lowerer::GenerateFastInlineStringSplitMatch(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();
    //helperCallOpnd
    IR::Opnd * src1 = instr->GetSrc1();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    IR::Opnd * argsOpnd[2];
    if(!instr->FetchOperands(argsOpnd, 2))
    {
        // More/less than two operands: no fast path.
        return;
    }
    if(!argsOpnd[0]->GetValueType().IsLikelyString() || argsOpnd[1]->IsTaggedInt())
    {
        // 'a' must be likely a string; 'b' (a regex) can never be a tagged int.
        return;
    }
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!argsOpnd[0]->GetValueType().IsString())
    {
        // Only "likely" a string: emit a runtime string type check.
        if(!argsOpnd[0]->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, argsOpnd[0], instr);
            argsOpnd[0] = opndReg;
        }
        this->GenerateStringTest(argsOpnd[0]->AsRegOpnd(), instr, labelHelper);
    }
    if(!argsOpnd[1]->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(argsOpnd[1], instr, labelHelper);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!argsOpnd[1]->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, argsOpnd[1], instr);
        argsOpnd[1] = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(argsOpnd[1]->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);
    // [stackAllocationPointer, ]scriptcontext, regexp, input[, limit] (to be pushed in reverse order)
    if(src1->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperString_Split)
    {
        //limit
        //As we are optimizing only for two operands, make limit UINT_MAX
        IR::Opnd* limit = IR::IntConstOpnd::New(UINT_MAX, TyUint32, instr->m_func);
        this->m_lowererMD.LoadHelperArgument(instr, limit);
    }
    //input, regexp
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[0]);
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[1]);
    // script context
    LoadScriptContext(instr);
    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst && instr->dstIsTempObject)
    {
        // Result is a temp object: use the MayBeTemp helper variants and pass a
        // stack-allocated array for the result.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
        case IR::JnHelperMethod::HelperString_Split:
            helperMethod = IR::JnHelperMethod::HelperRegExp_SplitResultUsedAndMayBeTemp;
            break;
        case IR::JnHelperMethod::HelperString_Match:
            helperMethod = IR::JnHelperMethod::HelperRegExp_MatchResultUsedAndMayBeTemp;
            break;
        default:
            Assert(false);
            __assume(false);
        }
        // Allocate some space on the stack for the result array
        IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
        autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
        stackAllocationOpnd->SetValueType(callDst->GetValueType());
        GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
        m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
    }
    else
    {
        // Pick the helper variant based on whether the result is used.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
        case IR::JnHelperMethod::HelperString_Split:
            helperMethod =
                callDst
                    ? IR::JnHelperMethod::HelperRegExp_SplitResultUsed
                    : IR::JnHelperMethod::HelperRegExp_SplitResultNotUsed;
            break;
        case IR::JnHelperMethod::HelperString_Match:
            helperMethod =
                callDst
                    ? IR::JnHelperMethod::HelperRegExp_MatchResultUsed
                    : IR::JnHelperMethod::HelperRegExp_MatchResultNotUsed;
            break;
        default:
            Assert(false);
            __assume(false);
        }
    }
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    // Fast path jumps over the helper path to doneLabel.
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// Emits the fast path for a.exec(b) lowered as CallDirect, where 'a' is a
// regex and 'b' is a string. After the type checks, an optional even faster
// path (ExecBOIFastPath) handles global regexes whose compiled program is a
// begin-of-input 2-char literal: if the first DWORD of the input doesn't match
// the literal (or the string is too short), lastIndex is reset and null is
// produced without calling the helper at all.
void
Lowerer::GenerateFastInlineRegExpExec(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    IR::Opnd * argsOpnd[2];
    if (!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }
    IR::Opnd *opndString = argsOpnd[1];
    if(!opndString->GetValueType().IsLikelyString() || argsOpnd[0]->IsTaggedInt())
    {
        // 'b' must be likely a string; 'a' (a regex) can never be a tagged int.
        return;
    }
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!opndString->GetValueType().IsString())
    {
        // Only "likely" a string: emit a runtime string type check.
        if(!opndString->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, opndString, instr);
            opndString = opndReg;
        }
        this->GenerateStringTest(opndString->AsRegOpnd(), instr, labelHelper);
    }
    IR::Opnd *opndRegex = argsOpnd[0];
    if(!opndRegex->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(opndRegex, instr, labelHelper);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!opndRegex->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, opndRegex, instr);
        opndRegex = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRegex->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    if (!PHASE_OFF(Js::ExecBOIFastPathPhase, m_func))
    {
        // Load pattern from regex operand
        IR::RegOpnd *opndPattern = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndPattern,
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, m_func),
            instr);
        // Load program from pattern
        IR::RegOpnd *opndProgram = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndProgram,
            IR::IndirOpnd::New(opndPattern, offsetof(UnifiedRegex::RegexPattern, rep) + offsetof(UnifiedRegex::RegexPattern::UnifiedRep, program), TyMachPtr, m_func),
            instr);
        IR::LabelInstr *labelFastHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // We want the program's tag to be BOILiteral2Tag
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)UnifiedRegex::Program::GetOffsetOfTag(), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::Program::GetBOILiteral2Tag(), TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);
        // Test the program's flags for "global"
        InsertTestBranch(
            IR::IndirOpnd::New(opndProgram, offsetof(UnifiedRegex::Program, flags), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::GlobalRegexFlag, TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);
        IR::LabelInstr *labelNoMatch = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // If string length < 2...
        InsertCompareBranch(
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
            IR::IntConstOpnd::New(2, TyUint32, m_func),
            Js::OpCode::BrLt_A,
            labelNoMatch,
            instr);
        // ...or the DWORD doesn't match the pattern...
        IR::RegOpnd *opndBuffer = IR::RegOpnd::New(TyMachReg, m_func);
        LowererMD::CreateAssign(
            opndBuffer,
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, m_func),
            instr);
        IR::LabelInstr *labelGotString = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // A null buffer means the string is not flattened yet; call
        // HelperString_GetSz to materialize it.
        InsertTestBranch(opndBuffer, opndBuffer, Js::OpCode::BrNeq_A, labelGotString, instr);
        m_lowererMD.LoadHelperArgument(instr, opndString);
        IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, opndBuffer, IR::HelperCallOpnd::New(IR::HelperString_GetSz, m_func), m_func);
        instr->InsertBefore(instrCall);
        m_lowererMD.LowerCall(instrCall, 0);
        instr->InsertBefore(labelGotString);
        // Compare the first two characters (one DWORD) against the literal.
        IR::RegOpnd *opndBufferDWORD = IR::RegOpnd::New(TyUint32, m_func);
        LowererMD::CreateAssign(
            opndBufferDWORD,
            IR::IndirOpnd::New(opndBuffer, 0, TyUint32, m_func),
            instr);
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)(UnifiedRegex::Program::GetOffsetOfRep() + UnifiedRegex::Program::GetOffsetOfBOILiteral2Literal()), TyUint32, m_func),
            opndBufferDWORD,
            Js::OpCode::BrEq_A,
            labelFastHelper,
            instr);
        // ...then set the last index to 0...
        instr->InsertBefore(labelNoMatch);
        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, m_func),
            IR::AddrOpnd::NewNull(m_func),
            instr);
        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, m_func),
            IR::IntConstOpnd::New(0, TyUint32, m_func),
            instr);
        // ...and set the dst to null...
        if (callDst)
        {
            LowererMD::CreateAssign(
                callDst,
                LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
                instr);
        }
        // ...and we're done.
        this->InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(labelFastHelper);
    }
    // [stackAllocationPointer, ]scriptcontext, regexp, string (to be pushed in reverse order)
    //string, regexp
    this->m_lowererMD.LoadHelperArgument(instr, opndString);
    this->m_lowererMD.LoadHelperArgument(instr, opndRegex);
    // script context
    LoadScriptContext(instr);
    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst)
    {
        if(instr->dstIsTempObject)
        {
            // Result is a temp object: pass a stack-allocated result array.
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsedAndMayBeTemp;
            // Allocate some space on the stack for the result array
            IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
            autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
            stackAllocationOpnd->SetValueType(callDst->GetValueType());
            GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
            m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
        }
        else
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsed;
        }
    }
    else
    {
        helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultNotUsed;
    }
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    // Fast path jumps over the relocated helper path to doneLabel.
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// Moves a CallDirect's entire argument-setup sequence (the
// ArgOut_A_InlineSpecialized head, every ArgOut_A in its link chain, and the
// terminating StartCall) to just after labelHelper, so the argouts only
// execute on the helper (slow) path.
// Note: each instruction is placed with labelHelper->InsertAfter, so later
// insertions land closer to the label; the final layout after the label is
// StartCall, ArgOut_A..., ArgOut_A_InlineSpecialized - original program order.
void
Lowerer::RelocateCallDirectToHelperPath(IR::Instr* argoutInlineSpecialized, IR::LabelInstr* labelHelper)
{
    IR::Opnd *linkOpnd = argoutInlineSpecialized->GetSrc2(); //ArgOut_A_InlineSpecialized src2; link to actual argouts.
    argoutInlineSpecialized->Unlink();
    labelHelper->InsertAfter(argoutInlineSpecialized);
    // Walk the argout link chain (each ArgOut_A's src2 points to the previous one).
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
        StackSym *sym = src2->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);
        linkOpnd = argInstr->GetSrc2();
    }
    // Move startcall
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);
}
  14682. bool
  14683. Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFunction index)
  14684. {
  14685. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  14686. //CallDirect src2
  14687. IR::Opnd * linkOpnd = instr->GetSrc2();
  14688. //ArgOut_A_InlineSpecialized
  14689. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  14690. IR::Opnd * argsOpnd[2] = {0};
  14691. bool result = instr->FetchOperands(argsOpnd, 2);
  14692. Assert(result);
  14693. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14694. instr->InsertAfter(doneLabel);
  14695. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  14696. bool success = this->m_lowererMD.GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
  14697. instr, instr, labelHelper, doneLabel);
  14698. instr->InsertBefore(labelHelper);
  14699. if (!success)
  14700. {
  14701. return false;
  14702. }
  14703. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  14704. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  14705. return true;
  14706. }
  14707. void
  14708. Lowerer::GenerateFastInlineMathClz32(IR::Instr* instr)
  14709. {
  14710. Assert(instr->GetDst()->IsInt32());
  14711. Assert(instr->GetSrc1()->IsInt32());
  14712. m_lowererMD.GenerateClz(instr);
  14713. }
  14714. void
  14715. Lowerer::GenerateFastInlineMathImul(IR::Instr* instr)
  14716. {
  14717. IR::Opnd* src1 = instr->GetSrc1();
  14718. IR::Opnd* src2 = instr->GetSrc2();
  14719. IR::Opnd* dst = instr->GetDst();
  14720. Assert(dst->IsInt32());
  14721. Assert(src1->IsInt32());
  14722. Assert(src2->IsInt32());
  14723. IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, dst, src1, src2, instr->m_func);
  14724. instr->InsertBefore(imul);
  14725. LowererMD::Legalize(imul);
  14726. instr->Remove();
  14727. }
  14728. void
  14729. Lowerer::GenerateFastInlineMathFround(IR::Instr* instr)
  14730. {
  14731. IR::Opnd* src1 = instr->GetSrc1();
  14732. IR::Opnd* dst = instr->GetDst();
  14733. Assert(dst->IsFloat());
  14734. Assert(src1->IsFloat());
  14735. IR::Instr* fcvt64to32 = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src1, instr->m_func);
  14736. instr->InsertBefore(fcvt64to32);
  14737. LowererMD::Legalize(fcvt64to32);
  14738. if (dst->IsFloat64())
  14739. {
  14740. IR::Instr* fcvt32to64 = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, dst, instr->m_func);
  14741. instr->InsertBefore(fcvt32to64);
  14742. LowererMD::Legalize(fcvt32to64);
  14743. }
  14744. instr->Remove();
  14745. return;
  14746. }
  14747. bool
  14748. Lowerer::GenerateFastInlineStringReplace(IR::Instr * instr)
  14749. {
  14750. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  14751. //CallDirect src2
  14752. IR::Opnd * linkOpnd = instr->GetSrc2();
  14753. //ArgOut_A_InlineSpecialized
  14754. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  14755. IR::Opnd * argsOpnd[3] = {0};
  14756. bool result = instr->FetchOperands(argsOpnd, 3);
  14757. Assert(result);
  14758. AnalysisAssert(argsOpnd[0] && argsOpnd[1] && argsOpnd[2]);
  14759. if (!argsOpnd[0]->GetValueType().IsLikelyString()
  14760. || argsOpnd[1]->GetValueType().IsNotObject()
  14761. || !argsOpnd[2]->GetValueType().IsLikelyString())
  14762. {
  14763. return false;
  14764. }
  14765. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14766. instr->InsertAfter(doneLabel);
  14767. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  14768. bool success = this->GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2],
  14769. instr, instr, labelHelper, doneLabel);
  14770. instr->InsertBefore(labelHelper);
  14771. if (!success)
  14772. {
  14773. return false;
  14774. }
  14775. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  14776. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  14777. return true;
  14778. }
  14779. #ifdef ENABLE_DOM_FAST_PATH
  14780. /*
  14781. Lower the DOMFastPathGetter opcode
  14782. We have inliner generated bytecode:
  14783. (dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
  14784. (dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
  14785. method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
  14786. We'll convert it to a JavascriptFunction entry method call:
  14787. CALL Helper funcObject CallInfo(CallFlags_Value, 3) thisObj
  14788. */
// Lowers DOMFastPathGetter into a direct JavascriptFunction entry-point call:
// CALL Helper funcObject CallInfo(CallFlags_Value, 1 arg) thisObj
// The funcObject/thisObj are recovered from the ExtendArg_A chain hanging off
// src2 (see the block comment above this function).
void
Lowerer::LowerFastInlineDOMFastPathGetter(IR::Instr* instr)
{
    IR::Opnd* helperOpnd = instr->UnlinkSrc1();
    Assert(helperOpnd->IsHelperCallOpnd());
    IR::Opnd *linkOpnd = instr->UnlinkSrc2();
    Assert(linkOpnd->IsRegOpnd());
    // Second ExtendArg_A in the chain: carries funcObject.
    IR::Instr* prevInstr = linkOpnd->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* funcObj = prevInstr->GetSrc1();
    Assert(funcObj->IsRegOpnd());
    // If the Extended_arg was CSE's across a loop or hoisted out of a loop,
    // adding a new reference down here might cause funcObj to now be liveOnBackEdge.
    // Use the addToLiveOnBackEdgeSyms bit vector to add it to a loop if we encounter one.
    // We'll clear it once we reach the Extended arg.
    this->addToLiveOnBackEdgeSyms->Set(funcObj->AsRegOpnd()->m_sym->m_id);
    Assert(prevInstr->GetSrc2() != nullptr);
    // First ExtendArg_A in the chain: carries thisObject.
    prevInstr = prevInstr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* thisObj = prevInstr->GetSrc1();
    Assert(prevInstr->GetSrc2() == nullptr);
    Assert(thisObj->IsRegOpnd());
    this->addToLiveOnBackEdgeSyms->Set(thisObj->AsRegOpnd()->m_sym->m_id);
    // Push the entry-point arguments: funcObj, callInfo, thisObj.
    const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_Value, 1, m_func);
    m_lowererMD.LoadHelperArgument(instr, thisObj);
    m_lowererMD.LoadHelperArgument(instr, info);
    m_lowererMD.LoadHelperArgument(instr, funcObj);
    instr->m_opcode = Js::OpCode::Call;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperOpnd->AsHelperCallOpnd()->m_fnHelper, 3, m_func);
    instr->SetSrc1(helperCallOpnd);
    m_lowererMD.LowerCall(instr, 3); // we have funcobj, callInfo, and this.
}
  14821. #endif
// Lowers InlineArrayPush. When the array qualifies for a fast path (and the
// element type is compatible with a likely-native array), emits the inline
// push sequence ahead of the helper call. For likely-native arrays, a
// conventional-native-array bailout is lowered and its label branches to done.
void
Lowerer::GenerateFastInlineArrayPush(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush);
    IR::Opnd * baseOpnd = instr->GetSrc1();
    IR::Opnd * srcOpnd = instr->GetSrc2();
    // If the push result (new length) is consumed, the fast path must produce it.
    bool returnLength = false;
    if(instr->GetDst())
    {
        returnLength = true;
    }
    IR::LabelInstr * bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    //Don't Generate fast path according to ShouldGenerateArrayFastPath()
    //AND, Don't Generate fast path if the array is LikelyNative and the element is not specialized
    if(ShouldGenerateArrayFastPath(baseOpnd, false, false, false) &&
        !(baseOpnd->GetValueType().IsLikelyNativeArray() && srcOpnd->IsVar()))
    {
        GenerateFastPush(baseOpnd, srcOpnd, instr, instr, labelHelper, doneLabel, bailOutLabelHelper, returnLength);
        instr->InsertBefore(labelHelper);
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    if(baseOpnd->GetValueType().IsLikelyNativeArray())
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
        InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabelHelper);
    }
    // Emit the shared helper-call tail of the push fast path.
    GenerateHelperToArrayPushFastPath(instr, bailOutLabelHelper);
}
// GenerateFastPop
// Emits the inline fast path for an array "pop" operation on baseOpndParam:
// proves (or assumes, from the profiled ValueType) that the base is a JavascriptArray,
// loads the array length into a scratch register, rejects the empty-array case to
// bailOutLabelHelper, decrements the length to form the index of the last element,
// and delegates the element access/length update to GenerateFastElemICommon.
// Always returns true (the fast path was emitted); slow cases jump to
// labelHelper/bailOutLabelHelper supplied by the caller.
bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    // TEST baseOpnd, AtomTag -- check baseOpnd not tagged int
    // JNE $helper
    // CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    // JNE $helper
    // MOV r2, [baseOpnd + offset(length)] -- Load array length
    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not yet proven to be an array: emit the runtime array test, which jumps to
        // bailOutLabelHelper when the base is not an array. After the test, treat the
        // value as a definite object and conservatively assume it may have missing values.
        arrayOpnd = GenerateArrayTest(baseOpnd, bailOutLabelHelper, bailOutLabelHelper, callInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // If the array operand carries a hoisted length sym, reuse it rather than
        // reloading the length from memory below.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            // The length sym's value must not be redefined by this fast path.
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    // The length is decremented below, so copy it into a mutable temp first; the
    // hoisted length sym (if any) must stay unchanged.
    IR::AutoReuseOpnd autoReuseMutableArrayLengthOpnd;
    {
        IR::RegOpnd *const mutableArrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseMutableArrayLengthOpnd.Initialize(mutableArrayLengthOpnd, m_func);
        if(arrayLengthOpnd)
        {
            // mov mutableArrayLength, arrayLength
            InsertMove(mutableArrayLengthOpnd, arrayLengthOpnd, callInstr);
        }
        else
        {
            // MOV mutableArrayLength, [array + offset(length)] -- Load array length
            // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
            InsertMove(
                mutableArrayLengthOpnd,
                IR::IndirOpnd::New(
                    arrayOpnd,
                    Js::JavascriptArray::GetOffsetOfLength(),
                    mutableArrayLengthOpnd->GetType(),
                    this->m_func),
                callInstr);
        }
        arrayLengthOpnd = mutableArrayLengthOpnd;
    }

    // Empty array: cannot pop inline; take the bail-out/helper path.
    InsertCompareBranch(arrayLengthOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), Js::OpCode::BrEq_A, true, bailOutLabelHelper, callInstr);
    // length - 1 is the index of the element being popped.
    InsertSub(false, arrayLengthOpnd, arrayLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func),callInstr);
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            callInstr,
            false,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            true /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            false/* = returnLength */,
            bailOutLabelHelper /* = bailOutLabelInstr*/);
    // The fast path was generated only for (likely) JavascriptArrays, never typed arrays.
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);
    return true;
}
// GenerateFastPush
// Emits the inline fast path for an array "push" of a single element (src) onto
// baseOpndParam: proves (or assumes, from the profiled ValueType) that the base is
// a JavascriptArray, loads the current length (the index at which to store),
// delegates segment/length handling to GenerateFastElemICommon, and finally stores
// src into the resulting element address. When returnLength is true, the
// instruction's dst receives the updated length (handled inside
// GenerateFastElemICommon). Always returns true; slow cases jump to
// labelHelper/bailOutLabelHelper supplied by the caller.
bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper, bool returnLength)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    // TEST baseOpnd, AtomTag -- check baseOpnd not tagged int
    // JNE $helper
    // CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    // JNE $helper
    // MOV r2, [baseOpnd + offset(length)] -- Load array length
    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not yet proven to be an array: emit the runtime array test (jumps to
        // labelHelper on failure), then treat the value as a definite object that
        // may have missing values.
        arrayOpnd = GenerateArrayTest(baseOpnd, labelHelper, labelHelper, insertInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse a hoisted length sym when the array operand provides one.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            // The length sym's value must not be redefined by this fast path.
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
    if(!arrayLengthOpnd)
    {
        // MOV arrayLength, [array + offset(length)] -- Load array length
        // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
        arrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        InsertMove(
            arrayLengthOpnd,
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), arrayLengthOpnd->GetType(), this->m_func),
            insertInstr);
    }
    // [array + length] is the slot the pushed element will be stored into.
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);
    if (returnLength && src->IsEqual(insertInstr->GetDst()))
    {
        //If the dst is same as the src, then dst is going to be overridden by GenerateFastElemICommon in process of updating the length.
        //Save it in a temp register.
        IR::RegOpnd *opnd = IR::RegOpnd::New(src->GetType(), this->m_func);
        InsertMove(opnd, src, insertInstr);
        src = opnd;
    }

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            insertInstr,
            true,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            false /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            returnLength,
            bailOutLabelHelper);
    // The fast path was generated only for (likely) JavascriptArrays, never typed arrays.
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);

    // MOV [r3 + r2], src
    InsertMoveWithBarrier(indirOpnd, src, insertInstr);
    return true;
}
// GenerateArgOutForInlineeStackArgs
// Expands a stack-args pass-through (f.apply-style) call inside an inlinee: the
// inlinee's actual arguments already live in the inlinee argv slots on the frame,
// so compute the address of the first real argument (past 'this'), then emit one
// ArgOut_A_Dynamic per actual, from last to first. Argument i lands in dynamic
// argument position i + 3 (positions 1-3 hold the function object, callInfo and
// 'this'). Returns an IntConstOpnd holding the full actual count (including
// 'this') for the caller to use as the call's argument count.
// Note: stackArgsInstr is not referenced on this path.
IR::Opnd*
Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    Assert(callInstr->m_func->IsInlinee());
    Func *func = callInstr->m_func;
    uint32 actualCount = func->actualCount - 1; // don't count this pointer
    Assert(actualCount < Js::InlineeCallInfo::MaxInlineeArgoutCount);

    const auto firstRealArgStackSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, func);

    // argInOpnd = &inlineeArgv[1] -- base address of the real arguments.
    IR::RegOpnd* argInOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseArgInOpnd(argInOpnd, func);
    InsertLea(argInOpnd, firstArg, callInstr);

    IR::IndirOpnd *argIndirOpnd = nullptr;
    IR::Instr* argout = nullptr;
#if defined(_M_IX86)
    // Maintain alignment
    // x86 pushes args on the machine stack; push a dummy slot when the count is even
    // to keep the stack aligned.
    if ((actualCount & 1) == 0)
    {
        IR::Instr *alignPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        alignPush->SetSrc1(IR::IntConstOpnd::New(1, TyInt32, this->m_func));
        callInstr->InsertBefore(alignPush);
    }
#endif

    // Emit the ArgOuts from the last actual down to the first.
    for(uint i = actualCount; i > 0; i--)
    {
        argIndirOpnd = IR::IndirOpnd::New(argInOpnd, (i - 1) * MachPtr, TyMachReg, func);
        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(argIndirOpnd);
        callInstr->InsertBefore(argout);
        // i represents ith arguments from actuals, with is i + 3 counting this, callInfo and function object
        this->m_lowererMD.LoadDynamicArgument(argout, i + 3);
    }
    return IR::IntConstOpnd::New(func->actualCount, TyInt32, func);
}
// For AMD64 and ARM only.
// LowerInlineSpreadArgOutLoopUsingRegisters
// Emits a countdown loop that turns the elements of a spread array into dynamic
// ArgOuts for callInstr. indexOpnd holds the number of remaining elements and is
// decremented each iteration; arrayElementsStartOpnd points at the first element.
// Elements [1..n-1] are emitted inside the loop via LoadDynamicArgumentUsingLength;
// element [0] is handled after the loop (label $oneArg) and is loaded into dynamic
// argument position 4 (after function object, callInfo and 'this').
// A Loop structure is built by hand so the register allocator knows which syms
// are live on the back edge.
void
Lowerer::LowerInlineSpreadArgOutLoopUsingRegisters(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;

    // Skip straight to the single-argument tail when only one element remains.
    IR::LabelInstr *oneArgLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrEq_A, true, oneArgLabel, callInstr);

    // Build the loop header and register the syms that are live across the back edge.
    IR::LabelInstr *startLoopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoopLabel->m_isLoopTop = true;
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    startLoopLabel->SetLoop(loop);
    loop->SetLoopTopInstr(startLoopLabel);
    loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);
    callInstr->InsertBefore(startLoopLabel);

    // index-- ; elemPtr = &elements[index]
    InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);
    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);

    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);

    // X64 requires a reg opnd
    IR::RegOpnd *elemRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    LowererMD::CreateAssign(elemRegOpnd, elemPtrOpnd, callInstr);
    argout->SetSrc1(elemRegOpnd);
    argout->SetSrc2(indexOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    // Loop back while more than one element remains.
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrNeq_A, true, startLoopLabel, callInstr);

    // Emit final argument into register 4 on AMD64 and ARM
    callInstr->InsertBefore(oneArgLabel);
    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object
}
// LowerCallIDynamicSpread
// Lowers a CallIDynamicSpread (a call whose arguments come from spreading an
// array). Walks the ArgOut chain backwards from the call: the spread-array
// ArgOut_A_SpreadArg, then the 'this' ArgOut, then the StartCall. The spread
// ArgOut and StartCall are removed (their work is replaced by an inline loop that
// reads the array's length and emits one dynamic ArgOut per element); the 'this'
// ArgOut instr is unlinked and handed to LowerCallIDynamic along with the runtime
// args-length operand. Inlinees rejit with spread disabled rather than handling
// this here. Returns the lowered call instruction.
IR::Instr *
Lowerer::LowerCallIDynamicSpread(IR::Instr *callInstr, ushort callFlags)
{
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamicSpread);

    IR::Instr * insertBeforeInstrForCFG = nullptr;
    Func *const func = callInstr->m_func;
    if (func->IsInlinee())
    {
        // This lowering only supports the top-level function's frame layout.
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // Step back from the call to the ArgOut carrying the spread array.
    IR::Instr *spreadArrayInstr = callInstr;
    IR::SymOpnd *argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);
    spreadArrayInstr = argLinkSym->m_instrDef;
    Assert(spreadArrayInstr->m_opcode == Js::OpCode::ArgOut_A_SpreadArg);

    // Make sure the array itself is in a register.
    IR::RegOpnd *arrayOpnd = nullptr;
    IR::Opnd *arraySrcOpnd = spreadArrayInstr->UnlinkSrc1();
    if (!arraySrcOpnd->IsRegOpnd())
    {
        arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, spreadArrayInstr);
    }
    else
    {
        arrayOpnd = arraySrcOpnd->AsRegOpnd();
    }

    argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();

    // Walk the arg chain and find the start call
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Nothing to be done for the function object, emit as normal
    // Unlink the 'this' ArgOut; LowerCallIDynamic will consume it at the end.
    IR::Instr *thisInstr = argLinkSym->m_instrDef;
    IR::RegOpnd *thisOpnd = thisInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = thisOpnd->m_sym->AsStackSym();
    thisInstr->Unlink();
    thisInstr->FreeDst();

    // Remove the array ArgOut instr and StartCall, they are no longer needed
    spreadArrayInstr->Unlink();
    spreadArrayInstr->FreeDst();

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = startCallInstr->GetNextRealInstr();
    startCallInstr->Remove();

    // argsLength = array.length
    IR::RegOpnd *argsLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::IndirOpnd *arrayLengthPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    LowererMD::CreateAssign(argsLengthOpnd, arrayLengthPtrOpnd, callInstr);

    // Don't bother expanding args if there are zero
    IR::LabelInstr *zeroArgsLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(argsLengthOpnd, IR::IntConstOpnd::New(0, TyInt8, func), Js::OpCode::BrEq_A, true, zeroArgsLabel, callInstr);

    // indexOpnd is a mutable copy of the length, counted down by the ArgOut loop.
    IR::RegOpnd *indexOpnd = IR::RegOpnd::New(TyUint32, func);
    LowererMD::CreateAssign(indexOpnd, argsLengthOpnd, callInstr);

    // Get the array head offset and length
    // arrayElementsStart = head segment + offsetof(elements)
    IR::IndirOpnd *arrayHeadPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, func);
    IR::RegOpnd *arrayElementsStartOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertAdd(false, arrayElementsStartOpnd, arrayHeadPtrOpnd, IR::IntConstOpnd::New(offsetof(Js::SparseArraySegment<Js::Var>, elements), TyUint8, func), callInstr);

    // Platform-specific loop that emits one dynamic ArgOut per array element.
    this->m_lowererMD.LowerInlineSpreadArgOutLoop(callInstr, indexOpnd, arrayElementsStartOpnd);

    // Resume if we have zero args
    callInstr->InsertBefore(zeroArgsLabel);

    // Lower call
    callInstr->m_opcode = Js::OpCode::CallIDynamic;
    callInstr = m_lowererMD.LowerCallIDynamic(callInstr, thisInstr, argsLengthOpnd, callFlags, insertBeforeInstrForCFG);
    return callInstr;
}
  15148. IR::Instr *
  15149. Lowerer::LowerCallIDynamic(IR::Instr * callInstr, ushort callFlags)
  15150. {
  15151. if (!this->m_func->GetHasStackArgs())
  15152. {
  15153. throw Js::RejitException(RejitReason::InlineApplyDisabled);
  15154. }
  15155. IR::Instr * insertBeforeInstrForCFG = nullptr;
  15156. // Lower args and look for StartCall
  15157. IR::Instr * argInstr = callInstr;
  15158. IR::SymOpnd * argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
  15159. StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  15160. AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
  15161. argLinkOpnd->Free(this->m_func);
  15162. argInstr = argLinkSym->m_instrDef;
  15163. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  15164. IR::Instr* saveThisArgOutInstr = argInstr;
  15165. saveThisArgOutInstr->Unlink();
  15166. saveThisArgOutInstr->FreeDst();
  15167. argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
  15168. argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  15169. AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
  15170. argLinkOpnd->Free(this->m_func);
  15171. argInstr = argLinkSym->m_instrDef;
  15172. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
  15173. IR::Opnd* argsLength = m_lowererMD.GenerateArgOutForStackArgs(callInstr, argInstr);
  15174. IR::RegOpnd* startCallDstOpnd = argInstr->UnlinkSrc2()->AsRegOpnd();
  15175. argLinkSym = startCallDstOpnd->m_sym->AsStackSym();
  15176. startCallDstOpnd->Free(this->m_func);
  15177. argInstr->Remove();// Remove ArgOut_A_FromStackArgs
  15178. argInstr = argLinkSym->m_instrDef;
  15179. Assert(argInstr->m_opcode == Js::OpCode::StartCall);
  15180. insertBeforeInstrForCFG = argInstr->GetNextRealInstr();
  15181. argInstr->Remove(); //Remove start call
  15182. return m_lowererMD.LowerCallIDynamic(callInstr, saveThisArgOutInstr, argsLength, callFlags, insertBeforeInstrForCFG);
  15183. }
//This is only for x64 & ARM.
// GenerateArgOutForStackArgs
// Expands an ArgOut_A_FromStackArgs into per-argument ArgOuts read directly off
// the caller's stack arguments. Emits: a (fast-pathed) LdLen of the stack args, a
// copy of that length to return, an early-out branch for zero args, a countdown
// loop that loads args[len-1..1] and emits ArgOut_A_Dynamic for each, and finally
// a load of args[0] into dynamic argument position 4. The IR shape produced is
// sketched below. Inlinee callers are redirected to
// GenerateArgOutForInlineeStackArgs. Returns the saved length operand, used for
// callInfo generation and stack allocation.
IR::Opnd*
Lowerer::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    //      s25.var       =  LdLen_A          s4.var
    //      s26.var       =  Ld_A             s25.var
    //                       BrNeq_I4         $L3, s25.var,0
    //      $L2:
    //                       BrNeq_I4         $L4, s25.var,1
    //      s25.var       =  SUB_I4           s25.var, 0x1
    //      s10.var       =  LdElemI_A        [s4.var+s25.var].var
    //                       ArgOut_A_Dynamic s10.var, s25.var
    //                       Br $L2
    //      $L4:
    //      s10.var       =  LdElemI_A        [s4.var].var
    //                       ArgOut_A_Dynamic s10.var, 4
    //      $L3
#if defined(_M_IX86)
    Assert(false);
#endif

    Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);

    this->m_lowererMD.GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);

    if (callInstr->m_func->IsInlinee())
    {
        return this->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
    }
    Func *func = callInstr->m_func;
    IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();

    // ldLenDst = stackArgs.length (expanded inline by GenerateFastRealStackArgumentsLdLen)
    IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd ,stackArgs, func);
    ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); /*LdLen_A works only on stack arguments*/
    callInstr->InsertBefore(ldLen);
    GenerateFastRealStackArgumentsLdLen(ldLen);

    // Keep an untouched copy of the length; ldLenDstOpnd is decremented by the loop.
    IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyUint32, func), ldLenDstOpnd, func);
    saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
    callInstr->InsertBefore(saveLenInstr);

    // Zero args: skip everything.
    IR::LabelInstr* doneArgs = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::Instr* branchDoneArgs = IR::BranchInstr::New(Js::OpCode::BrEq_I4, doneArgs, ldLenDstOpnd, IR::IntConstOpnd::New(0, TyInt8, func),func);
    callInstr->InsertBefore(branchDoneArgs);
    this->m_lowererMD.EmitInt4Instr(branchDoneArgs);

    // Hand-built loop; record the loop-carried sym for the register allocator below.
    IR::LabelInstr* startLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr* endLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
    startLoop->m_isLoopTop = true;
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    startLoop->SetLoop(loop);
    loop->SetLoopTopInstr(startLoop);
    loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    callInstr->InsertBefore(startLoop);

    // Exit the loop when one argument remains (it gets special handling below).
    IR::Instr* branchOutOfLoop = IR::BranchInstr::New(Js::OpCode::BrEq_I4, endLoop, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
    callInstr->InsertBefore(branchOutOfLoop);
    this->m_lowererMD.EmitInt4Instr(branchOutOfLoop);

    // len-- ; load args[len] ; ArgOut it at dynamic position len.
    IR::Instr* subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
    IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    argout->SetSrc2(ldLenDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    IR::BranchInstr *tailBranch = IR::BranchInstr::New(Js::OpCode::Br, startLoop, func);
    callInstr->InsertBefore(tailBranch);
    callInstr->InsertBefore(endLoop);
    this->m_lowererMD.LowerUncondBranch(tailBranch);
    loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);

    // Tail: load args[0] (len is 1 here; decrement to 0 and load).
    subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
    ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object

    callInstr->InsertBefore(doneArgs);

    /*return the length which will be used for callInfo generations & stack allocation*/
    return saveLenInstr->GetDst()->AsRegOpnd();
}
// GenerateLoadStackArgumentByIndex
// Emits dst = [ebp + (indexOpnd + GetFormalParamOffset() + offset) * MachPtr],
// i.e. loads the caller's actual argument selected by indexOpnd from the frame,
// inserting the move before 'instr'. 'offset' lets the caller bias the index
// (e.g. +1 to skip 'this'). Assumes the standard frame layout sketched below.
void
Lowerer::GenerateLoadStackArgumentByIndex(IR::Opnd *dst, IR::RegOpnd *indexOpnd, IR::Instr *instr, int32 offset, Func *func)
{
    // Load argument set dst = [ebp + index].
    IR::RegOpnd *ebpOpnd = IR::Opnd::CreateFramePointerOpnd(func);
    IR::IndirOpnd *argIndirOpnd = nullptr;

    // The stack looks like this:
    //     [new.target or FrameDisplay] <== EBP + formalParamOffset (4) + callInfo.Count - 1
    //     arguments[n]                 <== EBP + formalParamOffset (4) + n
    //     ...
    //     arguments[1]                 <== EBP + formalParamOffset (4) + 2
    //     arguments[0]                 <== EBP + formalParamOffset (4) + 1
    //     this or new.target           <== EBP + formalParamOffset (4)
    //     callinfo
    //     function object
    //     return addr
    // EBP->EBP chain

    //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
    int32 actualOffset = GetFormalParamOffset() + offset;
    Assert(GetFormalParamOffset() == 4);
    const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();

    // The fixed part of the offset is expressed in slots; scale it to bytes to
    // match the scaled index register.
    argIndirOpnd = IR::IndirOpnd::New(ebpOpnd, indexOpnd, indirScale, TyMachReg, this->m_func);
    argIndirOpnd->SetOffset(actualOffset << indirScale);

    LowererMD::CreateAssign(dst, argIndirOpnd, instr);
}
//This function assumes there is stackargs bailout and index is always on the range.
// GenerateFastStackArgumentsLdElemI
// Replaces an LdElemI_A on the stack-arguments object with a direct load from the
// frame (or from the inlinee's argv slots when the access happens in an inlinee).
// The original ldElem instruction is removed. Always returns false (no additional
// helper path is needed; the bounds are guaranteed by the caller per the note above).
bool
Lowerer::GenerateFastStackArgumentsLdElemI(IR::Instr* ldElem)
{
    //  MOV dst, ebp [(valueOpnd + 5) *4]  // 5 for the stack layout
    //
    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();

    // Now load the index and check if it is an integer.
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    Assert (indexOpnd && indexOpnd->IsTaggedInt());

    if(ldElem->m_func->IsInlinee())
    {
        // Inlinee: the arguments live in the inlinee argv slots, not at [ebp + ...].
        IR::IndirOpnd *argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, indexOpnd);
        LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);
    }
    else
    {
        GenerateLoadStackArgumentByIndex(ldElem->GetDst(), indexOpnd, ldElem, indirOpnd->GetOffset() + 1, m_func); // +1 to offset 'this'
    }

    ldElem->Remove();
    return false;
}
// GetArgsIndirOpndForInlinee
// Builds an IndirOpnd addressing inlinee argument [valueOpnd] (0-based, counting
// from the first argument after 'this'). Emits a LEA of the first real inlinee
// argv slot into a base register, then forms either [base + const*MachPtr] for a
// constant index or [base + indexReg << scale] for a register index.
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForInlinee(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    Assert(ldElem->m_func->IsInlinee());
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // Address of argument after 'this'
    const auto firstRealArgStackSym = ldElem->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, m_func);

    // base = &inlineeArgv[1]
    IR::RegOpnd *const baseOpnd = IR::RegOpnd::New(TyMachReg, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    InsertLea(baseOpnd, firstArg, ldElem);

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold it into the displacement.
        IntConstType offset = valueOpnd->AsIntConstOpnd()->GetValue() * MachPtr;

        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, ldElem->m_func);
    }
    else
    {
        // Register index: use a scaled index operand.
        Assert(valueOpnd->IsRegOpnd());
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, valueOpnd->AsRegOpnd(), indirScale, TyMachReg, ldElem->m_func);
    }
    return argIndirOpnd;
}
// GetArgsIndirOpndForTopFunction
// Builds an IndirOpnd addressing actual argument [valueOpnd] of the top-level
// (non-inlined) function. For a normal function the base is the frame pointer and
// the argument sits actualOffset (= GetFormalParamOffset() + 1, skipping 'this')
// slots above it; for a generator function the base is the generator's args
// pointer and the offset past 'this' is 1 slot. Handles both constant and
// register indices.
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForTopFunction(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    // Load argument set dst = [ebp + index] (or grab from the generator object if m_func is a generator function).
    IR::RegOpnd *baseOpnd = m_func->GetJnFunction()->IsGenerator() ? LoadGeneratorArgsPtr(ldElem) : IR::Opnd::CreateFramePointerOpnd(m_func);
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // The stack looks like this:
    //     ...
    //     arguments[1]
    //     arguments[0]
    //     this
    //     callinfo
    //     function object
    //     return addr
    // EBP->EBP chain

    //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
    uint16 actualOffset = m_func->GetJnFunction()->IsGenerator() ? 1 : GetFormalParamOffset() + 1; //5
    Assert(actualOffset == 5 || m_func->GetJnFunction()->IsGenerator());

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold index + fixed offset into the displacement.
        IntConstType offset = (valueOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr;

        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
    }
    else
    {
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), valueOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);

        // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
        // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
        // valueOpnd represents is not changed.
        argIndirOpnd->SetOffset(actualOffset << indirScale);
    }
    return argIndirOpnd;
}
// GenerateCheckForArgumentsLength
// Emits the bounds check for the fast arguments[i] path: compares actualParamOpnd
// with valueOpnd before ldElem, then branches to labelCreateHeapArgs using the
// caller-supplied (unsigned) condition when the index is out of range.
void
Lowerer::GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode opcode)
{
    // Check if index < nr_actuals.
    InsertCompare(actualParamOpnd, valueOpnd, ldElem);

    // Jump to helper if index >= nr_actuals.
    // Do an unsigned check here so that a negative index will also fail.
    // (GenerateLdValueFromCheckedIndexOpnd does not guarantee positive index on x86.)
    InsertBranch(opcode, true, labelCreateHeapArgs, ldElem);
}
// GenerateFastArgumentsLdElemI
// Emits the fast path for arguments[i] when the stack-args optimization is on:
// if no heap arguments object has been created yet and the index is a valid
// in-range integer, load the value directly from the frame (or the inlinee argv
// slots) and jump to labelFallThru; otherwise fall through to code that creates
// the heap arguments object (LdHeapArguments) so the generic path can run.
// Returns true when the direct-load fast path was emitted.
bool
Lowerer::GenerateFastArgumentsLdElemI(IR::Instr* ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr *labelFallThru)
{
    //     TEST argsSlot, argsSlot
    //     JNE $helper   // There is an arguments object created jump to helper.
    //     ---GenerateSmIntTest
    //     ---GenerateLdValueFromCheckedIndexOpnd
    //     ---LoadInputParamCount
    //     CMP actualParamOpnd, valueOpnd  //Compare between the actual count & the index count (say i in arguments[i])
    //     JLE $labelCreateHeapArgs
    //     MOV dst, ebp [(valueOpnd + 5) *4]  // 5 for the stack layout
    //     JMP $fallthrough
    //
    //labelCreateHeapArgs:
    //     ---LoadHeapArguments
    Assert(ldElem->DoStackArgsOpt(this->m_func));

    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    bool isInlinee = ldElem->m_func->IsInlinee();
    Func *func = ldElem->m_func;

    // First check the slot on the frame to see if there is a heap arguments object.
    IR::Opnd *cachedArgsObjectSlotOpnd = isInlinee? ldElem->m_func->GetInlineeArgumentsObjectSlotOpnd() : this->m_lowererMD.CreateStackArgumentsSlotOpnd() ;

    // Re-use the base pointer here so that we're loading the current heap args into the reg we will pass
    // to the helper if necessary.
    IR::RegOpnd *argsObjRegOpnd = indirOpnd->GetBaseOpnd();
    LowererMD::CreateAssign(argsObjRegOpnd, cachedArgsObjectSlotOpnd, ldElem);

    // Non-null slot means a heap arguments object exists: the generic helper must run.
    InsertTest(argsObjRegOpnd, argsObjRegOpnd, ldElem);
    IR::LabelInstr *labelCreateHeapArgs = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldElem);

    // Now load the index and check if it is an integer.
    bool emittedFastPath = false;
    bool isNotInt = false;
    IntConstType value = 0;
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::Opnd *valueOpnd = nullptr;
    IR::Opnd *actualParamOpnd = nullptr;

    bool hasIntConstIndex = indirOpnd->TryGetIntConstIndexValue(true, &value, &isNotInt);
    if (isInlinee && hasIntConstIndex && value >= (ldElem->m_func->actualCount - 1))
    {
        //Outside the range of actuals, skip
    }
    else if (labelFallThru != nullptr && !(hasIntConstIndex && value < 0)) //if index is not a negative int constant
    {
        if (isInlinee)
        {
            // Inlinee actual count is a JIT-time constant (minus 'this').
            actualParamOpnd = IR::IntConstOpnd::New(ldElem->m_func->actualCount - 1, TyInt32, func);
        }
        else
        {
            // Load actuals count, LoadHeapArguments will reuse the generated instructions here
            IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldElem, -1 /* don't include 'this' while counting actuals. */);
            actualParamOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
        }

        if (hasIntConstIndex)
        {
            //Constant index
            valueOpnd = IR::IntConstOpnd::New(value, TyInt32, func);
        }
        else
        {
            //Load valueOpnd from the index
            valueOpnd =
                m_lowererMD.LoadNonnegativeIndex(
                    indexOpnd,
                    (
#if INT32VAR
                        indexOpnd->GetType() == TyUint32
#else
                        // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
                        true
#endif
                    ),
                    labelCreateHeapArgs,
                    labelCreateHeapArgs,
                    ldElem);
        }

        if (isInlinee)
        {
            if (!hasIntConstIndex)
            {
                //Runtime check if to make sure length is within the arguments.length range.
                GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, valueOpnd, actualParamOpnd, Js::OpCode::BrGe_A);
            }
            // Constant in-range index needs no runtime check (range verified above).
        }
        else
        {
            GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, actualParamOpnd, valueOpnd, Js::OpCode::BrLe_A);
        }

        // Index is in range: load the argument directly.
        IR::Opnd *argIndirOpnd = nullptr;
        if (isInlinee)
        {
            argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, valueOpnd);
        }
        else
        {
            argIndirOpnd = GetArgsIndirOpndForTopFunction(ldElem, valueOpnd);
        }

        LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);

        // JMP $done
        InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

        // $labelCreateHeapArgs:
        ldElem->InsertBefore(labelCreateHeapArgs);
        emittedFastPath = true;
    }

    // Slow path: materialize the heap arguments object for the generic element load.
    IR::Opnd *nullOpnd = this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueNull);
    IR::Instr *instrArgs = IR::Instr::New(Js::OpCode::LdHeapArguments,
        indirOpnd->GetBaseOpnd(),
        nullOpnd,
        nullOpnd,
        func);
    ldElem->InsertBefore(instrArgs);
    this->m_lowererMD.LoadHeapArguments(instrArgs, true, actualParamOpnd);

    return emittedFastPath;
}
  15502. bool
  15503. Lowerer::GenerateFastRealStackArgumentsLdLen(IR::Instr *ldLen)
  15504. {
  15505. if(ldLen->m_func->IsInlinee())
  15506. {
  15507. //Get the length of the arguments
  15508. LowererMD::CreateAssign(ldLen->GetDst(),
  15509. IR::IntConstOpnd::New(ldLen->m_func->actualCount - 1, TyUint32, ldLen->m_func),
  15510. ldLen);
  15511. }
  15512. else
  15513. {
  15514. IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
  15515. IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
  15516. LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
  15517. }
  15518. ldLen->Remove();
  15519. return false;
  15520. }
  15521. bool
  15522. Lowerer::GenerateFastArgumentsLdLen(IR::Instr *ldLen, IR::LabelInstr* labelHelper, IR::LabelInstr* labelFallThru)
  15523. {
  15524. // TEST argslot, argslot //Test if the arguments slot is zero
  15525. // JNE $helper
  15526. // actualCountOpnd <-LoadInputParamCount fastpath
  15527. // SHL actualCountOpnd, actualCountOpnd, 1 // Left shift for tagging
  15528. // INC actualCountOpnd // Tagging
  15529. // MOV dst, actualCountOpnd
  15530. // JMP $fallthrough
  15531. //$helper:
  15532. Assert(ldLen->DoStackArgsOpt(this->m_func));
  15533. if(ldLen->m_func->IsInlinee())
  15534. {
  15535. IR::Opnd *cachedArgsObjectSlotOpnd = ldLen->m_func->GetInlineeArgumentsObjectSlotOpnd();
  15536. // Re-use the LdLen_A source here so that we're loading the current heap args into the reg we will pass
  15537. // to the helper if necessary.
  15538. IR::RegOpnd *argsObjectRegOpnd = ldLen->GetSrc1()->AsRegOpnd();
  15539. LowererMD::CreateAssign(argsObjectRegOpnd, cachedArgsObjectSlotOpnd, ldLen);
  15540. InsertTest(argsObjectRegOpnd, argsObjectRegOpnd, ldLen);
  15541. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldLen);
  15542. //Get the length of the arguments
  15543. LowererMD::CreateAssign(ldLen->GetDst(),
  15544. IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(ldLen->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, ldLen->m_func), // -1 to exclude this pointer
  15545. ldLen);
  15546. }
  15547. else
  15548. {
  15549. IR::Opnd *cachedArgsObjectSlotOpnd = this->m_lowererMD.CreateStackArgumentsSlotOpnd();
  15550. // Re-use the LdLen_A source here so that we're loading the current heap args into the reg we will pass
  15551. // to the helper if necessary.
  15552. IR::RegOpnd *argsObjectRegOpnd = ldLen->GetSrc1()->AsRegOpnd();
  15553. LowererMD::CreateAssign(argsObjectRegOpnd, cachedArgsObjectSlotOpnd, ldLen);
  15554. InsertTest(argsObjectRegOpnd, argsObjectRegOpnd, ldLen);
  15555. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldLen);
  15556. IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
  15557. IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
  15558. this->m_lowererMD.GenerateInt32ToVarConversion(actualCountOpnd, ldLen);
  15559. LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
  15560. }
  15561. InsertBranch(Js::OpCode::Br, labelFallThru, ldLen);
  15562. return true;
  15563. }
  15564. IR::RegOpnd*
  15565. Lowerer::GenerateFunctionTypeFromFixedFunctionObject(IR::Instr *insertInstrPt, IR::Opnd* functionObjOpnd)
  15566. {
  15567. IR::RegOpnd * functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  15568. IR::Opnd *functionTypeOpnd = nullptr;
  15569. if(functionObjOpnd->IsAddrOpnd())
  15570. {
  15571. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  15572. // functionTypeRegOpnd = MOV [fixed function address + type offset]
  15573. functionObjAddrOpnd->m_address;
  15574. functionTypeOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::RecyclableObject::GetOffsetOfType()), TyMachPtr, this->m_func,
  15575. IR::AddrOpndKindDynamicObjectTypeRef);
  15576. }
  15577. else
  15578. {
  15579. functionTypeOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, this->m_func);
  15580. }
  15581. LowererMD::CreateAssign(functionTypeRegOpnd, functionTypeOpnd, insertInstrPt);
  15582. return functionTypeRegOpnd;
  15583. }
  15584. void
  15585. Lowerer::FinalLower()
  15586. {
  15587. this->m_lowererMD.FinalLower();
  15588. // ensure that the StartLabel and EndLabel are inserted
  15589. // before the prolog and after the epilog respectively
  15590. IR::LabelInstr * startLabel = m_func->GetFuncStartLabel();
  15591. if (startLabel != nullptr)
  15592. {
  15593. m_func->m_headInstr->InsertAfter(startLabel);
  15594. }
  15595. IR::LabelInstr * endLabel = m_func->GetFuncEndLabel();
  15596. if (endLabel != nullptr)
  15597. {
  15598. m_func->m_tailInstr->GetPrevRealInstr()->InsertBefore(endLabel);
  15599. }
  15600. }
void
Lowerer::EHBailoutPatchUp()
{
    // Post-layout pass that patches up exception-handling (try/catch) regions
    // around bailouts:
    // 1. Insert return thunks for all the regions.
    // 2. Set the hasBailedOut bit to true on all bailout paths in EH regions.
    // 3. Insert code after every bailout in a try or catch region to save the return value on the stack, and jump to the return thunk (See Region.h) of that region.
    // 4. Insert code right before the epilog, to restore the return value (saved in 2.) from a bailout into eax.
    Assert(this->m_func->isPostLayout);

    IR::LabelInstr * restoreReturnValueFromBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Locate (or create) the epilog label just before the exit instruction, so
    // the restore code emitted below has a branch target into the epilog.
    IR::LabelInstr * epilogLabel;
    IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (exitPrevInstr->IsLabelInstr())
    {
        epilogLabel = exitPrevInstr->AsLabelInstr();
    }
    else
    {
        epilogLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(epilogLabel);
    }

    IR::Instr * tmpInstr = nullptr;
    // The restore stub only needs to be emitted once, on the first bailout seen.
    bool restoreReturnFromBailoutEmitted = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->m_func)
    {
        // Labels carry the EH region they belong to; track the current region
        // as we walk the (already laid-out) instruction stream.
        if (instr->IsLabelInstr())
        {
            this->currentRegion = instr->AsLabelInstr()->GetRegion();
        }
        // Consider (radua): Assert(this->currentRegion) here?
        if (this->currentRegion)
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch)
            {
                this->InsertReturnThunkForRegion(this->currentRegion, restoreReturnValueFromBailoutLabel);
                if (instr->HasBailOutInfo())
                {
                    // Steps 2 and 3: mark the bailout, restore the stack, then
                    // save the return value and jump to the region's ret thunk.
                    this->SetHasBailedOut(instr);
                    tmpInstr = this->EmitEHBailoutStackRestore(instr);
                    this->EmitSaveEHBailoutReturnValueAndJumpToRetThunk(tmpInstr);
                    if (!restoreReturnFromBailoutEmitted)
                    {
                        // Step 4: emit the (single) restore stub targeting the epilog.
                        this->EmitRestoreReturnValueFromEHBailout(restoreReturnValueFromBailoutLabel, epilogLabel);
                        restoreReturnFromBailoutEmitted = true;
                    }
                }
            }
        }
    }
    NEXT_INSTR_IN_FUNC_EDITING
}
bool
Lowerer::GenerateFastLdFld(IR::Instr * const instrLdFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod,
    IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd, bool* pIsHelper, IR::LabelInstr** pLabelHelper)
{
    // Emits the inline-cache fast path(s) for a field load (LdFld and friends).
    // Returns false to indicate the original instruction was not lowered; the
    // caller emits the helper call under *pLabelHelper.
    //
    // Generates:
    //
    //     r1 = object->type
    //     if (r1 is taggedInt) goto helper
    //     Load inline cache
    //     if monomorphic
    //         r2 = address of the monomorphic inline cache
    //     if polymorphic
    //         r2 = address of the polymorphic inline cache array
    //         r3 = (type >> PIC shift amount) & (PIC size - 1)
    //         r2 = r2 + r3
    //     Try load property using proto cache (if protoFirst)
    //     Try load property using local cache
    //     Try loading property using proto cache (if !protoFirst)
    //     Try loading property using flags cache
    //
    // Loading property using local cache:
    //     if (r1 == r2->u.local.type)
    //         result = load inline slot r2->u.local.slotIndex from r1
    //         goto fallthru
    //     if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    //         result = load aux slot r2->u.local.slotIndex from r1
    //         goto fallthru
    //
    // Loading property using proto cache:
    //     if (r1 == r2->u.proto.type)
    //         r3 = r2->u.proto.prototypeObject
    //         result = load inline slot r2->u.proto.slotIndex from r3
    //         goto fallthru
    //     if (r1 | InlineCacheAuxSlotTypeTag) == r2.u.proto.type)
    //         r3 = r2->u.proto.prototypeObject
    //         result = load aux slot r2->u.proto.slotIndex from r3
    //         goto fallthru
    //
    // Loading property using flags cache:
    //     if (r2->u.accessor.flags & (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) == 0)
    //         if (r1 == r2->u.accessor.type)
    //             result = load inline slot r2->u.accessor.slotIndex from r1
    //             goto fallthru
    //         if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.accessor.type)
    //             result = load aux slot r2->u.accessor.slotIndex from r1
    //             goto fallthru
    //
    // Loading an inline slot:
    //     result = [r1 + slotIndex * sizeof(Var)]
    //
    // Loading an aux slot:
    //     slotArray = r1->auxSlots
    //     result = [slotArray + slotIndex * sizeof(Var)]
    //
    // We only emit the code block for a type of cache (local/proto/flags) if the profile data
    // indicates that type of cache was used to load the property in the past.
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // load the property from an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // load the property from an inline slot before.
    IR::Opnd * opndSrc = instrLdFld->GetSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as src of LdFld");
    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Field load: %s, property: %s, func: %s, cache ID: %d, cloned cache: false\n",
        Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        this->m_func->GetJnFunction()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);

    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;

    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;

    // Decide which cache checks to emit. The proto path is emitted first only
    // for method loads (LdMethodFld and variants).
    bool doLocal = true;
    bool doProto = instrLdFld->m_opcode == Js::OpCode::LdMethodFld
        || instrLdFld->m_opcode == Js::OpCode::LdRootMethodFld
        || instrLdFld->m_opcode == Js::OpCode::ScopedLdMethodFld;
    bool doProtoFirst = doProto;
    bool doInlineSlots = true;
    bool doAuxSlots = true;
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrLdFld->IsProfiledInstr())
    {
        // Narrow the emitted checks based on which cache kinds / slot kinds the
        // profile says were actually hit in the past.
        IR::ProfiledInstr * profiledInstrLdFld = instrLdFld->AsProfiledInstr();
        if (profiledInstrLdFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            doProto = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromProto);
            doLocal = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromLocal);

            if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrLdFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field load.
            doLocal = false;
            doProto = false;
        }
    }

    if (!doLocal && !doProto)
    {
        return false;
    }

    IR::LabelInstr * labelFallThru = instrLdFld->GetOrCreateContinueLabel();

    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    }

    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;

    // Load the cache pointer: either the polymorphic cache array base or the
    // runtime monomorphic cache.
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCaches(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrLdFld);
    }
    else
    {
        LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd, isHelper), instrLdFld);
    }

    // Load the object's type (with tagged-int check) unless the caller already did.
    if (typeOpnd == nullptr)
    {
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, typeOpnd, labelHelper);
    }

    if (usePolymorphicInlineCache)
    {
        // Index into the polymorphic cache array by hashing the type.
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrLdFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }

    IR::LabelInstr * labelNext = nullptr;
    IR::Opnd * opndDst = instrLdFld->GetDst();
    IR::RegOpnd * opndTaggedType = nullptr;
    // Tracks the branch out of the last emitted check so it can be retargeted
    // from the (now removed) labelNext to the helper label below.
    IR::BranchInstr * labelNextBranchToPatch = nullptr;

    if (doProto && doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                // Lazily compute the type tagged with InlineCacheAuxSlotTypeTag.
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    if (doLocal)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    if (doProto && !doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    // Redirect the final check's miss-branch to the helper, and drop the now
    // unreferenced trailing labelNext.
    Assert(labelNextBranchToPatch);
    labelNextBranchToPatch->SetTarget(labelHelper);
    labelNext->Remove();

    // $helper:
    //     dst = CALL Helper(inlineCache, base, field, scriptContext)
    // $fallthru:
    isHelper = true;

    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
  15871. void
  15872. Lowerer::GenerateAuxSlotAdjustmentRequiredCheck(
  15873. IR::Instr * instrToInsertBefore,
  15874. IR::RegOpnd * opndInlineCache,
  15875. IR::LabelInstr * labelHelper)
  15876. {
  15877. // regSlotCap = MOV [&(inlineCache->u.local.rawUInt16)] // sized to 16 bits
  15878. IR::RegOpnd * regSlotCap = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  15879. IR::IndirOpnd * memSlotCap = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.rawUInt16), TyUint16, instrToInsertBefore->m_func);
  15880. InsertMove(regSlotCap, memSlotCap, instrToInsertBefore);
  15881. // SAR regSlotCap, Js::InlineCache::CacheLayoutSelectorBitCount
  15882. IR::IntConstOpnd * constSelectorBitCount = IR::IntConstOpnd::New(Js::InlineCache::CacheLayoutSelectorBitCount, TyUint16, instrToInsertBefore->m_func, /* dontEncode = */ true);
  15883. InsertShiftBranch(Js::OpCode::Shr_A, regSlotCap, regSlotCap, constSelectorBitCount, Js::OpCode::BrNeq_A, true, labelHelper, instrToInsertBefore);
  15884. }
  15885. void
  15886. Lowerer::GenerateSetObjectTypeFromInlineCache(
  15887. IR::Instr * instrToInsertBefore,
  15888. IR::RegOpnd * opndBase,
  15889. IR::RegOpnd * opndInlineCache,
  15890. bool isTypeTagged)
  15891. {
  15892. // regNewType = MOV [&(inlineCache->u.local.type)]
  15893. IR::RegOpnd * regNewType = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  15894. IR::IndirOpnd * memNewType = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrToInsertBefore->m_func);
  15895. InsertMove(regNewType, memNewType, instrToInsertBefore);
  15896. // AND regNewType, ~InlineCacheAuxSlotTypeTag
  15897. if (isTypeTagged)
  15898. {
  15899. // On 64-bit platforms IntConstOpnd isn't big enough to hold TyMachReg values.
  15900. IR::AddrOpnd * constTypeTagComplement = IR::AddrOpnd::New((Js::Var)~InlineCacheAuxSlotTypeTag, IR::AddrOpndKindConstant, instrToInsertBefore->m_func, /* dontEncode = */ true);
  15901. InsertAnd(regNewType, regNewType, constTypeTagComplement, instrToInsertBefore);
  15902. }
  15903. // MOV base->type, regNewType
  15904. IR::IndirOpnd * memObjType = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrToInsertBefore->m_func);
  15905. InsertMove(memObjType, regNewType, instrToInsertBefore);
  15906. }
bool
Lowerer::GenerateFastStFld(IR::Instr * const instrStFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod, IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd,
    bool* pIsHelper, IR::LabelInstr** pLabelHelper, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    // Emits the inline-cache fast path(s) for a field store (StFld and friends).
    // Returns false to indicate the original instruction was not lowered; the
    // caller emits the helper call under *pLabelHelper.
    //
    // Generates:
    //
    //     r1 = object->type
    //     if (r1 is taggedInt) goto helper
    //     Load inline cache
    //     if monomorphic
    //         r2 = address of the monomorphic inline cache
    //     if polymorphic
    //         r2 = address of the polymorphic inline cache array
    //         r3 = (type >> PIC shift amount) & (PIC size - 1)
    //         r2 = r2 + r3
    //     Try store property using local cache
    //
    // Loading property using local cache:
    //     if (r1 == r2->u.local.type)
    //         store value to inline slot r2->u.local.slotIndex on r1
    //         goto fallthru
    //     if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    //         store value to aux slot r2->u.local.slotIndex on r1
    //         goto fallthru
    //
    // Storing to an inline slot:
    //     [r1 + slotIndex * sizeof(Var)] = value
    //
    // Storing to an aux slot:
    //     slotArray = r1->auxSlots
    //     [slotArray + slotIndex * sizeof(Var)] = value
    //
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // store the property to an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // store the property to an inline slot before.
    IR::Opnd * opndSrc = instrStFld->GetSrc1();
    IR::Opnd * opndDst = instrStFld->GetDst();
    AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as dst of StFld");

    IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Field store: %s, property: %s, func: %s, cache ID: %d, cloned cache: false\n",
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        this->m_func->GetJnFunction()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);

    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;

    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;

    // doStore: emit the set-existing-property path; doAdd: emit the
    // add-new-property (type-transition) path. Profile data narrows both.
    bool doStore = true;
    bool doAdd = false;
    bool doInlineSlots = true;
    bool doAuxSlots = true;
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrStFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrStFld = instrStFld->AsProfiledInstr();
        if (profiledInstrStFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            // No local-store history at all: no fast path is worthwhile.
            if (!(profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)))
            {
                return false;
            }

            if (!PHASE_OFF(Js::AddFldFastPathPhase, this->m_func))
            {
                // We always try to do the store field fast path, unless the profile specifically says we never set, but always add a property here.
                if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)) == Js::FldInfo_FromLocalWithoutProperty)
                {
                    doStore = false;
                }

                // On the other hand, we only emit the add field fast path, if the profile explicitly says we do add properties here.
                if (!!(profiledInstrStFld->u.FldInfo().flags & Js::FldInfo_FromLocalWithoutProperty))
                {
                    doAdd = true;
                }
            }
            else
            {
#if ENABLE_DEBUG_CONFIG_OPTIONS
                wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
                PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
                    L"AddFldFastPath: function: %s(%s) property: %s(#%d) no fast path, because the phase is off.\n",
                    this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
                    this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(), propertySym->m_propertyId);
            }

            if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrStFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field store.
            return false;
        }
    }

    Assert(doStore || doAdd);

    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }

    IR::LabelInstr * labelFallThru = instrStFld->GetOrCreateContinueLabel();
    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;

    if (doAdd)
    {
#if ENABLE_DEBUG_CONFIG_OPTIONS
        wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
        PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
            L"AddFldFastPath: function: %s(%s) property: %s(#%d) %s fast path for %s.\n",
            this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
            this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(), propertySym->m_propertyId,
            usePolymorphicInlineCache ? L"poly" : L"mono", doStore ? L"store and add" : L"add only");
    }

    // Load the cache pointer: either the polymorphic cache array base or the
    // runtime monomorphic cache.
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCaches(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrStFld);
    }
    else
    {
        LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrStFld, propertySymOpnd, isHelper), instrStFld);
    }

    // Load the object's type (with tagged-int check) unless the caller already did.
    if (typeOpnd == nullptr)
    {
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrStFld, opndBase, typeOpnd, labelHelper);
    }

    if (usePolymorphicInlineCache)
    {
        // Index into the polymorphic cache array by hashing the type.
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrStFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }

    IR::LabelInstr * labelNext = nullptr;
    IR::RegOpnd * opndTaggedType = nullptr;
    // Tracks the branch out of the last emitted check so it can be retargeted
    // from the (now removed) labelNext to the helper label below.
    IR::BranchInstr * lastBranchToNext = nullptr;

    if (doStore)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                // Lazily compute the type tagged with InlineCacheAuxSlotTypeTag.
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }

    if (doAdd)
    {
        if (doInlineSlots)
        {
            // Add-property path: the cache check is against the "type without
            // property" (checkTypeWithoutProperty = true), and a hit transitions
            // the object's type before storing.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext, true);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, false);
            LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }
            // NOTE(review): unlike every other labelNext in this function, this one
            // is created without passing isHelper — confirm whether that is intentional.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext, true);
            // Bail to helper if the aux slot array must grow before this store.
            GenerateAuxSlotAdjustmentRequiredCheck(instrStFld, opndInlineCache, labelHelper);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, true);
            LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }

    // Redirect the final check's miss-branch to the helper, and drop the now
    // unreferenced trailing labelNext.
    Assert(lastBranchToNext);
    lastBranchToNext->SetTarget(labelHelper);
    labelNext->Remove();

    // $helper:
    //     CALL Helper(inlineCache, base, field, src, scriptContext)
    // $fallthru:
    isHelper = true;

    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
// Attempts an inline fast path for StFld/StFldStrict stores to the "lastIndex" property of a
// (likely) RegExp object, writing the value directly into the RegExp instance and invalidating
// its cached lastIndex. Returns true when the store was fully lowered here; false when no fast
// path applies and the caller should lower normally. On the tagged-value / vtable checks a
// helper label may be created and handed back through labelHelperRef for the slow path.
bool Lowerer::GenerateFastStFldForCustomProperty(IR::Instr *const instr, IR::LabelInstr * *const labelHelperRef)
{
    Assert(instr);
    Assert(labelHelperRef);
    Assert(!*labelHelperRef);

    // Only plain property stores qualify for this fast path.
    switch(instr->m_opcode)
    {
        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
            break;

        default:
            return false;
    }

    IR::SymOpnd *const symOpnd = instr->GetDst()->AsSymOpnd();
    PropertySym *const propertySym = symOpnd->m_sym->AsPropertySym();
    // This fast path is specific to RegExp "lastIndex".
    if(propertySym->m_propertyId != Js::PropertyIds::lastIndex || !symOpnd->IsPropertySymOpnd())
    {
        return false;
    }
    const ValueType objectValueType(symOpnd->GetPropertyOwnerValueType());
    if(!objectValueType.IsLikelyRegExp())
    {
        return false;
    }

    if(instr->HasBailOutInfo())
    {
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(!BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) || bailOutKind & IR::BailOutKindBits)
        {
            // Other bailout kinds will likely need bailout checks that would not be generated here. In particular, if a type
            // check is necessary here to guard against downstream property accesses on the same object, the type check will
            // fail and cause a bailout if the object is a RegExp object since the "lastIndex" property accesses are not cached.
            return false;
        }
    }

    Func *const func = instr->m_func;

    IR::RegOpnd *const objectOpnd = symOpnd->CreatePropertyOwnerOpnd(func);
    const IR::AutoReuseOpnd autoReuseObjectOpnd(objectOpnd, func);

    IR::LabelInstr *labelHelper = nullptr;
    if(!objectOpnd->IsNotTaggedValue())
    {
        // test object, 1
        // jnz $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(objectOpnd, instr, labelHelper);
    }

    if(!objectValueType.IsObject())
    {
        // cmp [object], Js::JavascriptRegExp::vtable
        // jne $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        InsertCompareBranch(
            IR::IndirOpnd::New(objectOpnd, 0, TyMachPtr, func),
            LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp),
            Js::OpCode::BrNeq_A,
            labelHelper,
            instr);
        // Once the vtable check passes, the owner is definitely a RegExp object.
        objectOpnd->SetValueType(objectValueType.ToDefiniteObject());
    }

    // mov [object + offset(lastIndexVar)], src
    // mov [object + offset(lastIndexOrFlag)], Js::JavascriptRegExp::NotCachedValue
    // jmp $done
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, func),
        instr->GetSrc1(),
        instr);
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, func),
        IR::IntConstOpnd::New(Js::JavascriptRegExp::NotCachedValue, TyUint32, func, true),
        instr);
    InsertBranch(Js::OpCode::Br, instr->GetOrCreateContinueLabel(), instr);
    return true;
}
  16190. IR::RegOpnd *
  16191. Lowerer::GenerateIsBuiltinRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject, IR::LabelInstr *labelContinue)
  16192. {
  16193. // CMP [srcReg], Js::DynamicObject::`vtable'
  16194. // JEQ $fallThough
  16195. // MOV r1, [src1 + offset(type)] -- get the type id
  16196. // MOV r1, [r1 + offset(typeId)]
  16197. // ADD r1, ~TypeIds_LastStaticType -- if (typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
  16198. // CMP r1, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
  16199. // JA $helper
  16200. //fallThrough:
  16201. IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16202. if (checkObjectAndDynamicObject)
  16203. {
  16204. if (!regOpnd->IsNotTaggedValue())
  16205. {
  16206. m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
  16207. }
  16208. m_lowererMD.GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
  16209. }
  16210. IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  16211. IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  16212. IR::IndirOpnd *indirOpnd;
  16213. // MOV typeRegOpnd, [src1 + offset(type)]
  16214. indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  16215. m_lowererMD.CreateAssign(typeRegOpnd, indirOpnd, insertInstr);
  16216. // MOV typeIdRegOpnd, [typeRegOpnd + offset(typeId)]
  16217. indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  16218. m_lowererMD.CreateAssign(typeIdRegOpnd, indirOpnd, insertInstr);
  16219. // ADD typeIdRegOpnd, ~TypeIds_LastStaticType
  16220. InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd,
  16221. IR::IntConstOpnd::New(~Js::TypeIds_LastStaticType, TyInt32, this->m_func, true), insertInstr);
  16222. // CMP typeIdRegOpnd, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
  16223. InsertCompare(
  16224. typeIdRegOpnd,
  16225. IR::IntConstOpnd::New(Js::TypeIds_LastBuiltinDynamicObject - Js::TypeIds_LastStaticType - 1, TyInt32, this->m_func),
  16226. insertInstr);
  16227. if (labelContinue)
  16228. {
  16229. // On success, go to continuation label.
  16230. InsertBranch(Js::OpCode::BrLe_A, true, labelContinue, insertInstr);
  16231. }
  16232. else
  16233. {
  16234. // On failure, go to helper.
  16235. InsertBranch(Js::OpCode::BrGt_A, true, labelHelper, insertInstr);
  16236. }
  16237. // $fallThrough
  16238. insertInstr->InsertBefore(labelFallthrough);
  16239. return typeRegOpnd;
  16240. }
// Emits a fast path for (strict and non-strict) equality/inequality branches whose operands are
// both likely booleans or both likely objects. Returns false when neither pattern applies
// (nothing emitted); returns true when the fast path was emitted. *pNeedHelper reports whether
// the caller must still emit the slow-path helper after the $helper label inserted here.
bool Lowerer::GenerateFastBrEqLikely(IR::BranchInstr * instrBranch, bool *pNeedHelper)
{
    IR::Opnd *src1 = instrBranch->GetSrc1();
    IR::Opnd *src2 = instrBranch->GetSrc2();
    IR::LabelInstr *targetInstr = instrBranch->GetTarget();

    // $booleanCmp: both operands proven directly comparable; the original branch opcode is
    // re-emitted there as a machine compare+branch.
    IR::LabelInstr *labelBooleanCmp = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelTrue = instrBranch->GetOrCreateContinueLabel();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    bool isStrictBr = false;
    bool isStrictMode = this->m_func->GetJnFunction()->GetIsStrictMode();
    *pNeedHelper = true;

    // Strict (===/!==) branch variants.
    switch (instrBranch->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        isStrictBr = true;
        break;
    }

    if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyBoolean())
    {
        //
        // Booleans
        //
        if (isStrictBr)
        {
            if (!src1->GetValueType().IsBoolean() && !src2->GetValueType().IsBoolean())
            {
                // Neither side definitely boolean: verify src2 is a JavascriptBoolean,
                // otherwise fall to the helper.
                this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
                if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true))
                {
                    instrBranch->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
                }
            }
            else
            {
                // At least one side is definitely boolean: a strict compare of booleans can never
                // need the helper.
                *pNeedHelper = false;
            }
        }
        else
        {
            // Non-strict compare: both sides must be verified booleans before a raw compare.
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
            this->m_lowererMD.GenerateJSBooleanTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
            if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true))
            {
                instrBranch->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
            }
        }
    }
    else if (src1->GetValueType().IsLikelyObject() && src2->GetValueType().IsLikelyObject())
    {
        //
        // Objects
        //
        IR::LabelInstr *labelTypeIdCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        if (!isStrictBr)
        {
            // If not strictBr, verify both sides are dynamic objects
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
            this->m_lowererMD.GenerateIsDynamicObject(src1->AsRegOpnd(), instrBranch, labelTypeIdCheck, false);
        }
        else
        {
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
        }
        this->m_lowererMD.GenerateIsDynamicObject(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true);

        // $typeIdCheck: src2 was not a DynamicObject; fall back to typeId-range checks
        // (or to the helper in strict mode).
        instrBranch->InsertBefore(labelTypeIdCheck);

        if (isStrictMode)
        {
            labelTypeIdCheck->isOpHelper = true;
            IR::BranchInstr *branchToHelper = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
            instrBranch->InsertBefore(branchToHelper);
        }
        else
        {
            if (!ExternalLowerer::TryGenerateFastExternalEqTest(src1, src2, instrBranch, labelHelper, labelBooleanCmp, this, isStrictBr))
            {
                if (!isStrictBr)
                {
                    GenerateIsBuiltinRecyclableObject(src1->AsRegOpnd(), instrBranch, labelHelper, false /*checkObjectAndDynamicObject*/);
                }
                GenerateIsBuiltinRecyclableObject(src2->AsRegOpnd(), instrBranch, labelHelper, false /*checkObjectAndDynamicObject*/);
            }
        }
    }
    else
    {
        return false;
    }

    // $booleanCmp: re-emit the original branch as a direct compare, then jump to the
    // fall-through on the not-taken side.
    instrBranch->InsertBefore(labelBooleanCmp);
    IR::BranchInstr *newBranch = IR::BranchInstr::New(instrBranch->m_opcode, targetInstr, src1, src2, this->m_func);
    instrBranch->InsertBefore(newBranch);
    this->m_lowererMD.LowerCondBranch(newBranch);
    newBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelTrue, this->m_func);
    instrBranch->InsertBefore(newBranch);

    // $helper:
    instrBranch->InsertBefore(labelHelper);

    return true;
}
// Lowers BrTrue_A/BrFalse_A with a cascade of inline ToBoolean fast paths — typespec'd float,
// null, undefined, tagged int, boxed float, boolean, string, object — each narrowing
// srcValueType as it rules a type out. Values that miss every fast path fall to a
// JavascriptConversion::ToBoolean helper call. Returns false in all paths to indicate the
// branch has been fully lowered here.
bool Lowerer::GenerateFastBrBool(IR::BranchInstr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);

    Func *const func = instr->m_func;

    // The fast paths below need a register source; hoist a non-register src into one.
    if(!instr->GetSrc1()->IsRegOpnd())
    {
        LowererMD::ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
    }
    IR::RegOpnd *const src = instr->GetSrc1()->Copy(func)->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseSrc(src, func);
    const ValueType srcOriginalValueType(src->GetValueType());
    // srcValueType is narrowed after each fast path eliminates a possibility.
    ValueType srcValueType(srcOriginalValueType);

    IR::LabelInstr *const labelTarget = instr->GetTarget();
    IR::LabelInstr *const labelFallthrough = instr->GetOrCreateContinueLabel();
    if(labelTarget == labelFallthrough)
    {
        // Nothing to do
        instr->Remove();
        return false;
    }

    // Map the branch polarity onto truthy/falsy destinations so each fast path can branch
    // directly to the right label.
    const bool branchOnFalse = instr->m_opcode == Js::OpCode::BrFalse_A;
    IR::LabelInstr *const labelFalse = branchOnFalse ? labelTarget : labelFallthrough;
    IR::LabelInstr *const labelTrue = branchOnFalse ? labelFallthrough : labelTarget;
    const Js::OpCode compareWithFalseBranchToTargetOpCode = branchOnFalse ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A;
    IR::LabelInstr *lastLabelBeforeHelper = nullptr;

    /// Typespec'd float
    if (instr->GetSrc1()->GetType() == TyFloat64)
    {
        // A type-specialized float is falsy iff it is zero or NaN; no helper is ever needed.
        InsertFloatCheckForZeroOrNanBranch(instr->GetSrc1(), branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
        instr->Remove();
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Null fast path
    if (srcValueType.HasBeenNull() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsNull())
        {
            // Definitely null — always falsy.
            // jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // cmp src, null
        // je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Null));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Undefined fast path
    if(srcValueType.HasBeenUndefined() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsUndefined())
        {
            // Definitely undefined — always falsy.
            // jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // cmp src, undefined
        // je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Undefined));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Tagged int fast path
    const bool isNotInt = src->IsNotInt();
    bool checkedForTaggedInt = isNotInt;
    if( (
            srcValueType.HasBeenInt() ||
            srcValueType.HasBeenUnknownNumber() ||
            srcOriginalValueType.IsUninitialized()
        ) && !isNotInt)
    {
        checkedForTaggedInt = true;
        IR::LabelInstr *notTaggedIntLabel = nullptr;
        if(!src->IsTaggedInt())
        {
            // test src, 1
            // jz $notTaggedInt
            notTaggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            m_lowererMD.GenerateSmIntTest(src, instr, notTaggedIntLabel);
        }

        // A tagged int is falsy iff its value is zero.
        // cmp src, tag(0)
        // je/jne $target
        m_lowererMD.GenerateTaggedZeroTest(src, instr);
        Lowerer::InsertBranch(compareWithFalseBranchToTargetOpCode, labelTarget, instr);

        if(src->IsTaggedInt())
        {
            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notTaggedInt:
        if(notTaggedIntLabel)
        {
            instr->InsertBefore(notTaggedIntLabel);
            lastLabelBeforeHelper = notTaggedIntLabel;
        }
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Float fast path
    bool generateFloatTest = srcValueType.IsLikelyFloat();
#ifdef _M_IX86
    // On x86 the float compare below requires SSE2.
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        generateFloatTest = false;
    }
#endif
    bool checkedForTaggedFloat =
#if FLOATVAR
        srcValueType.IsNotNumber();
#else
        true; // there are no tagged floats, indicate that it has been checked
#endif

    if (generateFloatTest)
    {
        // if(srcValueType.IsFloat()) // skip tagged int check?
        //
        // ValueType::IsFloat() does not guarantee that the storage is not in a tagged int.
        // The tagged int check is necessary. It does, however, guarantee that as long as the value is not
        // stored in a tagged int, that it is definitely stored in a JavascriptNumber/TaggedFloat.
        IR::LabelInstr *const notFloatLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        if(!checkedForTaggedInt)
        {
            checkedForTaggedInt = true;
            m_lowererMD.GenerateSmIntTest(src, instr, notFloatLabel, nullptr, true);
        }

        // cmp [src], JavascriptNumber::vtable
        // jne $notFloat
#if FLOATVAR
        checkedForTaggedFloat = true;
        IR::RegOpnd *const floatOpnd = m_lowererMD.CheckFloatAndUntag(src, instr, notFloatLabel);
#else
        m_lowererMD.GenerateFloatTest(src, instr, notFloatLabel);
        IR::IndirOpnd *const floatOpnd = IR::IndirOpnd::New(src, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, func);
#endif

        // A boxed float is falsy iff it is zero or NaN.
        // cmp src, 0.0
        // jp $false
        // je/jne $target
        // jmp $fallthrough
        InsertFloatCheckForZeroOrNanBranch(floatOpnd, branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notFloat:
        instr->InsertBefore(notFloatLabel);
        lastLabelBeforeHelper = notFloatLabel;
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::AnyNumber));
    }

    IR::LabelInstr *labelHelper = nullptr;
    // Lazily emit the object (not-a-tagged-value) test exactly once, the first time a
    // subsequent fast path needs to dereference src.
    bool _didObjectTest = checkedForTaggedInt && checkedForTaggedFloat;
    const auto EnsureObjectTest = [&]()
    {
        if(_didObjectTest)
        {
            return;
        }
        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(src, instr, labelHelper);
        _didObjectTest = true;
    };

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Boolean fast path
    if (srcValueType.HasBeenBoolean() || srcOriginalValueType.IsUninitialized())
    {
        IR::LabelInstr *notBooleanLabel = nullptr;
        if (!srcValueType.IsBoolean())
        {
            EnsureObjectTest();

            // cmp [src], JavascriptBoolean::vtable
            // jne $notBoolean
            notBooleanLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            InsertCompareBranch(
                IR::IndirOpnd::New(src, 0, TyMachPtr, func),
                LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptBoolean),
                Js::OpCode::BrNeq_A,
                notBooleanLabel,
                instr);
        }

        // A boolean is falsy iff it is the singleton 'false' value.
        // cmp src, false
        // je/jne $target
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if (srcValueType.IsBoolean())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notBoolean:
        if (notBooleanLabel)
        {
            instr->InsertBefore(notBooleanLabel);
            lastLabelBeforeHelper = notBooleanLabel;
        }

        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Boolean));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // String fast path
    if(srcValueType.HasBeenString())
    {
        IR::LabelInstr *notStringLabel = nullptr;
        if(!srcValueType.IsString())
        {
            EnsureObjectTest();

            notStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            GenerateStringTest(src, instr, notStringLabel, nullptr, false);
        }

        // A string is falsy iff its length is zero.
        // cmp [src + offset(length)], 0
        // jeq/jne $target
        InsertCompareBranch(
            IR::IndirOpnd::New(src, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func),
            IR::IntConstOpnd::New(0, TyUint32, func, true),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if(srcValueType.IsString())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notString:
        if(notStringLabel)
        {
            instr->InsertBefore(notStringLabel);
            lastLabelBeforeHelper = notStringLabel;
        }

        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::String));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Object fast path
    if (srcValueType.IsLikelyObject())
    {
        if(srcValueType.IsObject())
        {
            if(srcValueType.GetObjectType() > ObjectType::Object)
            {
                // Specific object types that are tracked are equivalent to 'true'
                // jmp $true
                InsertBranch(Js::OpCode::Br, labelTrue, instr);

                // Skip lowering call to helper
                Assert(!labelHelper);
                Assert(instr->m_prev->IsBranchInstr());
                instr->Remove();
                return false;
            }
        }
        else
        {
            EnsureObjectTest();
        }

        // mov srcType, [src + offset(type)] -- load type
        IR::RegOpnd *const srcType = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseR1(srcType, func);
        InsertMove(srcType, IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func), instr);

        // test [srcType + offset(flags)], TypeFlagMask_IsFalsy -- check if falsy
        // jnz $false
        InsertTestBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfFlags(), TyUint8, func),
            IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyUint8, func),
            Js::OpCode::BrNeq_A,
            labelFalse,
            instr);

        // cmp [srcType + offset(typeId)], TypeIds_LastJavascriptPrimitiveType -- check base TypeIds_LastJavascriptPrimitiveType
        // ja $true
        InsertCompareBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfTypeId(), TyInt32, func),
            IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, func),
            Js::OpCode::BrGt_A,
            true /* isUnsigned */,
            labelTrue,
            instr);

        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        lastLabelBeforeHelper = nullptr;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Helper call

    // $helper:
    if(lastLabelBeforeHelper)
    {
        // The trailing label of the last fast path flows straight into the helper; mark it so.
        Assert(instr->m_prev == lastLabelBeforeHelper);
        lastLabelBeforeHelper->isOpHelper = true;
    }
    if (labelHelper)
    {
        Assert(labelHelper->isOpHelper);
        instr->InsertBefore(labelHelper);
    }

    // call JavascriptConversion::ToBoolean
    IR::RegOpnd *const toBoolDst = IR::RegOpnd::New(TyInt32, func);
    const IR::AutoReuseOpnd autoReuseToBoolDst(toBoolDst, func);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, toBoolDst, instr->GetSrc1(), func);
    instr->InsertBefore(callInstr);
    LowerUnaryHelperMem(callInstr, IR::HelperConv_ToBoolean);

    // test eax, eax
    InsertTest(toBoolDst, toBoolDst, instr);

    // je/jne $target -- reuse the original branch instruction as the final conditional jump.
    Assert(instr->IsBranchInstr());
    instr->FreeSrc1();
    instr->m_opcode = LowererMD::MDBranchOpcode(compareWithFalseBranchToTargetOpCode);
    Assert(instr->AsBranchInstr()->GetTarget() == labelTarget);

    // Skip lowering another call to helper
    return false;
}
  16681. // Helper method used in LowerMD by all platforms.
  16682. // Creates HelperCallOpnd or DiagHelperCallOpnd, based on helperMethod and state.
  16683. // static
  16684. IR::HelperCallOpnd*
  16685. Lowerer::CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func)
  16686. {
  16687. Assert(func);
  16688. IR::HelperCallOpnd* helperCallOpnd;
  16689. if (CONFIG_FLAG(EnableContinueAfterExceptionWrappersForHelpers) &&
  16690. func->IsJitInDebugMode() &&
  16691. HelperMethodAttributes::CanThrow(helperMethod))
  16692. {
  16693. // Create DiagHelperCallOpnd to indicate that it's needed to wrap original helper with try-catch wrapper,
  16694. // so that we can ignore exception and bailout to next stmt in debugger.
  16695. // For details, see: Lib\Runtime\Debug\DiagHelperMethodWrapper.{h,cpp}.
  16696. helperCallOpnd = IR::DiagHelperCallOpnd::New(helperMethod, func, helperArgCount);
  16697. }
  16698. else
  16699. {
  16700. helperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
  16701. }
  16702. return helperCallOpnd;
  16703. }
// Pattern-matches a "typeof x" whose result feeds the given compare/branch against a string
// constant naming one of the standard types ("undefined", "object", "boolean", "number",
// "string", "function"). Intermediate flow-graph-peep Ld_A copies between the Typeof and the
// compare are tolerated. On a match, emits a typeId-based fast path via
// GenerateFastBrTypeOf/GenerateFastCmTypeOf and returns true; *prev receives the instruction
// preceding the Typeof (so the caller can resume lowering there) and *pfNoLower reports
// whether the original instruction still needs lowering. Returns false when the pattern does
// not match (nothing is changed).
bool
Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool *pfNoLower)
{
    Assert(prev);
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
           instr->m_opcode == Js::OpCode::BrSrNeq_A ||
           instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
           instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
           instr->m_opcode == Js::OpCode::CmSrEq_A ||
           instr->m_opcode == Js::OpCode::CmSrNeq_A ||
           instr->m_opcode == Js::OpCode::BrEq_A ||
           instr->m_opcode == Js::OpCode::BrNeq_A ||
           instr->m_opcode == Js::OpCode::BrNotEq_A ||
           instr->m_opcode == Js::OpCode::BrNotNeq_A ||
           instr->m_opcode == Js::OpCode::CmEq_A ||
           instr->m_opcode == Js::OpCode::CmNeq_A);

    //
    // instr         - (Br/Cm)Sr(N)eq_A
    // instr->m_prev - typeOf
    //
    IR::Instr *instrLd = instr->GetPrevRealInstrOrLabel();
    bool skippedLoads = false;

    //Skip intermediate Ld_A which might be inserted by flow graph peeps
    while (instrLd && instrLd->m_opcode == Js::OpCode::Ld_A )
    {
        // Only skip copies that the flow-graph peephole pass marked as temps.
        if (!(instrLd->GetDst()->IsRegOpnd() && instrLd->GetDst()->AsRegOpnd()->m_fgPeepTmp))
        {
            return false;
        }
        if (instrLd->HasBailOutInfo())
        {
            return false;
        }
        instrLd = instrLd->GetPrevRealInstrOrLabel();
        skippedLoads = true;
    }

    IR::Instr *typeOf = instrLd;

    if (typeOf && (typeOf->m_opcode == Js::OpCode::Typeof))
    {
        IR::RegOpnd *typeOfDst = typeOf->GetDst()->IsRegOpnd() ? typeOf->GetDst()->AsRegOpnd() : nullptr;
        IR::RegOpnd *instrSrc1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
        IR::RegOpnd *instrSrc2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
        if (typeOfDst && instrSrc1 && instrSrc2)
        {
            // src1 must be the Typeof result, consumed only here; src2 must be a
            // single-def string constant.
            if (instrSrc1->m_sym == typeOfDst->m_sym)
            {
                if (!instrSrc1->m_isTempLastUse)
                {
                    return false;
                }
                if (!(instrSrc2->m_sym->m_isSingleDef && instrSrc2->m_sym->m_isStrConst))
                {
                    return false;
                }

                // The second argument to [Cm|Br]TypeOf is the typeid.
                IR::IntConstOpnd *typeIdOpnd = nullptr;

                Assert(instrSrc2->m_sym->m_isSingleDef);
                Assert(instrSrc2->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd());

                // We can't optimize non-javascript type strings.
                Js::JavascriptString *typeNameJsString = Js::JavascriptString::FromVar(instrSrc2->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address);
                const wchar_t *typeName = typeNameJsString->GetString();

                Js::InternalString typeNameString(typeName, typeNameJsString->GetLength());
                if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::UndefinedTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::ObjectTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Object, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::BooleanTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Boolean, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::NumberTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::StringTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_String, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::FunctionTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, instr->m_func);
                }
                else
                {
                    return false;
                }

                if (skippedLoads)
                {
                    //validate none of dst of Ld_A overlaps with typeof src or dst
                    IR::Opnd* typeOfSrc = typeOf->GetSrc1();
                    instrLd = typeOf->GetNextRealInstr();
                    while (instrLd != instr)
                    {
                        if (instrLd->GetDst()->IsEqual(typeOfDst) || instrLd->GetDst()->IsEqual(typeOfSrc))
                        {
                            return false;
                        }
                        instrLd = instrLd->GetNextRealInstr();
                    }
                    // Safe to move the Typeof next to the compare, past the skipped loads.
                    typeOf->Unlink();
                    instr->InsertBefore(typeOf);
                }

                // The first argument to [Cm|Br]TypeOf is the first arg to the TypeOf instruction.
                IR::Opnd *objectOpnd = typeOf->GetSrc1();
                Assert(objectOpnd->IsRegOpnd());

                // Now emit this instruction and remove the ldstr and typeOf.
                *prev = typeOf->m_prev;
                *pfNoLower = false;
                if (instr->IsBranchInstr())
                {
                    GenerateFastBrTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower);
                }
                else
                {
                    GenerateFastCmTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower);
                }

                return true;
            }
        }
    }

    return false;
}
  16830. void
  16831. Lowerer::GenerateFalsyObjectTest(IR::Instr *insertInstr, IR::RegOpnd *TypeOpnd, Js::TypeId typeIdToCheck, IR::LabelInstr* target, IR::LabelInstr* done, bool isNeqOp)
  16832. {
  16833. if (!this->m_func->GetScriptContext()->GetThreadContext()->CanBeFalsy(typeIdToCheck) && typeIdToCheck != Js::TypeIds_Undefined)
  16834. {
  16835. // Don't need the check for falsy, the typeId we are looking for doesn't care
  16836. return;
  16837. }
  16838. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(TypeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  16839. InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), insertInstr);
  16840. if (typeIdToCheck == Js::TypeIds_Undefined)
  16841. {
  16842. //Falsy object returns true for undefined ((typeof falsyObj) == "undefined")
  16843. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp ? done : target, insertInstr);
  16844. }
  16845. else
  16846. {
  16847. //Falsy object returns false for all other types ((typeof falsyObj) != "function")
  16848. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp? target : done , insertInstr);
  16849. }
  16850. }
  16851. ///----------------------------------------------------------------------------
  16852. ///
/// Lowerer::GenerateFastBrTypeOf
  16854. ///
  16855. ///----------------------------------------------------------------------------
void
Lowerer::GenerateFastBrTypeOf(IR::Instr *branch, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower)
{
    // Emits the inline fast path for a branch on (typeof object ==/!= <string literal>),
    // where the literal has already been resolved to the type id in typeIdOpnd.
    // "branch" is the original Br*_A instruction whose taken-target we reuse; "typeOf" is the
    // original TypeOf instruction, consumed here (either removed or re-lowered as the helper).
    // On return, *pfNoLower is set to true when no further lowering of "branch" is required.
    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *target = branch->AsBranchInstr()->GetTarget();
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    bool isNeqOp;

    // Classify the branch opcode as an equality or inequality comparison; every other
    // opcode is a caller error.
    switch(branch->m_opcode)
    {
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
        isNeqOp = true;
        break;

    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        isNeqOp = false;
        break;

    default:
        Assert(UNREACHED);
        __assume(UNREACHED);
    }

    // Tagged-value test: a tagged (non-object) value is a number, so when comparing against
    // "number" the eq-op takes the branch; otherwise it falls out to $done. Inequality
    // inverts the destination.
    // JNE/BNE (typeId == Js::TypeIds_Number) ? $target : $done
    IR::LabelInstr *label = (typeId == Js::TypeIds_Number) ? target : done;
    if (isNeqOp)
        label = (label == target) ? done : target;

    m_lowererMD.GenerateObjectTest(object, branch, label);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               branch);

    // Falsy objects masquerade as "undefined"; this may branch straight to target/done.
    GenerateFalsyObjectTest(branch, typeRegOpnd, typeId, target, done, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               branch);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // "object" matches any type id >= TypeIds_Object (null handled separately below).
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        // Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType.
        // Implemented as an unsigned range check: subtract the lower bound, then a single
        // compare against the width of the range.
        InsertSub(false, objTypeIdOpnd, objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, branch->m_func),branch);

        InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_LastNumberType - Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);
        InsertBranch(isNeqOp ? Js::OpCode::BrGt_A : Js::OpCode::BrLe_A, true, target, branch);
    }
    else
    {
        InsertCompare(objTypeIdOpnd, typeIdOpnd, branch);
        InsertBranch(isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, target, branch);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $done : $target
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(branch, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? done : target,
                            branch);
    }

    branch->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            branch);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? target : done,
                            branch);

        // Now emit Typeof and lower it like we would've for the helper call.
        // The original branch remains and is lowered by the caller (*pfNoLower stays false).
        {
            branch->InsertBefore(helper);
            typeOf->Unlink();
            branch->InsertBefore(typeOf);
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }
    }
    else // Other primitive types don't need helper
    {
        // The fast path fully decides the branch; drop both original instructions and tell
        // the caller there is nothing left to lower.
        typeOf->Remove();
        branch->Remove();
        *pfNoLower = true;
    }

    // $done:
}
  16966. ///----------------------------------------------------------------------------
  16967. ///
/// Lowerer::GenerateFastCmTypeOf
  16969. ///
  16970. ///----------------------------------------------------------------------------
void
Lowerer::GenerateFastCmTypeOf(IR::Instr *compare, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower)
{
    // Emits the inline fast path for (typeof object ==/!= <string literal>) producing a
    // boolean value (Cm*, as opposed to the branching form in GenerateFastBrTypeOf).
    // dst is preloaded with 'true'; paths that decide "no match" jump to $movFalse, which
    // overwrites dst with 'false' before falling through to $done.
    Assert(compare->m_opcode == Js::OpCode::CmSrEq_A ||
           compare->m_opcode == Js::OpCode::CmEq_A ||
           compare->m_opcode == Js::OpCode::CmSrNeq_A ||
           compare->m_opcode == Js::OpCode::CmNeq_A);

    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *movFalse = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper= IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *dst = compare->GetDst()->IsRegOpnd() ? compare->GetDst()->AsRegOpnd() : nullptr;
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

    bool isNeqOp = compare->m_opcode == Js::OpCode::CmSrNeq_A ||
                   compare->m_opcode == Js::OpCode::CmNeq_A;

    Assert(dst);

    if (dst->IsEqual(object))
    {
        // dst same as the src of typeof. As we need to move true to dst first, we need to
        // save the src to a new opnd.
        IR::RegOpnd *newObject = IR::RegOpnd::New(object->GetType(), m_func);
        InsertMove(newObject, object, compare); //Save src
        object = newObject;
    }

    // mov dst, 'true'
    InsertMove(dst,
               LoadLibraryValueOpnd(compare, LibraryValue::ValueTrue),
               compare);

    // Tagged-value test: a tagged (non-object) value is a number, so when comparing against
    // "number" the eq-op is already true ($done); otherwise it must flip to false ($movFalse).
    // Inequality inverts the destination.
    // TEST object, 1
    // JNE (typeId == Js::TypeIds_Number) ? $done : $movFalse
    IR::LabelInstr *target = (typeId == Js::TypeIds_Number) ? done : movFalse;
    if (isNeqOp)
    {
        target = (target == done) ? movFalse : done;
    }

    m_lowererMD.GenerateObjectTest(object, compare, target);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               compare);

    // Falsy objects masquerade as "undefined"; this may branch straight to done/movFalse.
    GenerateFalsyObjectTest(compare, typeRegOpnd, typeId, done, movFalse, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               compare);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // "object" matches any type id >= TypeIds_Object (null handled separately below).
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        // Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
        // via two explicit compares: above the upper bound, then below the lower bound.
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_LastNumberType, TyInt32, compare->m_func),
                            Js::OpCode::BrGt_A,
                            isNeqOp ? done : movFalse,
                            compare);

        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, compare->m_func),
                            isNeqOp? Js::OpCode::BrLt_A : Js::OpCode::BrGe_A,
                            done,
                            compare);
    }
    else
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, done, compare);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $movFalse : $done
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(compare, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? movFalse : done,
                            compare);
    }

    compare->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            compare);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? done : movFalse,
                            compare);

        // Now emit Typeof like we would've for the helper call.
        // The original compare remains and is lowered by the caller (*pfNoLower stays false).
        {
            compare->InsertBefore(helper);
            typeOf->Unlink();
            compare->InsertBefore(typeOf);
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }

        // JMP/B $done — skip the $movFalse block emitted below.
        InsertBranch(Js::OpCode::Br, done, done);
    }
    else // Other primitive types don't need helper
    {
        // The fast path fully computes dst; drop both original instructions and tell the
        // caller there is nothing left to lower.
        typeOf->Remove();
        compare->Remove();
        *pfNoLower = true;
    }

    // $movFalse: (insert before $done)
    done->InsertBefore(movFalse);

    // MOV dst, 'false'
    InsertMove(dst, LoadLibraryValueOpnd(done, LibraryValue::ValueFalse), done);

    // $done:
}
void
Lowerer::GenerateCheckForCallFlagNew(IR::Instr* instrInsert)
{
    // Emits a runtime check that the current call carried CallFlags_New (i.e. the class
    // constructor was invoked with 'new'); otherwise throws a type error via the
    // RuntimeTypeError helper. instrInsert is replaced by the emitted sequence.
    Func *func = instrInsert->m_func;
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    Assert(!func->IsInlinee());

    // MOV s1, [ebp + 4]                  // s1 = call info
    // AND s2, s1, Js::CallFlags_New      // s2 = s1 & Js::CallFlags_New
    // CMP s2, 0
    // JNE $Done
    // CALL RuntimeTypeError
    // $Done

    IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    // The flags live above the 24-bit count field of CallInfo, hence the shift below.
    Assert(Js::CallInfo::ksizeofCount == 24);

    IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyUint32, func);

    InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), instrInsert);
    InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelDone, instrInsert);

    // Flag absent: throw JSERR_ClassConstructorCannotBeCalledWithoutNew.
    IR::Instr *throwInstr = IR::Instr::New(
        Js::OpCode::RuntimeTypeError,
        IR::RegOpnd::New(TyMachReg, m_func),
        IR::IntConstOpnd::New(SCODE_CODE(JSERR_ClassConstructorCannotBeCalledWithoutNew), TyInt32, m_func),
        m_func);
    instrInsert->InsertBefore(throwInstr);
    this->LowerUnaryHelperMem(throwInstr, IR::HelperOp_RuntimeTypeError);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
void
Lowerer::GenerateLoadNewTarget(IR::Instr* instrInsert)
{
    // Lowers the load of 'new.target':
    //  - generator functions: always undefined (assigned directly, no flag checks);
    //  - call carried CallFlags_NewTarget: new.target was passed as an extra trailing
    //    argument, load it from the stack;
    //  - call carried CallFlags_New (ordinary 'new'): new.target is the function itself;
    //  - otherwise: undefined.
    // instrInsert is replaced by the emitted sequence.
    Func *func = instrInsert->m_func;

    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr * labelLoadArgNewTarget = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);

    Assert(!func->IsInlinee());

    if (func->GetJnFunction()->IsGenerator())
    {
        // Generators: new.target is always undefined here; reduce to a simple assign.
        instrInsert->SetSrc1(opndUndefAddress);
        LowererMD::ChangeToAssign(instrInsert);
        return;
    }

    // MOV dst, undefined                       // dst = undefined
    // MOV s1, callInfo                         // s1 = callInfo
    // TEST s1, Js::CallFlags_NewTarget << 24   // if (callInfo.Flags & Js::CallFlags_NewTarget)
    // JNE $LoadLastArgument                    //     goto $LoadLastArgument
    // TEST s1, Js::CallFlags_New << 24         // if (!(callInfo.Flags & Js::CallFlags_New))
    // JE $Done                                 //     goto $Done
    // MOV dst, functionObject                  // dst = functionObject
    // JMP $Done                                // goto $Done
    // $LoadLastArgument
    // AND s1, s1, 0x00FFFFFF                   // s1 = callInfo.Count (== arguments.length + 2)
    // MOV dst, [ebp + (s1 - 1) * sizeof(Var) + formalParamOffset * sizeof(Var)] // points to new.target
    // $Done

    IR::Opnd *dstOpnd = instrInsert->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    LowererMD::CreateAssign(dstOpnd, opndUndefAddress, instrInsert);

    IR::SymOpnd *callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    // The flags live above the 24-bit count field of CallInfo, hence the shifts below.
    Assert(Js::CallInfo::ksizeofCount == 24);

    IR::RegOpnd *s1 = IR::RegOpnd::New(TyUint32, func);
    LowererMD::CreateAssign(s1, callInfoOpnd, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_NewTarget << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrNeq_A, labelLoadArgNewTarget, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrEq_A, labelDone, instrInsert);

    // 'new' without an explicit new.target: the function object itself is new.target.
    // NOTE(review): the OpCode::AND here looks like a placeholder; LoadFuncExpression
    // rewrites the instruction to the machine-dependent load — confirm against LowererMD.
    IR::Instr* loadFuncInstr = IR::Instr::New(Js::OpCode::AND, func);
    loadFuncInstr->SetDst(instrInsert->GetDst());
    m_lowererMD.LoadFuncExpression(loadFuncInstr);
    instrInsert->InsertBefore(loadFuncInstr);
    InsertBranch(Js::OpCode::Br, labelDone, instrInsert);

    instrInsert->InsertBefore(labelLoadArgNewTarget);

    // Mask off the flag bits to recover callInfo.Count.
    InsertAnd(s1, s1, IR::IntConstOpnd::New(0x00FFFFFF, TyUint32, func, true), instrInsert); // callInfo.Count

    // [formalOffset (4) + callInfo.Count - 1] points to 'new.target' - see diagram in GenerateLoadStackArgumentByIndex()
    GenerateLoadStackArgumentByIndex(dstOpnd, s1, instrInsert, -1, m_func);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
  17170. void
  17171. Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
  17172. {
  17173. Func * func = this->m_func;
  17174. IR::Instr * insertBeforeInstr = instr->m_next;
  17175. IR::RegOpnd * functionObjectOpnd = instr->GetDst()->AsRegOpnd();
  17176. IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction);
  17177. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  17178. InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
  17179. Js::OpCode::BrNeq_A, true, labelDone, insertBeforeInstr);
  17180. IR::RegOpnd * boxedFunctionObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
  17181. InsertMove(boxedFunctionObjectOpnd, IR::IndirOpnd::New(functionObjectOpnd,
  17182. Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertBeforeInstr);
  17183. InsertTestBranch(boxedFunctionObjectOpnd, boxedFunctionObjectOpnd, Js::OpCode::BrEq_A, true, labelDone, insertBeforeInstr);
  17184. InsertMove(functionObjectOpnd, boxedFunctionObjectOpnd, insertBeforeInstr);
  17185. insertBeforeInstr->InsertBefore(labelDone);
  17186. }
IR::Opnd *
Lowerer::GetInlineCacheFromFuncObjectForRuntimeUse(IR::Instr * instr, IR::PropertySymOpnd * propSymOpnd, bool isHelper)
{
    // Emits (before instr) code that materializes the inline cache to use at runtime:
    // if the current function object has its own inline caches, index into them with
    // propSymOpnd->m_inlineCacheIndex; otherwise fall back to the cache embedded in the
    // property sym opnd. Returns the register operand holding the chosen cache.
    // isHelper marks the emitted labels as helper (cold) blocks.

    // MOV s1, [ebp + 8]                      //s1 = function object
    // MOV s2, [s1 + offset(hasInlineCaches)]
    // TEST s2, s2
    // JE $L1
    // MOV s3, [s1 + offset(m_inlineCaches)]  //s3 = inlineCaches from function object
    // MOV s4, [s3 + index*scale]             //s4 = inlineCaches[index]
    // JMP $L2
    // $L1
    // MOV s3, propSym->m_runtimeCache
    // $L2

    byte indirScale = this->m_lowererMD.GetDefaultIndirScale();

    IR::RegOpnd * funcObjOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * funcObjInstr = IR::Instr::New(Js::OpCode::Ld_A, funcObjOpnd, instr->m_func);
    instr->InsertBefore(funcObjInstr);
    this->m_lowererMD.LoadFuncExpression(funcObjInstr);

    // Test the function object's hasInlineCaches byte; zero means use the fallback path.
    IR::RegOpnd * funcObjHasInlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    this->m_lowererMD.CreateAssign(funcObjHasInlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), TyUint8, instr->m_func), instr);

    IR::LabelInstr * inlineCachesNullLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertTestBranch(funcObjHasInlineCachesOpnd, funcObjHasInlineCachesOpnd, Js::OpCode::BrEq_A, inlineCachesNullLabel, instr);

    IR::RegOpnd * inlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    Lowerer::InsertMove(inlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunctionWithInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, instr->m_func), instr);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);

    int inlineCacheOffset;
    // Int32Math::Mul reports failure (overflow) via its return value; when the byte offset
    // fits in 32 bits use a constant-offset load, otherwise fall back to index*scale addressing.
    if (!Int32Math::Mul(sizeof(Js::InlineCache *), propSymOpnd->m_inlineCacheIndex, &inlineCacheOffset))
    {
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, inlineCacheOffset, TyMachPtr, instr->m_func), instr);
    }
    else
    {
        Lowerer::InsertMove(indexOpnd, IR::IntConstOpnd::New(propSymOpnd->m_inlineCacheIndex, TyUint32, instr->m_func), instr);
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, indexOpnd, indirScale, TyMachPtr, instr->m_func), instr);
    }

    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertBranch(LowererMD::MDUncondBranchOpcode, continueLabel, instr);

    // Fallback ($L1): load the statically-known runtime cache from the property sym opnd,
    // then stitch the labels around that load so both paths converge at $L2.
    IR::Instr * ldCacheFromPropSymOpndInstr = this->m_lowererMD.CreateAssign(inlineCacheOpnd, IR::AddrOpnd::New(propSymOpnd->m_runtimeInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func), instr);

    ldCacheFromPropSymOpndInstr->InsertBefore(inlineCachesNullLabel);
    ldCacheFromPropSymOpndInstr->InsertAfter(continueLabel);

    return inlineCacheOpnd;
}
  17230. IR::Instr *
  17231. Lowerer::LowerInitClass(IR::Instr * instr)
  17232. {
  17233. // scriptContext
  17234. IR::Instr * prevInstr = LoadScriptContext(instr);
  17235. // extends
  17236. if (instr->GetSrc2() != nullptr)
  17237. {
  17238. IR::Opnd * extendsOpnd = instr->UnlinkSrc2();
  17239. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  17240. }
  17241. else
  17242. {
  17243. IR::AddrOpnd* extendsOpnd = IR::AddrOpnd::NewNull(this->m_func);
  17244. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  17245. }
  17246. // constructor
  17247. IR::Opnd * ctorOpnd = instr->UnlinkSrc1();
  17248. m_lowererMD.LoadHelperArgument(instr, ctorOpnd);
  17249. // call
  17250. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOP_InitClass);
  17251. return prevInstr;
  17252. }
void
Lowerer::LowerNewConcatStrMulti(IR::Instr * instr)
{
    // Lowers NewConcatStrMulti: allocates a ConcatStringMulti with 'count' slots from the
    // recycler and initializes its header fields inline (vtable, type, deferred psz value,
    // zero char length, slot count). The slot contents are filled in by the separate
    // SetConcatStrMultiItem lowering. instr is removed once the sequence is emitted.
    IR::IntConstOpnd * countOpnd = instr->UnlinkSrc1()->AsIntConstOpnd();
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    uint8 count = (uint8)countOpnd->GetValue();

    Assert(dstOpnd->GetValueType().IsString());

    GenerateRecyclerAlloc(IR::HelperAllocMemForConcatStringMulti, Js::ConcatStringMulti::GetAllocSize(count), dstOpnd, instr);

    // Vtable pointer lives at offset 0.
    GenerateRecyclerMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, VTableValue::VtableConcatStringMulti), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfType(),
        this->LoadLibraryValueOpnd(instr, LibraryValue::ValueStringTypeStatic), instr);
    // pszValue stays null until the string is flattened.
    GenerateRecyclerMemInitNull(dstOpnd, Js::ConcatStringMulti::GetOffsetOfpszValue(), instr);
    // charLength starts at 0; each SetConcatStrMultiItem adds its item's length.
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), 0, instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfSlotCount(), countOpnd->AsUint32(), instr);

    instr->Remove();
}
void
Lowerer::LowerNewConcatStrMultiBE(IR::Instr * instr)
{
    // Lower
    //  t1 = SetConcatStrMultiBE s1
    //  t2 = SetConcatStrMultiBE s2, t1
    //  t3 = SetConcatStrMultiBE s3, t2
    //  s  = NewConcatStrMultiBE 3, t3
    // to
    //  s = new concat string
    //  s[0] = s1
    //  s[1] = s2
    //  s[2] = s3
    // The Set*BE chain is walked backwards through src2 def links, so slot stores are
    // emitted from the highest index down; each store is inserted AFTER instr (and thus
    // after the allocation produced by LowerNewConcatStrMulti at the end).
    Assert(instr->GetSrc1()->IsConstOpnd());
    Assert(instr->GetDst()->IsRegOpnd());

    IR::RegOpnd * newString = instr->GetDst()->AsRegOpnd();
    IR::Opnd * newConcatItemOpnd = nullptr;
    uint index = instr->GetSrc1()->AsIntConstOpnd()->AsUint32() - 1;
    IR::Instr * concatItemInstr = nullptr;
    IR::Opnd * linkOpnd = instr->GetSrc2();
    while (linkOpnd)
    {
        Assert(linkOpnd->IsRegOpnd());
        concatItemInstr = linkOpnd->GetStackSym()->GetInstrDef();
        Assert(concatItemInstr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE);

        IR::Opnd * concatItemOpnd = concatItemInstr->GetSrc1();
        Assert(concatItemOpnd->IsRegOpnd());

        // If one of the concat items is equal to the dst of the concat expression (s = s + a + b),
        // hoist the load of that item to before the setting of the new string to the dst.
        if (concatItemOpnd->IsEqual(newString))
        {
            if (!newConcatItemOpnd)
            {
                // Hoist once; every later occurrence of the dst reuses the saved copy.
                IR::Instr * hoistSrcInstr = concatItemInstr->HoistSrc1(Js::OpCode::Ld_A);
                newConcatItemOpnd = hoistSrcInstr->GetDst();
            }
            concatItemOpnd = newConcatItemOpnd;
        }
        else
        {
            // If only some of the SetConcatStrMultiItemBE instructions were CSE'd and the rest, along with the NewConcatStrMultiBE
            // instruction, were in a loop, the strings on the CSE'd Set*BE instructions will become live on back edge. Add them to
            // addToLiveOnBackEdgeSyms here and clear when we reach the Set*BE instruction.
            // Note that we are doing this only for string opnds which are not the same as the dst of the concat expression. Reasoning
            // behind this is that if a loop has a concat expression with one of its sources same as the dst, the Set*BE instruction
            // for the dst wouldn't have been CSE'd as the dst's value is changing in the loop and the backward pass should have set the
            // symbol as live on backedge.
            this->addToLiveOnBackEdgeSyms->Set(concatItemOpnd->GetStackSym()->m_id);
        }

        // Store this item into slot 'index' of the new string (slot offset is fixed up in
        // LowerSetConcatStrMultiItem).
        IR::Instr * newConcatItemInstr = IR::Instr::New(Js::OpCode::SetConcatStrMultiItem,
                                                        IR::IndirOpnd::New(newString, index, TyVar, instr->m_func),
                                                        concatItemOpnd,
                                                        instr->m_func);
        instr->InsertAfter(newConcatItemInstr);
        this->LowerSetConcatStrMultiItem(newConcatItemInstr);
        linkOpnd = concatItemInstr->GetSrc2();
        index--;
    }
    // index is unsigned, so after processing slot 0 it wraps to UINT_MAX, which compares
    // equal to -1 here — asserts that exactly src1 items were consumed.
    Assert(index == -1);

    this->LowerNewConcatStrMulti(instr);
}
void
Lowerer::LowerSetConcatStrMultiItem(IR::Instr * instr)
{
    // Lowers SetConcatStrMultiItem: adds the item string's length to the concat string's
    // running charLength, then rewrites the dst indir (whose offset is a slot INDEX on
    // entry) into a byte-offset store into the slot array, and turns instr into an assign.
    Func * func = this->m_func;
    IR::IndirOpnd * dstOpnd = instr->GetDst()->AsIndirOpnd();
    IR::RegOpnd * concatStrOpnd = dstOpnd->GetBaseOpnd();
    IR::RegOpnd * srcOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    Assert(concatStrOpnd->GetValueType().IsString());
    Assert(srcOpnd->GetValueType().IsString());
    // The stored string must not mutate after being captured; clone compound strings.
    srcOpnd = GenerateGetImmutableOrScriptUnreferencedString(srcOpnd, instr, IR::HelperOp_CompoundStringCloneForConcat);
    instr->SetSrc1(srcOpnd);

    IR::IndirOpnd * dstLength = IR::IndirOpnd::New(concatStrOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func);
    IR::Opnd * srcLength;

    if (srcOpnd->m_sym->m_isStrConst)
    {
        // String constant: its length is known at JIT time.
        srcLength = IR::IntConstOpnd::New(Js::JavascriptString::FromVar(srcOpnd->m_sym->GetConstAddress())->GetLength(),
            TyUint32, func);
    }
    else
    {
        srcLength = IR::RegOpnd::New(TyUint32, func);
        InsertMove(srcLength, IR::IndirOpnd::New(srcOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func), instr);
    }
    InsertAdd(false, dstLength, dstLength, srcLength, instr);

    // Convert the slot index carried in the indir's offset into the actual byte offset
    // within the slot array.
    dstOpnd->SetOffset(dstOpnd->GetOffset() * sizeof(Js::JavascriptString *) + Js::ConcatStringMulti::GetOffsetOfSlots());
    this->m_lowererMD.ChangeToAssign(instr);
}
IR::RegOpnd *
Lowerer::GenerateGetImmutableOrScriptUnreferencedString(IR::RegOpnd * strOpnd, IR::Instr * insertBeforeInstr, IR::JnHelperMethod helperMethod, bool reloadDst)
{
    // Emits (before insertBeforeInstr) code that yields a string safe to retain:
    // strings whose vtable is CompoundString go through helperMethod (e.g. a clone helper);
    // everything else (including tagged values and string constants) is used as-is.
    // When reloadDst is true the result is placed in a fresh register; otherwise the
    // helper's result overwrites strOpnd. Returns the operand holding the result.
    if (strOpnd->m_sym->m_isStrConst)
    {
        // JIT-time string constants are immutable already; nothing to emit.
        return strOpnd;
    }

    Func * const func = this->m_func;
    IR::RegOpnd *dstOpnd = reloadDst == true ? IR::RegOpnd::New(TyVar, func) : strOpnd;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    if (!strOpnd->IsNotTaggedValue())
    {
        // Tagged values cannot be CompoundStrings; skip straight to done.
        this->m_lowererMD.GenerateObjectTest(strOpnd, insertBeforeInstr, doneLabel);
    }

    // CMP [strOpnd], Js::CompoundString::`vtable'
    // JEQ $helper
    InsertCompareBranch(
        IR::IndirOpnd::New(strOpnd, 0, TyMachPtr, func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertBeforeInstr);

    if (reloadDst)
    {
        // Fast path must also populate the fresh dst register.
        InsertMove(dstOpnd, strOpnd, insertBeforeInstr);
    }

    InsertBranch(Js::OpCode::Br, doneLabel, insertBeforeInstr);

    // $helper: call helperMethod(strOpnd) and take its return value as the result.
    insertBeforeInstr->InsertBefore(helperLabel);
    this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, strOpnd);

    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dstOpnd, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);

    insertBeforeInstr->InsertBefore(doneLabel);
    return dstOpnd;
}
void
Lowerer::LowerConvStrCommon(IR::JnHelperMethod helper, IR::Instr * instr)
{
    // Shared lowering for the string-conversion opcodes (ConvStr/CoerseStr/ConvPrimStr):
    // when src1 might already be a string, emit an inline string test that copies src1 to
    // dst and skips the helper; otherwise (or on test failure) call 'helper' with
    // (src1 [, src2], scriptContext).
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->GetValueType().IsNotString())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // Already a string: dst = src1, jump over the helper call.
        this->GenerateStringTest(src1Opnd, instr, helperLabel);
        InsertMove(instr->GetDst(), src1Opnd, instr);
        InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(helperLabel);
        instr->InsertAfter(doneLabel);
    }

    // Helper arguments are pushed in reverse: optional src2, then scriptContext, then src1.
    if (instr->GetSrc2())
    {
        this->m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    }
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd);
    this->m_lowererMD.ChangeToHelperCall(instr, helper);
}
  17416. void
  17417. Lowerer::LowerConvStr(IR::Instr * instr)
  17418. {
  17419. LowerConvStrCommon(IR::HelperOp_ConvString, instr);
  17420. }
  17421. void
  17422. Lowerer::LowerCoerseStr(IR::Instr* instr)
  17423. {
  17424. LowerConvStrCommon(IR::HelperOp_CoerseString, instr);
  17425. }
  17426. ///----------------------------------------------------------------------------
  17427. ///
  17428. /// Lowerer::LowerCoerseStrOrRegex - This method is used for String.Replace(arg1, arg2)
  17429. /// where arg1 is regex or string
  17430. /// if arg1 is not regex, then do String.Replace(CoerseStr(arg1), arg2);
  17431. ///
  17432. /// CoerseStrOrRegex arg1
  17433. ///
  17434. /// if (value == regex) goto :done
  17435. /// else
  17436. ///helper:
  17437. /// ConvStr value
  17438. ///done:
  17439. ///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseStrOrRegex(IR::Instr* instr)
{
    // Lowers CoerseStrOrRegex (used by String.prototype.replace with a string-or-regex
    // first argument): if src1's vtable identifies a JavascriptRegExp, dst = src1 and the
    // conversion is skipped; otherwise fall through to the ConvStr lowering.
    IR::RegOpnd * src1Opnd = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // if (value == regex) goto :done
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged values cannot be regexes; send them to the conversion path.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);

    // helper: ConvStr value
    LowerConvStr(instr);
}
  17461. ///----------------------------------------------------------------------------
  17462. ///
/// Lowerer::LowerCoerseRegex - This method is used for String.Match(arg1);
/// if arg1 is not already a regex, a regex is created from it (via the
/// Op_CoerseRegex helper) before being passed to String.Match
  17465. ///
  17466. ///----------------------------------------------------------------------------
// Lowers CoerseRegex: if src1 is already a JavascriptRegExp (vtable check) it
// is copied to dst; otherwise the Op_CoerseRegex helper is called with
// (src1, null option, scriptContext) to produce the regex.
void
Lowerer::LowerCoerseRegex(IR::Instr* instr)
{
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged (non-object) values can't be a regex; route them to the helper.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    // Compare the object's vtable against the JavascriptRegExp vtable.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    // Already a regex: dst = src1, skip the helper call.
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    // Helper arguments are loaded in reverse of the call signature.
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::NewNull(instr->m_func)); // option
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd); // regex
    this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CoerseRegex);
}
  17489. void
  17490. Lowerer::LowerConvPrimStr(IR::Instr * instr)
  17491. {
  17492. LowerConvStrCommon(IR::HelperOp_ConvPrimitiveString, instr);
  17493. }
  17494. void
  17495. Lowerer::GenerateRecyclerAlloc(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  17496. {
  17497. size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
  17498. this->GenerateRecyclerAllocAligned(allocHelper, alignedSize, newObjDst, insertionPointInstr, inOpHelper);
  17499. }
  17500. void
  17501. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, int value, IR::Instr * insertBeforeInstr, bool isZeroed)
  17502. {
  17503. IRType type = TyInt32;
  17504. if (isZeroed)
  17505. {
  17506. if (value == 0)
  17507. {
  17508. // Recycler memory are zero initialized
  17509. return;
  17510. }
  17511. if (value > 0 && value <= USHORT_MAX)
  17512. {
  17513. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  17514. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  17515. }
  17516. }
  17517. Func * func = this->m_func;
  17518. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  17519. }
  17520. void
  17521. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
  17522. {
  17523. IRType type = TyUint32;
  17524. if (isZeroed)
  17525. {
  17526. if (value == 0)
  17527. {
  17528. // Recycler memory are zero initialized
  17529. return;
  17530. }
  17531. if (value <= USHORT_MAX)
  17532. {
  17533. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  17534. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  17535. }
  17536. }
  17537. Func * func = this->m_func;
  17538. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  17539. }
  17540. void
  17541. Lowerer::GenerateMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr, bool isZeroed)
  17542. {
  17543. if (isZeroed)
  17544. {
  17545. return;
  17546. }
  17547. GenerateMemInit(opnd, offset, IR::AddrOpnd::NewNull(m_func), insertBeforeInstr);
  17548. }
  17549. void
  17550. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
  17551. {
  17552. IRType type = value->GetType();
  17553. Func * func = this->m_func;
  17554. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
  17555. }
  17556. void
  17557. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr)
  17558. {
  17559. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  17560. }
  17561. void
  17562. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr)
  17563. {
  17564. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  17565. }
  17566. void
  17567. Lowerer::GenerateRecyclerMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr)
  17568. {
  17569. GenerateMemInitNull(opnd, offset, insertBeforeInstr, true);
  17570. }
  17571. void
  17572. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr)
  17573. {
  17574. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  17575. }
  17576. void
  17577. Lowerer::GenerateMemCopy(IR::Opnd * dst, IR::Opnd * src, uint32 size, IR::Instr * insertBeforeInstr)
  17578. {
  17579. Func * func = this->m_func;
  17580. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::IntConstOpnd::New(size, TyUint32, func));
  17581. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, src);
  17582. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, dst);
  17583. IR::Instr * memcpyInstr = IR::Instr::New(Js::OpCode::Call, func);
  17584. memcpyInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperMemCpy, func));
  17585. insertBeforeInstr->InsertBefore(memcpyInstr);
  17586. m_lowererMD.LowerCall(memcpyInstr, 3);
  17587. }
  17588. bool
  17589. Lowerer::GenerateSimplifiedInt4Rem(
  17590. IR::Instr *const remInstr,
  17591. IR::LabelInstr *const skipBailOutLabel) const
  17592. {
  17593. Assert(remInstr);
  17594. Assert(remInstr->m_opcode == Js::OpCode::Rem_I4);
  17595. auto *dst = remInstr->GetDst(), *src1 = remInstr->GetSrc1(), *src2 = remInstr->GetSrc2();
  17596. Assert(src1 && src2);
  17597. Assert(dst->IsRegOpnd());
  17598. bool isModByPowerOf2 = (remInstr->HasBailOutInfo() && remInstr->GetBailOutKind() == IR::BailOnModByPowerOf2);
  17599. if (PHASE_OFF(Js::Phase::MathFastPathPhase, remInstr->m_func->GetTopFunc()) && !isModByPowerOf2)
  17600. return false;
  17601. if (!(src2->IsIntConstOpnd() && Math::IsPow2(src2->AsIntConstOpnd()->AsInt32())) && !isModByPowerOf2)
  17602. {
  17603. return false;
  17604. }
  17605. // We have:
  17606. // s3 = s1 % s2 , where s2 = +2^i
  17607. //
  17608. // Generate:
  17609. // test s1, s1
  17610. // js $slowPathLabel
  17611. // s3 = and s1, 0x00..fff (2^i - 1)
  17612. // jmp $doneLabel
  17613. // $slowPathLabel:
  17614. // (Slow path)
  17615. // (Neg zero check)
  17616. // (Bailout code)
  17617. // $doneLabel:
  17618. IR::LabelInstr *doneLabel = skipBailOutLabel, *slowPathLabel;
  17619. if (!doneLabel)
  17620. {
  17621. doneLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func);
  17622. remInstr->InsertAfter(doneLabel);
  17623. }
  17624. slowPathLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func, isModByPowerOf2);
  17625. remInstr->InsertBefore(slowPathLabel);
  17626. // test s1, s1
  17627. InsertTest(src1, src1, slowPathLabel);
  17628. // jsb $slowPathLabel
  17629. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  17630. // s3 = and s1, 0x00..fff (2^i - 1)
  17631. IR::Opnd* maskOpnd;
  17632. if(isModByPowerOf2)
  17633. {
  17634. Assert(isModByPowerOf2);
  17635. maskOpnd = IR::RegOpnd::New(TyInt32, remInstr->m_func);
  17636. // mov maskOpnd, s2
  17637. InsertMove(maskOpnd, src2, slowPathLabel);
  17638. // dec maskOpnd
  17639. InsertSub(/*needFlags*/ true, maskOpnd, maskOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func, /*dontEncode*/true), slowPathLabel);
  17640. // maskOpnd < 0 goto $slowPath
  17641. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  17642. // TEST src2, maskOpnd
  17643. InsertTestBranch(src2, maskOpnd, Js::OpCode::BrNeq_A, slowPathLabel, slowPathLabel);
  17644. }
  17645. else
  17646. {
  17647. Assert(src2->IsIntConstOpnd());
  17648. int32 mask = src2->AsIntConstOpnd()->AsInt32() - 1;
  17649. maskOpnd = IR::IntConstOpnd::New(mask, TyInt32, remInstr->m_func);
  17650. }
  17651. // dst = src1 & maskOpnd
  17652. InsertAnd(dst, src1, maskOpnd, slowPathLabel);
  17653. // jmp $doneLabel
  17654. InsertBranch(Js::OpCode::Br, doneLabel, slowPathLabel);
  17655. return true;
  17656. }
  17657. #if DBG
// Debug-only sanity check run after lowering: returns true when 'opcode' is
// still legal in the IR at this point. Machine-dependent opcodes (beyond
// MDStart) are always legal; the machine-independent opcodes below are legal
// only under the specific phase/flag conditions checked for each.
bool
Lowerer::ValidOpcodeAfterLower(IR::Instr* instr, Func * func)
{
    Js::OpCode opcode = instr->m_opcode;
    if (opcode > Js::OpCode::MDStart)
    {
        // Machine-dependent opcode: always valid after lowering.
        return true;
    }
    switch (opcode)
    {
    // Markers / pseudo-ops that legitimately survive lowering.
    case Js::OpCode::Label:
    case Js::OpCode::StatementBoundary:
    case Js::OpCode::DeletedNonHelperBranch:
    case Js::OpCode::FunctionEntry:
    case Js::OpCode::FunctionExit:
    case Js::OpCode::TryCatch:
    case Js::OpCode::TryFinally:
    case Js::OpCode::Catch:
    case Js::OpCode::GeneratorResumeJumpTable:
    case Js::OpCode::Break:
#ifdef _M_X64
    case Js::OpCode::PrologStart:
    case Js::OpCode::PrologEnd:
#endif
#ifdef _M_IX86
    case Js::OpCode::BailOutStackRestore:
#endif
        return true;
    case Js::OpCode::RestoreOutParam:
        // Only expected once register allocation has run.
        Assert(func->isPostRegAlloc);
        return true;
    // These may be removed by peep
    case Js::OpCode::StartCall:
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::Nop:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
        return func && !func->isPostPeeps;
    case Js::OpCode::InlineeStart:
    case Js::OpCode::InlineeEnd:
        // Valid only while inline-args optimization info is still live.
        return instr->m_func->m_hasInlineArgsOpt;
#ifdef _M_X64
    case Js::OpCode::LdArgSize:
    case Js::OpCode::LdSpillSize:
        return func && !func->isPostFinalLower;
#endif
    case Js::OpCode::Leave:
        Assert(!func->IsLoopBodyInTry());
        Assert(func->HasTry() && func->DoOptimizeTryCatch());
        return func && !func->isPostFinalLower; //Lowered in FinalLower phase
    };
    return false;
}
  17710. #endif
  17711. void Lowerer::LowerProfiledBeginSwitch(IR::JitProfilingInstr* instr)
  17712. {
  17713. Assert(instr->isBeginSwitch);
  17714. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  17715. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  17716. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  17717. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfiledSwitch, m_func));
  17718. m_lowererMD.LowerCall(instr, 0);
  17719. }
  17720. void Lowerer::LowerProfiledBinaryOp(IR::JitProfilingInstr* instr, IR::JnHelperMethod meth)
  17721. {
  17722. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  17723. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  17724. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  17725. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  17726. instr->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
  17727. m_lowererMD.LowerCall(instr, 0);
  17728. }
  17729. void Lowerer::GenerateNullOutGeneratorFrame(IR::Instr* insertInstr)
  17730. {
  17731. // null out frame pointer on generator object to signal completion to JavascriptGenerator::CallGenerator
  17732. // s = MOV prm1
  17733. // s[offset of JavascriptGenerator::frame] = MOV nullptr
  17734. StackSym *symSrc = StackSym::NewParamSlotSym(1, m_func);
  17735. m_func->SetArgOffset(symSrc, LowererMD::GetFormalParamOffset() * MachPtr);
  17736. IR::SymOpnd *srcOpnd = IR::SymOpnd::New(symSrc, TyMachPtr, m_func);
  17737. IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  17738. m_lowererMD.CreateAssign(dstOpnd, srcOpnd, insertInstr);
  17739. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(dstOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, m_func);
  17740. IR::AddrOpnd *addrOpnd = IR::AddrOpnd::NewNull(m_func);
  17741. m_lowererMD.CreateAssign(indirOpnd, addrOpnd, insertInstr);
  17742. }
  17743. void Lowerer::LowerFunctionExit(IR::Instr* funcExit)
  17744. {
  17745. if (m_func->GetJnFunction()->IsGenerator())
  17746. {
  17747. GenerateNullOutGeneratorFrame(funcExit->m_prev);
  17748. }
  17749. if (!m_func->DoSimpleJitDynamicProfile())
  17750. {
  17751. return;
  17752. }
  17753. IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  17754. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleCleanImplicitCallFlags, m_func));
  17755. funcExit->m_prev->InsertBefore(callInstr);
  17756. m_lowererMD.LoadHelperArgument(callInstr, CreateFunctionBodyOpnd(funcExit->m_func));
  17757. m_lowererMD.LowerCall(callInstr, 0);
  17758. }
// Lower FunctionEntry: emit the function-body call-count update and, when
// SimpleJit dynamic profiling is on, the parameter-profiling helper call plus
// an implicit-call-flags reset. Loop bodies and asm.js functions are skipped.
void Lowerer::LowerFunctionEntry(IR::Instr* funcEntry)
{
    Assert(funcEntry->m_opcode == Js::OpCode::FunctionEntry);
    //Don't do a body call increment for loops or asm.js
    if (m_func->IsLoopBody() || m_func->GetJnFunction()->GetIsAsmjsMode())
    {
        return;
    }
    IR::Instr *const insertBeforeInstr = this->m_func->GetFunctionEntryInsertionPoint();
    LowerFunctionBodyCallCountChange(insertBeforeInstr);
    if (m_func->DoSimpleJitDynamicProfile())
    {
        const auto jn = m_func->GetJnFunction();
        // Only generate the argument profiling if the function expects to have some arguments to profile and only if
        // it has implicit ArgIns (the latter is a restriction imposed by the Interpreter, so it is mirrored in SimpleJit)
        if (jn->GetInParamsCount() > 1 && jn->GetHasImplicitArgIns())
        {
            // Call out to the argument profiling helper
            IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
            callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfileParameters, m_func));
            insertBeforeInstr->InsertBefore(callInstr);
            // The helper takes only the frame pointer.
            m_lowererMD.LoadHelperArgument(callInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LowerCall(callInstr, 0);
        }
        // Clear existing ImplicitCallFlags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), insertBeforeInstr);
    }
}
// Emit the per-function call-count bookkeeping at function entry:
// - non-SimpleJit: increment the count byte (with overflow prevention —
//   see InsertIncUInt8PreventOverflow).
// - SimpleJit: decrement the count byte; on the overflow path, call the
//   TransitionFromSimpleJit helper with the frame pointer.
// Skipped entirely for SimpleJit code that will never full-jit.
void Lowerer::LowerFunctionBodyCallCountChange(IR::Instr *const insertBeforeInstr)
{
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    const bool isSimpleJit = func->IsSimpleJit();
    if ((isSimpleJit && !func->GetTopFunc()->GetJnFunction()->DoFullJit()))
    {
        // SimpleJit with no FullJit transition: no count to maintain.
        return;
    }
    // mov countAddress, <countAddress>
    IR::RegOpnd *const countAddressOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseCountAddressOpnd(countAddressOpnd, func);
    InsertMove(
        countAddressOpnd,
        IR::AddrOpnd::New(func->GetCallsCountAddress(), IR::AddrOpndKindDynamicMisc, func, true),
        insertBeforeInstr);
    // The count itself is a single byte at [countAddress].
    IR::IndirOpnd *const countOpnd = IR::IndirOpnd::New(countAddressOpnd, 0, TyUint8, func);
    const IR::AutoReuseOpnd autoReuseCountOpnd(countOpnd, func);
    if(!isSimpleJit)
    {
        // InsertIncUint8PreventOverflow [countAddress]
        InsertIncUInt8PreventOverflow(countOpnd, countOpnd, insertBeforeInstr);
        return;
    }
    // InsertDecUint8PreventOverflow [countAddress]
    // onOverflowInsertBeforeInstr receives the insertion point inside the
    // overflow path, where the transition call below is emitted.
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertDecUInt8PreventOverflow(
        countOpnd,
        countOpnd,
        insertBeforeInstr,
        &onOverflowInsertBeforeInstr);
    // ($overflow:)
    //     TransitionFromSimpleJit(framePointer)
    m_lowererMD.LoadHelperArgument(onOverflowInsertBeforeInstr, IR::Opnd::CreateFramePointerOpnd(func));
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperTransitionFromSimpleJit, func));
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
  17827. IR::Opnd*
  17828. Lowerer::GetImplicitCallFlagsOpnd()
  17829. {
  17830. return GetImplicitCallFlagsOpnd(m_func);
  17831. }
  17832. IR::Opnd*
  17833. Lowerer::GetImplicitCallFlagsOpnd(Func * func)
  17834. {
  17835. return IR::MemRefOpnd::New(func->GetScriptContext()->GetThreadContext()->GetAddressOfImplicitCallFlags(), GetImplicitCallFlagsType(), func);
  17836. }
  17837. IR::Opnd*
  17838. Lowerer::CreateClearImplicitCallFlagsOpnd()
  17839. {
  17840. return IR::IntConstOpnd::New(Js::ImplicitCall_None, GetImplicitCallFlagsType(), m_func);
  17841. }
  17842. void
  17843. Lowerer::LowerSpreadArrayLiteral(IR::Instr *instr)
  17844. {
  17845. LoadScriptContext(instr);
  17846. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  17847. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  17848. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  17849. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  17850. this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperSpreadArrayLiteral);
  17851. }
// Lower a spread call: detaches the LdSpreadIndices instruction from the
// argument chain, lowers the regular arguments, then pushes the extra
// spread-specific arguments (and the profiling triple when
// setupProfiledVersion) before converting the instruction into a call to the
// SpreadCall (or ProfiledNewScObjArraySpread) helper. Returns the lowered
// call instruction.
IR::Instr *
Lowerer::LowerSpreadCall(IR::Instr *instr, Js::CallFlags callFlags, bool setupProfiledVersion)
{
    // Get the target function object, and emit function object test.
    IR::RegOpnd * functionObjOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    functionObjOpnd->m_isCallArg = true;
    if (!(callFlags & Js::CallFlags_New) && !setupProfiledVersion)
    {
        IR::LabelInstr* continueAfterExLabel = InsertContinueAfterExceptionLabelForDebugger(m_func, instr, false);
        this->m_lowererMD.GenerateFunctionObjectTest(instr, functionObjOpnd, false, continueAfterExLabel);
    }
    IR::Instr *spreadIndicesInstr;
    spreadIndicesInstr = GetLdSpreadIndicesInstr(instr);
    Assert(spreadIndicesInstr->m_opcode == Js::OpCode::LdSpreadIndices);
    // Get AuxArray
    IR::Opnd *spreadIndicesOpnd = spreadIndicesInstr->UnlinkSrc1();
    // Remove LdSpreadIndices from the argument chain
    instr->ReplaceSrc2(spreadIndicesInstr->UnlinkSrc2());
    // Emit the normal args; record whether the call's value is used.
    callFlags = (Js::CallFlags)(callFlags | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
    // Profiled helper call requires three more parameters, ArrayProfileId, profileId, and the frame pointer.
    // This is just following the convention of HelperProfiledNewScObjArray call.
    const unsigned short extraArgsCount = setupProfiledVersion ? 5 : 2; // function object and AuxArray
    int32 argCount = this->m_lowererMD.LowerCallArgs(instr, (ushort)callFlags, extraArgsCount);
    // Emit our extra (first) args for the Spread helper in reverse order
    if (setupProfiledVersion)
    {
        IR::JitProfilingInstr* jitInstr = (IR::JitProfilingInstr*)instr;
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
    }
    m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
    m_lowererMD.LoadHelperArgument(instr, spreadIndicesOpnd);
    // Change the call target to our helper
    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(setupProfiledVersion ? IR::HelperProfiledNewScObjArraySpread : IR::HelperSpreadCall, this->m_func);
    instr->SetSrc1(helperOpnd);
    return this->m_lowererMD.LowerCall(instr, (Js::ArgSlot)argCount);
}
// Lowers asm.js Div_I4/Rem_I4 with explicit guards: a zero divisor produces a
// 0 result, and for signed int32 operands the INT_MIN / -1 case is
// special-cased (Div produces the dividend, Rem produces 0) before reaching
// the machine divide.
void
Lowerer::LowerDivI4Common(IR::Instr * instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::Div_I4);
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    // MIN_INT/-1 path is only needed for signed operations
    //
    //     TEST src2, src2
    //     JEQ $div0
    //     CMP src1, MIN_INT
    //     JEQ $minInt
    //     JMP $div
    // $div0: [helper]
    //     MOV dst, 0
    //     JMP $done
    // $minInt: [helper]
    //     CMP src2, -1
    //     JNE $div
    //     dst = MOV src1 / 0
    //     JMP $done
    // $div:
    //     dst = IDIV src2, src1
    // $done:
    IR::LabelInstr * div0Label = InsertLabel(true, instr);
    IR::LabelInstr * divLabel = InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    // Zero divisor: result is 0, then jump to done.
    InsertTestBranch(instr->GetSrc2(), instr->GetSrc2(), Js::OpCode::BrEq_A, div0Label, div0Label);
    InsertMove(instr->GetDst(), IR::IntConstOpnd::New(0, TyInt32, m_func), divLabel);
    InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
    if (instr->GetSrc1()->GetType() == TyInt32)
    {
        IR::LabelInstr * minIntLabel = nullptr;
        // we need to check for INT_MIN/-1 if divisor is either -1 or variable, and dividend is either INT_MIN or variable
        bool needsMinOverNeg1Check = !(instr->GetSrc2()->IsIntConstOpnd() && instr->GetSrc2()->AsIntConstOpnd()->GetValue() != -1);
        if (instr->GetSrc1()->IsIntConstOpnd())
        {
            if (needsMinOverNeg1Check && instr->GetSrc1()->AsIntConstOpnd()->GetValue() == INT_MIN)
            {
                minIntLabel = InsertLabel(true, divLabel);
                InsertBranch(Js::OpCode::Br, minIntLabel, div0Label);
            }
            else
            {
                // A constant dividend other than INT_MIN can never hit the case.
                needsMinOverNeg1Check = false;
            }
        }
        else if(needsMinOverNeg1Check)
        {
            // Variable dividend: compare against INT_MIN at run time.
            minIntLabel = InsertLabel(true, divLabel);
            InsertCompareBranch(instr->GetSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, m_func), Js::OpCode::BrEq_A, minIntLabel, div0Label);
        }
        if (needsMinOverNeg1Check)
        {
            Assert(minIntLabel);
            Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() == -1);
            if (!instr->GetSrc2()->IsIntConstOpnd())
            {
                // Variable divisor: only the -1 case needs the special result.
                InsertCompareBranch(instr->GetSrc2(), IR::IntConstOpnd::New(-1, TyInt32, m_func), Js::OpCode::BrNeq_A, divLabel, divLabel);
            }
            // INT_MIN / -1: Div yields the dividend, Rem yields 0.
            InsertMove(instr->GetDst(), instr->m_opcode == Js::OpCode::Div_I4 ? instr->GetSrc1() : IR::IntConstOpnd::New(0, TyInt32, m_func), divLabel);
            InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
        }
    }
    InsertBranch(Js::OpCode::Br, divLabel, div0Label);
    // Emit the machine divide itself.
    m_lowererMD.EmitInt4Instr(instr);
}
  17957. void
  17958. Lowerer::LowerRemI4(IR::Instr * instr)
  17959. {
  17960. Assert(instr);
  17961. Assert(instr->m_opcode == Js::OpCode::Rem_I4);
  17962. if (m_func->GetJnFunction()->GetIsAsmjsMode())
  17963. {
  17964. LowerDivI4Common(instr);
  17965. }
  17966. else
  17967. {
  17968. m_lowererMD.EmitInt4Instr(instr);
  17969. }
  17970. }
// Lower Div_I4. Asm.js mode uses the guarded common path; a div without
// bailout info lowers directly to the machine instruction. Otherwise the
// instruction is split into a non-bailout div plus a bailout path, and
// checks are emitted per the bailout kind: div-of-MIN_INT, non-integer
// result, div-by-zero, and negative zero.
void
Lowerer::LowerDivI4(IR::Instr * instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Div_I4);
    if (m_func->GetJnFunction()->GetIsAsmjsMode())
    {
        LowerDivI4Common(instr);
        return;
    }
    if(!instr->HasBailOutInfo())
    {
        m_lowererMD.EmitInt4Instr(instr);
        return;
    }
    Assert(!(instr->GetBailOutKind() & ~(IR::BailOnDivResultNotInt | IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero | IR::BailOutOnDivOfMinInt)));
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    // Split out and generate the bailout instruction
    const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
    instr->TransferTo(nonBailOutInstr);
    instr->InsertBefore(nonBailOutInstr);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(doneLabel);
    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    IR::LabelInstr * bailOutLabel = GenerateBailOut(instr);
    IR::Opnd * denominatorOpnd = nonBailOutInstr->GetSrc2();
    IR::Opnd * nominatorOpnd = nonBailOutInstr->GetSrc1();
    if (bailOutKind & IR::BailOutOnDivOfMinInt)
    {
        // Bailout if numerator is MIN_INT (could also check for denominator being -1
        // before bailing out, but does not seem worth the extra code..)
        InsertCompareBranch(nominatorOpnd, IR::IntConstOpnd::New(INT32_MIN, TyInt32, this->m_func, true), Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
    }
    if (denominatorOpnd->IsIntConstOpnd() && Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
    {
        // Power-of-two constant divisor: bail out if any of the low bits are
        // set (the quotient would not be an integer), then replace the divide
        // with a shift by log2 of the divisor.
        Assert((bailOutKind & (IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero)) == 0);
        int pow2 = denominatorOpnd->AsIntConstOpnd()->AsInt32();
        InsertTestBranch(nominatorOpnd, IR::IntConstOpnd::New(pow2 - 1, TyInt32, this->m_func, true),
            Js::OpCode::BrNeq_A, bailOutLabel, nonBailOutInstr);
        nonBailOutInstr->m_opcode = Js::OpCode::Shr_A;
        nonBailOutInstr->ReplaceSrc2(IR::IntConstOpnd::New(Math::Log2(pow2), TyInt32, this->m_func, true));
        LowererMD::ChangeToShift(nonBailOutInstr, false);
        LowererMD::Legalize(nonBailOutInstr);
    }
    else
    {
        if (bailOutKind & IR::BailOutOnDivByZero)
        {
            // Bailout if denominator is 0
            InsertTestBranch(denominatorOpnd, denominatorOpnd, Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
        }
        // Lower the div and bailout if there is a reminder (machine specific)
        IR::Instr * insertBeforeInstr = m_lowererMD.LowerDivI4AndBailOnReminder(nonBailOutInstr, bailOutLabel);
        IR::Opnd * resultOpnd = nonBailOutInstr->GetDst();
        if (bailOutKind & IR::BailOutOnNegativeZero)
        {
            // A zero quotient with a negative denominator means the true
            // result is negative zero; bail out in that case.
            //     TEST result, result
            //     JNE skipNegDenominatorCheckLabel    // Result not 0
            //     TEST denominator, denominator
            //     JNSB/BMI bailout                    // bail if negative
            // skipNegDenominatorCheckLabel:
            IR::LabelInstr * skipNegDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            // Skip negative denominator check if the result is not 0
            InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrNeq_A, skipNegDenominatorCheckLabel, insertBeforeInstr);
            IR::LabelInstr * negDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            insertBeforeInstr->InsertBefore(negDenominatorCheckLabel);
            // Jump to done if the denominator is not negative
            InsertTestBranch(denominatorOpnd, denominatorOpnd,
                LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), bailOutLabel, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(skipNegDenominatorCheckLabel);
        }
    }
    // We are all fine, jump around the bailout to done
    InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabel);
}
  18047. void
  18048. Lowerer::LowerRemR8(IR::Instr * instr)
  18049. {
  18050. Assert(instr);
  18051. Assert(instr->m_opcode == Js::OpCode::Rem_A);
  18052. Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
  18053. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
  18054. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  18055. instr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperOp_Rem_Double, m_func));
  18056. m_lowererMD.LowerCall(instr, 0);
  18057. }
// Lower NewScopeSlots inline: allocate the slot array (unless doStackSlots —
// stack allocation happens with NewStackFrameDisplay), initialize its header
// (encoded slot count and function body), and fill all value slots with
// undefined — straight-line for small counts, an 8-way-unrolled loop plus
// remainder otherwise. Falls back to the OP_NewScopeSlots helper when the
// fast-path phase is off.
void
Lowerer::LowerNewScopeSlots(IR::Instr * instr, bool doStackSlots)
{
    Func * func = m_func;
    if (PHASE_OFF(Js::NewScopeSlotFastPathPhase, func))
    {
        this->LowerUnaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlots);
        return;
    }
    // count includes the header slots; actualSlotCount is the value slots only.
    uint const count = instr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    uint const allocSize = count * sizeof(Js::Var);
    uint const actualSlotCount = count - Js::ScopeSlots::FirstSlotIndex;
    IR::RegOpnd * dst = instr->UnlinkDst()->AsRegOpnd();
    // dst = RecyclerAlloc(allocSize)
    // dst[EncodedSlotCountSlotIndex] = min(actualSlotCount, MaxEncodedSlotCount);
    // dst[ScopeMetadataSlotIndex] = FunctionBody;
    // mov undefinedOpnd, undefined
    // dst[FirstSlotIndex..count] = undefinedOpnd;
    // Note: stack allocation of both scope slots and frame display are done together
    // in lowering of NewStackFrameDisplay
    if (!doStackSlots)
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, allocSize, dst, instr);
    }
    GenerateMemInit(dst, Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
        min<uint>(actualSlotCount, Js::ScopeSlots::MaxEncodedSlotCount), instr, !doStackSlots);
    IR::Opnd * functionBodyOpnd = this->LoadFunctionBodyOpnd(instr);
    GenerateMemInit(dst, Js::ScopeSlots::ScopeMetadataSlotIndex * sizeof(Js::Var),
        functionBodyOpnd, instr, !doStackSlots);
    IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    const IR::AutoReuseOpnd autoReuseUndefinedOpnd(undefinedOpnd, func);
    // avoid using a register for the undefined pointer if we are going to assign 1 or 2
    if (actualSlotCount > 2 && !undefinedOpnd->IsRegOpnd())
    {
        // mov undefinedOpnd, undefined
        IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyVar, func);
        InsertMove(regOpnd, undefinedOpnd, instr);
        undefinedOpnd = regOpnd;
    }
    int const loopUnrollCount = 8;
    if (actualSlotCount <= loopUnrollCount * 2)
    {
        // Just generate all the assignment in straight line code
        //  mov [dst + Js::FirstSlotIndex], undefinedOpnd
        //  ...
        //  mov [dst + count - 1], undefinedOpnd
        for (unsigned int i = Js::ScopeSlots::FirstSlotIndex; i < count; i++)
        {
            GenerateMemInit(dst, sizeof(Js::Var) * i, undefinedOpnd, instr, !doStackSlots);
        }
    }
    else
    {
        // Just generate all the assignment in loop of loopUnrollCount and the rest as straight line code
        //
        //      lea currOpnd, [dst + sizeof(Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount)];
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 1] , undefinedOpnd
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 2] , undefinedOpnd
        //      ...
        //      mov [currOpnd + loopUnrollCount], undefinedOpnd
        // $LoopTop:
        //      mov [currOpnd + loopUnrollCount - 1], undefinedOpnd
        //      mov [currOpnd + loopUnrollCount - 2], undefinedOpnd
        //      ...
        //      mov [currOpnd], undefinedOpnd
        //      lea currOpnd, [currOpnd - loopUnrollCount]
        //      cmp dst, currOpnd
        //      jlt $Looptop
        uint nLoop = actualSlotCount / loopUnrollCount;
        uint loopAssignCount = nLoop * loopUnrollCount;
        uint leftOverAssignCount = actualSlotCount - loopAssignCount; // The left over assignments
        IR::RegOpnd * currOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseCurrOpnd(currOpnd, m_func);
        InsertLea(
            currOpnd,
            IR::IndirOpnd::New(
                dst,
                sizeof(Js::Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount),
                TyMachPtr,
                func),
            instr);
        // Left-over (non-multiple-of-unroll) assignments, above the loop body.
        for (unsigned int i = 0; i < leftOverAssignCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount + leftOverAssignCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        // Build the loop-top label and its Loop structure so the register
        // allocator knows which syms are live on the back edge.
        IR::LabelInstr * loopTop = IR::LabelInstr::New(Js::OpCode::Label, func);
        instr->InsertBefore(loopTop);
        loopTop->m_isLoopTop = true;
        Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
        loopTop->SetLoop(loop);
        loop->SetLoopTopInstr(loopTop);
        loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
        // The unrolled loop body: loopUnrollCount stores per iteration.
        for (unsigned int i = 0; i < loopUnrollCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        // Step the cursor back one unroll's worth and loop while above dst.
        InsertLea(currOpnd, IR::IndirOpnd::New(currOpnd, -((int)sizeof(Js::Var) * loopUnrollCount), TyMachPtr, func), instr);
        InsertCompareBranch(dst, currOpnd, Js::OpCode::BrLt_A, true, loopTop, instr);
        loop->regAlloc.liveOnBackEdgeSyms->Set(currOpnd->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(dst->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(undefinedOpnd->AsRegOpnd()->m_sym->m_id);
    }
    if (!doStackSlots)
    {
        // Publish the new slot array into the local closure sym.
        InsertMove(IR::RegOpnd::New(instr->m_func->GetLocalClosureSym(), TyMachPtr, func), dst, instr);
    }
    instr->Remove();
}
  18166. void Lowerer::LowerLdInnerFrameDisplay(IR::Instr *instr)
  18167. {
  18168. bool isStrict = instr->m_func->GetJnFunction()->GetIsStrictMode();
  18169. if (isStrict)
  18170. {
  18171. if (instr->GetSrc2())
  18172. {
  18173. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplay);
  18174. }
  18175. else
  18176. {
  18177. #if DBG
  18178. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  18179. #endif
  18180. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplayNoParent);
  18181. }
  18182. }
  18183. else
  18184. {
  18185. if (instr->GetSrc2())
  18186. {
  18187. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplay);
  18188. }
  18189. else
  18190. {
  18191. #if DBG
  18192. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  18193. #endif
  18194. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplayNoParent);
  18195. }
  18196. }
  18197. }
// Lower LdFrameDisplay: build a frame display whose slot 0 is src1 (the new
// current scope) and whose remaining slots are copied from src2 (the enclosing
// environment). Falls back to helper calls when the environment depth is
// unknown or the fast path is disabled.
void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
{
    bool isStrict = instr->m_func->GetJnFunction()->GetIsStrictMode();
    uint16 envDepth = instr->m_func->GetJnFunction()->GetEnvDepth();
    Func *func = this->m_func;

    // envDepth of -1 indicates unknown depth (eval expression or HTML event handler).
    // We could still fast-path these by generating a loop over the (dynamically loaded) scope chain length,
    // but I doubt it's worth it.
    // If the dst opnd is a byte code temp, that indicates we're prepending a block scope or some such and
    // shouldn't attempt to do this.
    if (envDepth == (uint16)-1 ||
        (!doStackFrameDisplay && instr->GetDst()->AsRegOpnd()->m_sym->IsTempReg(instr->m_func)) ||
        PHASE_OFF(Js::FrameDisplayFastPathPhase, func))
    {
        // Helper path: choose by strictness and by whether a parent environment
        // (src2) is present.
        if (isStrict)
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplayNoParent);
            }
        }
        else
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplayNoParent);
            }
        }
        return;
    }

    // Fast path. New display length = parent depth + 1 (room for the new scope).
    uint16 frameDispLength = envDepth + 1;
    Assert(frameDispLength > 0);

    IR::RegOpnd *dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::RegOpnd *currentFrameOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    uint allocSize = sizeof(Js::FrameDisplay) + (frameDispLength * sizeof(Js::Var));
    if (doStackFrameDisplay)
    {
        IR::Instr *insertInstr = func->GetFunctionEntryInsertionPoint();

        // Initialize stack pointers for scope slots and frame display together at the top of the function
        // (in case we bail out before executing the instructions).
        IR::LabelInstr *labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // Check whether stack functions have been disabled since we jitted.
        // If they have, then we must allocate closure memory on the heap.
        InsertTestBranch(IR::MemRefOpnd::New(m_func->GetJnFunction()->GetAddressOfFlags(), TyInt8, m_func),
                         IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, m_func, true),
                         Js::OpCode::BrEq_A, labelNoStackFunc, insertInstr);

        // allocSize is greater than TyMachPtr and hence changing the initial size to TyMisc
        StackSym * stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, allocSize);
        InsertLea(dstOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        uint scopeSlotAllocSize =
            (m_func->GetJnFunction()->scopeSlotArraySize + Js::ScopeSlots::FirstSlotIndex) * sizeof(Js::Var);

        stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, scopeSlotAllocSize);
        InsertLea(currentFrameOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        InsertBranch(Js::OpCode::Br, labelDone, insertInstr);
        insertInstr->InsertBefore(labelNoStackFunc);

        // Heap fallback: recycler-allocate both the frame display and the scope
        // slot array.
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, insertInstr, true);
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, scopeSlotAllocSize, currentFrameOpnd, insertInstr, true);

        insertInstr->InsertBefore(labelDone);

        // Publish both pointers to their home stack slots.
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalFrameDisplaySym(), 0, TyMachReg, m_func), dstOpnd, insertInstr);
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalClosureSym(), 0, TyMachReg, m_func), currentFrameOpnd, insertInstr);
    }
    else
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
    }

    // Copy contents of environment
    // Work back to front to leave the head element(s) in cache
    if (envDepth > 0)
    {
        IR::RegOpnd *envOpnd = instr->UnlinkSrc2()->AsRegOpnd();
        for (uint16 i = envDepth; i >= 1; i--)
        {
            // Parent scope (i-1) moves to slot i of the new display — everything
            // shifts down by one to make room for the new current scope.
            IR::Opnd *scopeOpnd = IR::RegOpnd::New(TyMachReg, func);
            IR::Opnd *envLoadOpnd =
                IR::IndirOpnd::New(envOpnd, Js::FrameDisplay::GetOffsetOfScopes() + ((i - 1) * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(scopeOpnd, envLoadOpnd, instr);
            IR::Opnd *dstStoreOpnd =
                IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes() + (i * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(dstStoreOpnd, scopeOpnd, instr);
        }
    }

    // Assign current element.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, func),
        currentFrameOpnd,
        instr);

    // Combine tag, strict mode flag, and length
    // NOTE(review): the shifts below are evaluated in int width before widening
    // to uintptr_t; if GetOffsetOfStrictMode()/GetOffsetOfLength() * 8 can reach
    // 32 on 64-bit builds, this is UB per [expr.shift] — confirm the field
    // offsets, or cast the operands to uintptr_t first.
    uintptr_t bits = 1 |
        (isStrict << (Js::FrameDisplay::GetOffsetOfStrictMode() * 8)) |
        (frameDispLength << (Js::FrameDisplay::GetOffsetOfLength() * 8));
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, 0, TyMachReg, func),
        IR::AddrOpnd::New((void*)bits, IR::AddrOpndKindConstant, func, true),
        instr);

    instr->Remove();
}
  18311. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Func *const func) const
  18312. {
  18313. return CreateFunctionBodyOpnd(func->GetJnFunction());
  18314. }
  18315. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Js::FunctionBody *const functionBody) const
  18316. {
  18317. return IR::AddrOpnd::New(functionBody, IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  18318. }
  18319. bool
  18320. Lowerer::GenerateRecyclerOrMarkTempAlloc(IR::Instr * instr, IR::RegOpnd * dstOpnd, IR::JnHelperMethod allocHelper, size_t allocSize, IR::SymOpnd ** tempObjectSymOpnd)
  18321. {
  18322. if (instr->dstIsTempObject)
  18323. {
  18324. *tempObjectSymOpnd = GenerateMarkTempAlloc(dstOpnd, allocSize, instr);
  18325. return false;
  18326. }
  18327. this->GenerateRecyclerAlloc(allocHelper, allocSize, dstOpnd, instr);
  18328. *tempObjectSymOpnd = nullptr;
  18329. return true;
  18330. }
  18331. IR::SymOpnd *
  18332. Lowerer::GenerateMarkTempAlloc(IR::RegOpnd *const dstOpnd, const size_t allocSize, IR::Instr *const insertBeforeInstr)
  18333. {
  18334. Assert(dstOpnd);
  18335. Assert(allocSize != 0);
  18336. Assert(insertBeforeInstr);
  18337. Func *const func = insertBeforeInstr->m_func;
  18338. // Allocate stack space for the reg exp instance, and a slot for the boxed value
  18339. StackSym *const tempObjectSym = StackSym::New(TyMisc, func);
  18340. m_func->StackAllocate(tempObjectSym, (int)(allocSize + sizeof(void *)));
  18341. IR::SymOpnd * tempObjectOpnd = IR::SymOpnd::New(tempObjectSym, sizeof(void *), TyVar, func);
  18342. InsertLea(dstOpnd, tempObjectOpnd, insertBeforeInstr);
  18343. // Initialize the boxed instance slot
  18344. if (this->outerMostLoopLabel == nullptr)
  18345. {
  18346. GenerateMemInit(dstOpnd, -(int)sizeof(void *), IR::AddrOpnd::NewNull(func), insertBeforeInstr, false);
  18347. }
  18348. else if (!PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func))
  18349. {
  18350. InsertMove(IR::SymOpnd::New(tempObjectSym, TyMachPtr, func), IR::AddrOpnd::NewNull(func), this->outerMostLoopLabel, false);
  18351. }
  18352. return tempObjectOpnd;
  18353. }
  18354. void Lowerer::LowerBrFncCachedScopeEq(IR::Instr *instr)
  18355. {
  18356. Assert(instr->m_opcode == Js::OpCode::BrFncCachedScopeEq || instr->m_opcode == Js::OpCode::BrFncCachedScopeNeq);
  18357. Js::OpCode opcode = (instr->m_opcode == Js::OpCode::BrFncCachedScopeEq ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A);
  18358. IR::RegOpnd *src1Reg = instr->UnlinkSrc1()->AsRegOpnd();
  18359. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1Reg, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), TyMachReg, this->m_func);
  18360. this->InsertCompareBranch(indirOpnd, instr->UnlinkSrc2(), opcode, false, instr->AsBranchInstr()->GetTarget(), instr->m_next);
  18361. instr->Remove();
  18362. }
  18363. IR::Instr* Lowerer::InsertLoweredRegionStartMarker(IR::Instr* instrToInsertBefore)
  18364. {
  18365. AssertMsg(instrToInsertBefore->m_prev != nullptr, "Can't insert lowered region start marker as the first instr in the func.");
  18366. IR::LabelInstr* startMarkerLabel = IR::LabelInstr::New(Js::OpCode::Label, instrToInsertBefore->m_func);
  18367. instrToInsertBefore->InsertBefore(startMarkerLabel);
  18368. return startMarkerLabel;
  18369. }
  18370. IR::Instr* Lowerer::RemoveLoweredRegionStartMarker(IR::Instr* startMarkerInstr)
  18371. {
  18372. AssertMsg(startMarkerInstr->m_prev != nullptr, "Lowered region start marker became the first instruction in the func after lowering?");
  18373. IR::Instr* prevInstr = startMarkerInstr->m_prev;
  18374. startMarkerInstr->Remove();
  18375. return prevInstr;
  18376. }
  18377. IR::Instr* Lowerer::GetLdSpreadIndicesInstr(IR::Instr *instr)
  18378. {
  18379. IR::Opnd *src2 = instr->GetSrc2();
  18380. if (!src2->IsSymOpnd())
  18381. {
  18382. return nullptr;
  18383. }
  18384. IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
  18385. StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  18386. Assert(argLinkSym->IsSingleDef());
  18387. return argLinkSym->m_instrDef;
  18388. }
  18389. bool Lowerer::IsSpreadCall(IR::Instr *instr)
  18390. {
  18391. IR::Instr *lastInstr = GetLdSpreadIndicesInstr(instr);
  18392. return lastInstr && lastInstr->m_opcode == Js::OpCode::LdSpreadIndices;
  18393. }
  18394. // When under debugger, generate a new label to be used as safe place to jump after ignore exception,
  18395. // insert it after insertAfterInstr, and return the label inserted.
  18396. // Returns nullptr/NoOP for non-debugger code path.
  18397. //static
  18398. IR::LabelInstr* Lowerer::InsertContinueAfterExceptionLabelForDebugger(Func* func, IR::Instr* insertAfterInstr, bool isHelper)
  18399. {
  18400. Assert(func);
  18401. Assert(insertAfterInstr);
  18402. IR::LabelInstr* continueAfterExLabel = nullptr;
  18403. if (func->IsJitInDebugMode())
  18404. {
  18405. continueAfterExLabel = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  18406. insertAfterInstr->InsertAfter(continueAfterExLabel);
  18407. }
  18408. return continueAfterExLabel;
  18409. }
// Lower a multi-branch switch over single-character strings into a jump-table
// lookup keyed on the string's first (and only) character. Strings of any
// other length go to the default target.
void Lowerer::GenerateSingleCharStrJumpTableLookup(IR::Instr * instr)
{
    IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
    Func * func = instr->m_func;

    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV strLengthOpnd, str->length
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);

    // Only length-1 strings can match a single-char case.
    // CMP strLengthOpnd, 1
    // JNE defaultLabel
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)multiBrInstr->GetBranchJumpTable()->defaultTarget;
    InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);

    // MOV strBuffer, str->psz
    IR::RegOpnd * strBufferOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(strBufferOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, func), instr);

    // If the buffer pointer is null, fall into the helper to materialize it.
    // TST strBuffer, strBuffer
    // JNE $continue
    InsertTestBranch(strBufferOpnd, strBufferOpnd, Js::OpCode::BrNeq_A, continueLabel, instr);

    // $helper:
    // PUSH str
    // CALL JavascriptString::GetSzHelper
    // MOV strBuffer, eax
    // $continue:
    instr->InsertBefore(helperLabel);
    m_lowererMD.LoadHelperArgument(instr, instr->GetSrc1());
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, strBufferOpnd, IR::HelperCallOpnd::New(IR::HelperString_GetSz, func), func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 0);
    instr->InsertBefore(continueLabel);

    // Load the first 16-bit character, zero-extended into a 32-bit register.
    // MOV charOpnd, [strBuffer]
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(charOpnd, IR::IndirOpnd::New(strBufferOpnd, 0, TyUint16, func), instr);

    if (multiBrInstr->m_baseCaseValue != 0)
    {
        // Rebase so the jump table is indexed from 0.
        // SUB charOpnd, baseIndex
        InsertSub(false, charOpnd, charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_baseCaseValue, TyUint32, func), instr);
    }

    // Unsigned compare doubles as a range check: chars below the base wrap to
    // large unsigned values and also take the default branch.
    // CMP charOpnd, lastCaseIndex - baseCaseIndex
    // JA defaultLabel
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_lastCaseValue - multiBrInstr->m_baseCaseValue, TyUint32, func, true),
        Js::OpCode::BrGt_A, true, defaultLabelInstr, instr);

    instr->UnlinkSrc1();
    LowerJumpTableMultiBranch(multiBrInstr, charOpnd);
}
// Lower a multi-branch switch over strings via the dictionary-lookup helper,
// first emitting a cheap string-length filter so that most non-matching inputs
// branch straight to the default target without calling the helper.
void Lowerer::GenerateSwitchStringLookup(IR::Instr * instr)
{
    /* Collect information about string length in all the case*/
    charcount_t minLength = UINT_MAX;
    charcount_t maxLength = 0;
    BVUnit32 bvLength;  // bit i set <=> some case string has length i (only i < 32 tracked)
    instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->dictionary.Map([&](Js::JavascriptString * str, void *)
    {
        charcount_t len = str->GetLength();
        minLength = min(minLength, str->GetLength());
        maxLength = max(maxLength, str->GetLength());
        if (len < 32)
        {
            bvLength.Set(len);
        }
    });

    Func * func = instr->m_func;
    // Load the input string's length once for whichever filter we emit.
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->defaultTarget;
    if (minLength == maxLength)
    {
        // Generate single length filter
        InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(minLength, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else if (maxLength < 32)
    {
        // Generate bit filter
        // Jump to default label if the bit is not on for the length % 32
        IR::IntConstOpnd * lenBitMaskOpnd = IR::IntConstOpnd::New(bvLength.GetWord(), TyUint32, func);
        InsertBitTestBranch(lenBitMaskOpnd, strLengthOpnd, false, defaultLabelInstr, instr);
        // The bit test above only sees the length modulo 32, so separately send
        // any input length >= 32 to the default label (test the bits above the
        // low 5).
        // Jump to default label if the bit is > 32
        InsertTestBranch(strLengthOpnd, IR::IntConstOpnd::New(UINT32_MAX ^ 31, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else
    {
        // CONSIDER: Generate range filter
    }
    this->LowerMultiBr(instr, IR::HelperOp_SwitchStringLookUp);
}
  18495. IR::Instr *
  18496. Lowerer::LowerTry(IR::Instr* instr, bool tryCatch)
  18497. {
  18498. if (this->m_func->hasBailout)
  18499. {
  18500. this->EnsureBailoutReturnValueSym();
  18501. }
  18502. this->EnsureHasBailedOutSym();
  18503. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  18504. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), this->m_func);
  18505. instr->InsertBefore(setInstr);
  18506. LowererMD::Legalize(setInstr);
  18507. return m_lowererMD.LowerTry(instr, tryCatch ? IR::HelperOp_TryCatch : IR::HelperOp_TryFinally);
  18508. }
  18509. void
  18510. Lowerer::EnsureBailoutReturnValueSym()
  18511. {
  18512. if (this->m_func->m_bailoutReturnValueSym == nullptr)
  18513. {
  18514. this->m_func->m_bailoutReturnValueSym = StackSym::New(TyVar, this->m_func);
  18515. this->m_func->StackAllocate(this->m_func->m_bailoutReturnValueSym, sizeof(Js::Var));
  18516. }
  18517. }
  18518. void
  18519. Lowerer::EnsureHasBailedOutSym()
  18520. {
  18521. if (this->m_func->m_hasBailedOutSym == nullptr)
  18522. {
  18523. this->m_func->m_hasBailedOutSym = StackSym::New(TyUint32, this->m_func);
  18524. this->m_func->StackAllocate(this->m_func->m_hasBailedOutSym, MachRegInt);
  18525. }
  18526. }
// Emit (at most once per region) the bailout "return thunk" for a try/catch
// region. After an EH bailout, control flows through this thunk, which
// continues at the parent region's thunk — or at restoreLabel when the parent
// is the root region. Thunks are appended after the function's exit
// instruction (post-layout).
void
Lowerer::InsertReturnThunkForRegion(Region* region, IR::LabelInstr* restoreLabel)
{
    Assert(this->m_func->isPostLayout);
    Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch);

    if (!region->returnThunkEmitted)
    {
        this->m_func->m_exitInstr->InsertAfter(region->GetBailoutReturnThunkLabel());

        bool newLastInstrInserted = false;
        IR::Instr * insertBeforeInstr = region->GetBailoutReturnThunkLabel()->m_next;
        if (insertBeforeInstr == nullptr)
        {
            // The thunk label became the last instruction in the func: insert a
            // placeholder Nop so LowerEHRegionReturn has an insertion point, and
            // temporarily make it the tail.
            Assert(this->m_func->m_exitInstr == this->m_func->m_tailInstr);
            insertBeforeInstr = IR::Instr::New(Js::OpCode::Nop, this->m_func);
            newLastInstrInserted = true;
            region->GetBailoutReturnThunkLabel()->InsertAfter(insertBeforeInstr);
            this->m_func->m_tailInstr = insertBeforeInstr;
        }

        // Continuation target: parent region's thunk, or restoreLabel at the root.
        IR::LabelOpnd * continuationAddr;
        if (region->GetParent()->GetType() != RegionTypeRoot)
        {
            continuationAddr = IR::LabelOpnd::New(region->GetParent()->GetBailoutReturnThunkLabel(), this->m_func);
        }
        else
        {
            continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
        }

        IR::Instr * lastInstr = m_lowererMD.LowerEHRegionReturn(insertBeforeInstr, continuationAddr);

        if (newLastInstrInserted)
        {
            // Drop the placeholder Nop; the lowered return is the new tail.
            Assert(this->m_func->m_tailInstr == insertBeforeInstr);
            insertBeforeInstr->Remove();
            this->m_func->m_tailInstr = lastInstr;
        }

        region->returnThunkEmitted = true;
    }
}
  18564. void
  18565. Lowerer::SetHasBailedOut(IR::Instr * bailoutInstr)
  18566. {
  18567. Assert(this->m_func->isPostLayout);
  18568. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  18569. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func);
  18570. bailoutInstr->InsertBefore(setInstr);
  18571. LowererMD::Legalize(setInstr, true);
  18572. }
// After an EH bailout call, restore the stack pointer past any pending
// out-param (argument) stack space so the stack is balanced before jumping to
// the return thunk. Only needed on x86, where out-params are pushed on the
// machine stack; returns the last instruction inserted, or bailoutInstr itself
// when nothing was emitted.
IR::Instr*
Lowerer::EmitEHBailoutStackRestore(IR::Instr * bailoutInstr)
{
    Assert(this->m_func->isPostLayout);
#ifdef _M_IX86
    BailOutInfo * bailoutInfo = bailoutInstr->GetBailOutInfo();
    if (bailoutInfo->startCallCount != 0)
    {
        uint totalStackToBeRestored = 0;
        uint stackAlignmentAdjustment = 0;
        // Each StartCall whose out-param byte count wasn't already a multiple of
        // the stack alignment had one extra padding slot pushed; count those.
        for (uint i = 0; i < bailoutInfo->startCallCount; i++)
        {
            uint startCallOutParamCount = bailoutInfo->GetStartCallOutParamCount(i);
            if ((Math::Align<int32>(startCallOutParamCount * MachPtr, MachStackAlignment) - (startCallOutParamCount * MachPtr)) != 0)
            {
                stackAlignmentAdjustment++;
            }
        }

        totalStackToBeRestored = (bailoutInfo->totalOutParamCount + stackAlignmentAdjustment) * MachPtr;

        // LEA esp, [esp + totalStackToBeRestored] — adjusts esp without
        // touching the flags.
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegStackPointer(), TyMachReg, this->m_func);
        IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, totalStackToBeRestored, TyMachReg, this->m_func);
        IR::Instr * stackRestoreInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);

        bailoutInstr->InsertAfter(stackRestoreInstr);
        return stackRestoreInstr;
    }
#endif
    return bailoutInstr;
}
  18601. void
  18602. Lowerer::EmitSaveEHBailoutReturnValueAndJumpToRetThunk(IR::Instr * insertAfterInstr)
  18603. {
  18604. Assert(this->m_func->isPostLayout);
  18605. // After the CALL SaveAllRegistersAndBailout instruction, emit
  18606. //
  18607. // MOV bailoutReturnValueSym, eax
  18608. // JMP $currentRegion->bailoutReturnThunkLabel
  18609. IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
  18610. IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  18611. IR::Instr * movInstr = IR::Instr::New(LowererMD::GetStoreOp(TyVar), bailoutReturnValueSymOpnd, eaxOpnd, this->m_func);
  18612. insertAfterInstr->InsertAfter(movInstr);
  18613. LowererMD::Legalize(movInstr, true);
  18614. IR::BranchInstr * jumpInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, this->currentRegion->GetBailoutReturnThunkLabel(), this->m_func);
  18615. movInstr->InsertAfter(jumpInstr);
  18616. }
  18617. void
  18618. Lowerer::EmitRestoreReturnValueFromEHBailout(IR::LabelInstr * restoreLabel, IR::LabelInstr * epilogLabel)
  18619. {
  18620. Assert(this->m_func->isPostLayout);
  18621. // JMP $epilog
  18622. // $restore:
  18623. // MOV eax, bailoutReturnValueSym
  18624. // $epilog:
  18625. IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
  18626. IR::RegOpnd * eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  18627. IR::Instr * movInstr = IR::Instr::New(LowererMD::GetLoadOp(TyVar), eaxOpnd, bailoutReturnValueSymOpnd, this->m_func);
  18628. epilogLabel->InsertBefore(restoreLabel);
  18629. epilogLabel->InsertBefore(movInstr);
  18630. LowererMD::Legalize(movInstr, true);
  18631. restoreLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, epilogLabel, this->m_func));
  18632. }
// Insert "branch to targetLabel if bit 'bitIndex' of bitMaskOpnd is set
// (jumpIfBitOn) or clear (!jumpIfBitOn)" before insertBeforeInstr.
// NOTE(review): on x86/x64, BT interprets the bit index modulo the operand
// width for register operands — callers must range-check bitIndex separately
// when it can exceed the width (see GenerateSwitchStringLookup).
void
Lowerer::InsertBitTestBranch(IR::Opnd * bitMaskOpnd, IR::Opnd * bitIndex, bool jumpIfBitOn, IR::LabelInstr * targetLabel, IR::Instr * insertBeforeInstr)
{
#if defined(_M_IX86) || defined(_M_AMD64)
    // Generate bit test and branch
    // BT bitMaskOpnd, bitIndex
    // JB/JAE targetLabel
    Func * func = this->m_func;
    IR::Instr * instr = IR::Instr::New(Js::OpCode::BT, func);
    instr->SetSrc1(bitMaskOpnd);
    instr->SetSrc2(bitIndex);
    insertBeforeInstr->InsertBefore(instr);
    // BT's first (bit-base) operand must be a register or memory; hoist other
    // operand kinds (e.g. an immediate mask) into a register first.
    if (!(bitMaskOpnd->IsRegOpnd() || bitMaskOpnd->IsIndirOpnd() || bitMaskOpnd->IsMemRefOpnd()))
    {
        instr->HoistSrc1(Js::OpCode::MOV);
    }
    // BT puts the tested bit in CF: JB branches when the bit was set, JAE when clear.
    InsertBranch(jumpIfBitOn ? Js::OpCode::JB : Js::OpCode::JAE, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM)
    // ARM don't have bit test instruction, so just generated
    // MOV r1, 1
    // SHL r1, bitIndex
    // TEST bitMaskOpnd, r1
    // BEQ/BNEQ targetLabel
    Func * func = this->m_func;
    IR::RegOpnd * lenBitOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(lenBitOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    InsertShift(Js::OpCode::Shl_I4, false, lenBitOpnd, lenBitOpnd, bitIndex, insertBeforeInstr);
    InsertTestBranch(lenBitOpnd, bitMaskOpnd, jumpIfBitOn? Js::OpCode::BrNeq_A :Js::OpCode::BrEq_A, targetLabel, insertBeforeInstr);
#else
    AssertMsg(false, "Not implemented");
#endif
}
  18665. //
  18666. // Generates an object test and then a string test with the static string type
  18667. //
  18668. void
  18669. Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
  18670. {
  18671. Assert(srcReg);
  18672. if (!srcReg->GetValueType().IsString())
  18673. {
  18674. if (generateObjectCheck && !srcReg->IsNotTaggedValue())
  18675. {
  18676. this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
  18677. }
  18678. // CMP [regSrcStr + offset(type)] , static string type -- check base string type
  18679. // BrEq/BrNeq labelHelper.
  18680. IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
  18681. IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
  18682. if (continueLabel)
  18683. {
  18684. InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
  18685. }
  18686. else
  18687. {
  18688. InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  18689. }
  18690. }
  18691. }
// Lower Conv_Num. Fast path: a tagged int converts to itself, so copy src to
// dst and only reach the ConvNumber helper for (possibly) non-int values.
void
Lowerer::LowerConvNum(IR::Instr *instrLoad, bool noMathFastPath)
{
    if (PHASE_OFF(Js::OtherFastPathPhase, this->m_func) || noMathFastPath || !instrLoad->GetSrc1()->IsRegOpnd())
    {
        // No fast path available/allowed: plain helper call.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
        return;
    }

    // MOV dst, src1
    // TEST src1, 1
    // JNE $done
    // call ToNumber
    //$done:

    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = NULL;
    IR::Instr *instr;

    // Narrow the three cases: known int, known not-int, or unknown.
    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // Possibly an int: copy src to dst up front; if it is an int, that copy
        // is the whole conversion.
        // MOV dst, src1
        instr = LowererMD::CreateAssign(instrLoad->GetDst(), src1, instrLoad);

        if (!isInt)
        {
            // Unknown: emit the tagged-value test; ints jump past the helper.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            bool didTest = m_lowererMD.GenerateObjectTest(src1, instrLoad, labelDone);

            if (didTest)
            {
                // This label is needed only to mark the helper block
                IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                instrLoad->InsertBefore(labelHelper);
            }
        }
    }

    if (!isInt)
    {
        // Not known to be an int: the helper call handles the slow case;
        // labelDone (if created) skips over it for tagged ints.
        if (labelDone)
        {
            instrLoad->InsertAfter(labelDone);
        }
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
    }
    else
    {
        // Known tagged int: the MOV above was sufficient; drop the original instr.
        instrLoad->Remove();
    }
}
  18747. IR::Opnd *
  18748. Lowerer::LoadSlotArrayWithCachedLocalType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  18749. {
  18750. IR::RegOpnd *opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  18751. if (propertySymOpnd->UsesAuxSlot())
  18752. {
  18753. // If we use the auxiliary slot array, load it and return it
  18754. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  18755. IR::Opnd *opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  18756. LowererMD::CreateAssign(opndSlotArray, opndIndir, instrInsert);
  18757. return opndSlotArray;
  18758. }
  18759. else
  18760. {
  18761. // If we use inline slot return the address to the object header
  18762. return opndBase;
  18763. }
  18764. }
  18765. IR::Opnd *
  18766. Lowerer::LoadSlotArrayWithCachedProtoType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  18767. {
  18768. // Get the prototype object from the cache
  18769. Js::RecyclableObject *prototypeObject = propertySymOpnd->GetProtoObject();
  18770. Assert(prototypeObject != nullptr);
  18771. if (propertySymOpnd->UsesAuxSlot())
  18772. {
  18773. // If we use the auxiliary slot array, load it from the prototype object and return it
  18774. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  18775. IR::Opnd *opnd = IR::MemRefOpnd::New((char*)prototypeObject + Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func, IR::AddrOpndKindDynamicAuxSlotArrayRef);
  18776. LowererMD::CreateAssign(opndSlotArray, opnd, instrInsert);
  18777. return opndSlotArray;
  18778. }
  18779. else
  18780. {
  18781. // If we use inline slot return the address of the prototype object
  18782. return IR::MemRefOpnd::New(prototypeObject, TyMachReg, this->m_func);
  18783. }
  18784. }
  18785. IR::Instr *
  18786. Lowerer::LowerLdAsmJsEnv(IR::Instr * instr)
  18787. {
  18788. Assert(m_func->GetJnFunction()->GetIsAsmJsFunction());
  18789. IR::Opnd * functionObjOpnd;
  18790. IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  18791. Assert(!instr->GetSrc1());
  18792. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfModuleMemory(), TyMachPtr, m_func);
  18793. instr->SetSrc1(indirOpnd);
  18794. LowererMD::ChangeToAssign(instr);
  18795. return instrPrev;
  18796. }
IR::Instr *
Lowerer::LowerLdEnv(IR::Instr * instr)
{
    // Lower LdEnv: load the function's closure environment out of the
    // ScriptFunction object and turn the instruction into a plain assign.
    // Capture src1 BEFORE LoadFunctionObjectOpnd, which clears it (see the
    // Assert below); whether a src was originally present selects the shape
    // of the load we emit.
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * functionObjOpnd;
    IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
    Assert(!instr->GetSrc1());
    if (src1 == nullptr || functionObjOpnd->IsRegOpnd())
    {
        // Function object is in a register: load [funcObj + environment offset].
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
            Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, m_func);
        instr->SetSrc1(indirOpnd);
    }
    else
    {
        // Function object is a compile-time-known address: fold the environment
        // offset into an absolute memory reference instead of an indir.
        Assert(functionObjOpnd->IsAddrOpnd());
        IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
        IR::MemRefOpnd* functionEnvMemRefOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::ScriptFunction::GetOffsetOfEnvironment()),
            TyMachPtr, this->m_func, IR::AddrOpndKindDynamicFunctionEnvironmentRef);
        instr->SetSrc1(functionEnvMemRefOpnd);
    }
    LowererMD::ChangeToAssign(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerFrameDisplayCheck(IR::Instr * instr)
{
    // Lower a FrameDisplayCheck: emit runtime guards that the frame display
    // (scope chain) in dst has enough scopes, and that each slot array in the
    // chain referenced by jitted code is long enough. On failure we call the
    // fatal-error helper; on success the instr degenerates to an assign.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    // src2 carries the check record (scope/slot requirements) as an address.
    IR::AddrOpnd *addrOpnd = instr->UnlinkSrc2()->AsAddrOpnd();
    FrameDisplayCheckRecord *record = (FrameDisplayCheckRecord*)addrOpnd->m_address;
    // Labels are created lazily: only if at least one check is emitted.
    IR::LabelInstr *errorLabel = nullptr;
    IR::LabelInstr *continueLabel = nullptr;
    IR::RegOpnd *envOpnd = instr->GetDst()->AsRegOpnd();
    // Scope indices in the record are biased by the frame display header size.
    uint32 frameDisplayOffset = Js::FrameDisplay::GetOffsetOfScopes()/sizeof(Js::Var);
    if (record->slotId != (uint32)-1 && record->slotId > frameDisplayOffset)
    {
        // Check that the frame display has enough scopes in it to satisfy the code.
        errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
            Js::FrameDisplay::GetOffsetOfLength(),
            TyUint16, m_func, true);
        IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(record->slotId - frameDisplayOffset, TyUint16, m_func);
        // length <= required index ==> not enough scopes: jump to error.
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
    }
    if (record->table)
    {
        // Check the size of each of the slot arrays in the scope chain.
        // Each table entry maps a frame-display position (bucket.value) to the
        // highest slot id accessed in that scope's slot array (bucket.element).
        FOREACH_HASHTABLE_ENTRY(uint32, bucket, record->table)
        {
            uint32 slotId = bucket.element;
            if (slotId != (uint32)-1 && slotId > Js::ScopeSlots::FirstSlotIndex)
            {
                if (errorLabel == nullptr)
                {
                    errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                    continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
                }
                // Load the slot array pointer for this scope out of the frame display.
                IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                    bucket.value * sizeof(Js::Var),
                    TyVar, m_func, true);
                IR::RegOpnd * slotArrayOpnd = IR::RegOpnd::New(TyVar, m_func);
                InsertMove(slotArrayOpnd, indirOpnd, insertInstr);
                // Compare the slot array's encoded count against the required id.
                indirOpnd = IR::IndirOpnd::New(slotArrayOpnd,
                    Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                    TyUint32, m_func, true);
                IR::IntConstOpnd * slotIdOpnd = IR::IntConstOpnd::New(slotId - Js::ScopeSlots::FirstSlotIndex,
                    TyUint32, m_func);
                InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
            }
        }
        NEXT_HASHTABLE_ENTRY;
    }
    if (errorLabel)
    {
        // Success path jumps over the shared error block, which calls the
        // fatal internal error helper (does not return control normally).
        InsertBranch(Js::OpCode::Br, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }
    // The original instruction now just copies src1 to dst.
    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerSlotArrayCheck(IR::Instr * instr)
{
    // Lower a SlotArrayCheck: verify at runtime that the slot array in dst is
    // large enough for the highest slot id (carried in src2) the jitted code
    // will access. Failure falls into a fatal-error helper call; success skips
    // it. The instr itself then degenerates to an assign.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::RegOpnd *slotArrayOpnd = instr->GetDst()->AsRegOpnd();
    StackSym *stackSym = slotArrayOpnd->m_sym;
    IR::IntConstOpnd *slotIdOpnd = instr->UnlinkSrc2()->AsIntConstOpnd();
    uint32 slotId = (uint32)slotIdOpnd->GetValue();
    Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);
    // slotId == FirstSlotIndex means no real slot is accessed: no check needed.
    if (slotId > Js::ScopeSlots::FirstSlotIndex)
    {
        if (m_func->DoStackFrameDisplay() && stackSym->m_id == m_func->GetLocalClosureSym()->m_id)
        {
            // The pointer we loaded points to the reserved/known address where the slot array can be boxed.
            // Deref to get the real value.
            IR::IndirOpnd * srcOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func), 0, TyVar, m_func);
            IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyVar, m_func);
            InsertMove(dstOpnd, srcOpnd, insertInstr);
            stackSym = dstOpnd->m_sym;
        }
        IR::LabelInstr *errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // Compare the slot array's encoded count slot against the required id.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func),
            Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
            TyUint32, m_func, true);
        // Reuse src2's const opnd, rebased so it compares against the encoded count.
        slotIdOpnd->SetValue(slotId - Js::ScopeSlots::FirstSlotIndex);
        // count > required ==> OK, branch over the error block; otherwise fall
        // through into errorLabel and the fatal helper call.
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrGt_A, true, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }
    // The original instruction now just copies src1 to dst.
    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::RegOpnd *
Lowerer::LoadIndexFromLikelyFloat(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
    // Convert an index that profile data says is likely a float into an int32
    // register. Fast path: the value is actually a tagged int. Slow path:
    // untag/load the float and convert it to uint32, bailing to notIntLabel if
    // the conversion is lossy (i.e. the float is not an exact uint32).
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Func *func = insertBeforeInstr->m_func;
    IR::LabelInstr * convertToUint = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * fallThrough = IR::LabelInstr::New(Js::OpCode::Label, func);

    // First generate test for tagged int even though profile data says likely float. Indices are usually int and we need a fast path before we try to convert float to int
    //     mov  intIndex, index
    //     sar  intIndex, 1
    //     jae  convertToInt
    IR::RegOpnd *int32IndexOpnd = GenerateUntagVar(indexOpnd, convertToUint, insertBeforeInstr, !indexOpnd->IsTaggedInt());

    if (!skipNegativeCheck)
    {
        //     test index, index
        //     js   $notTaggedIntOrNegative
        InsertTestBranch(int32IndexOpnd, int32IndexOpnd, LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), negativeLabel, insertBeforeInstr);
    }
    InsertBranch(Js::OpCode::Br, fallThrough, insertBeforeInstr);

    insertBeforeInstr->InsertBefore(convertToUint);

    // try to convert float to int in a fast path
#if FLOATVAR
    // NaN-boxed vars: untag the float directly into a register.
    IR::RegOpnd* floatIndexOpnd = m_lowererMD.CheckFloatAndUntag(indexOpnd, insertBeforeInstr, notIntLabel);
#else
    // Boxed doubles: verify it's a JavascriptNumber, then read its value field.
    m_lowererMD.GenerateFloatTest(indexOpnd, insertBeforeInstr, notIntLabel);
    IR::IndirOpnd * floatIndexOpnd = IR::IndirOpnd::New(indexOpnd, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif
    IR::LabelInstr * doneConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * helperConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func, true /*helper*/);
    m_lowererMD.ConvertFloatToInt32(int32IndexOpnd, floatIndexOpnd, helperConvUint32, doneConvUint32, insertBeforeInstr);

    // helper path: inline conversion failed, call the runtime ToUInt32 helper.
    insertBeforeInstr->InsertBefore(helperConvUint32);
    m_lowererMD.LoadDoubleHelperArgument(insertBeforeInstr, floatIndexOpnd);
    IR::Instr * helperCall = IR::Instr::New(Js::OpCode::Call, int32IndexOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(helperCall);
    m_lowererMD.ChangeToHelperCall(helperCall, IR::HelperConv_ToUInt32Core);

    // main path
    insertBeforeInstr->InsertBefore(doneConvUint32);

    //Convert uint32 to back to float for comparison that conversion was indeed successful
    IR::RegOpnd *floatOpndFromUint32 = IR::RegOpnd::New(TyFloat64, func);
    m_lowererMD.EmitUIntToFloat(floatOpndFromUint32, int32IndexOpnd, insertBeforeInstr);

    // compare with float from the original indexOpnd, we need floatIndex == (float64)(uint32)floatIndex
    // NaN or a fractional/out-of-range index fails this check and goes to notIntLabel.
    InsertCompareBranch(floatOpndFromUint32, floatIndexOpnd, Js::OpCode::BrNeq_A, notIntLabel, insertBeforeInstr, false);

    insertBeforeInstr->InsertBefore(fallThrough);
    return int32IndexOpnd;
}
  18974. #if DBG
  18975. void
  18976. Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast)
  18977. {
  18978. FOREACH_INSTR_IN_RANGE(verifyLegalizeInstr, instrStart, instrLast)
  18979. {
  18980. LowererMD::Legalize<true>(verifyLegalizeInstr);
  18981. }
  18982. NEXT_INSTR_IN_RANGE;
  18983. }
  18984. #endif